Bug#1069130: python-mapnik: FTBFS with Mapnik 4.0.0

Bas Couwenberg sebastic at xs4all.nl
Tue Apr 16 21:03:40 BST 2024


Source: python-mapnik
Version: 1:0.0~20200224-7da019cf9-5
Severity: important
Tags: ftbfs upstream patch
User: debian-gis at lists.debian.org
Usertags: mapnik-4.0

Dear Maintainer,

Your package FTBFS with Mapnik 4.0.0.

pkg-config needs to be used instead of mapnik-config, and a new git snapshot from the upstream master branch is required for the moved headers.

Kind Regards,

Bas
-------------- next part --------------
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/control python-mapnik-0.0~20200224-7da019cf9/debian/control
--- python-mapnik-0.0~20200224-7da019cf9/debian/control	2023-11-16 19:43:42.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/control	2024-04-16 13:31:27.000000000 +0200
@@ -7,7 +7,7 @@
                dh-python,
                dh-sequence-python3,
                libboost-python-dev,
-               libmapnik-dev (>= 3.1.0),
+               libmapnik-dev (>= 4.0.0~),
                python3-all-dev,
                python3-setuptools,
                python3-cairo,
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/rules python-mapnik-0.0~20200224-7da019cf9/debian/rules
--- python-mapnik-0.0~20200224-7da019cf9/debian/rules	2023-11-16 19:43:42.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/rules	2024-04-16 13:31:27.000000000 +0200
@@ -7,6 +7,10 @@
 # Enable hardening build flags
 export DEB_BUILD_MAINT_OPTIONS=hardening=+all
 
+export DEB_CFLAGS_STRIP = -Werror=implicit-function-declaration
+
+include /usr/share/dpkg/architecture.mk
+
 export PYBUILD_NAME=mapnik
 
 # Enable Cairo support
@@ -16,7 +20,7 @@
 export SYSTEM_FONTS=/usr/share/fonts
 
 # Custom mapnik libraries
-export LIB_DIR_NAME=/mapnik/3.1
+export LIB_DIR_NAME=$(DEB_HOST_MULTIARCH)/mapnik/4.0.0
 
 %:
 	dh $@ --buildsystem=pybuild
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/mapnik-4.0.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/mapnik-4.0.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/mapnik-4.0.patch	1970-01-01 01:00:00.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/mapnik-4.0.patch	2024-04-16 13:31:27.000000000 +0200
@@ -0,0 +1,65 @@
+Description: Use pkg-config instead of mapnik-config.
+Author: Bas Couwenberg <sebastic at debian.org>
+
+--- a/setup.py
++++ b/setup.py
+@@ -107,30 +107,23 @@ os.environ['ARCHFLAGS'] = ''
+ if os.environ.get("MASON_BUILD", "false") == "true":
+     # run bootstrap.sh to get mason builds
+     subprocess.call(['./bootstrap.sh'])
+-    mapnik_config = 'mason_packages/.link/bin/mapnik-config'
+     mason_build = True
+ else:
+-    mapnik_config = 'mapnik-config'
+     mason_build = False
+ 
+ 
+ linkflags = []
+-lib_path = os.path.join(check_output([mapnik_config, '--prefix']),'lib')
+-linkflags.extend(check_output([mapnik_config, '--libs']).split(' '))
+-linkflags.extend(check_output([mapnik_config, '--ldflags']).split(' '))
+-linkflags.extend(check_output([mapnik_config, '--dep-libs']).split(' '))
+-linkflags.extend([
+-'-lmapnik-wkt',
+-'-lmapnik-json',
+-] + ['-l%s' % i for i in get_boost_library_names()])
++lib_path = os.path.join(check_output(['pkg-config', '--variable=prefix', 'libmapnik']),'lib')
++linkflags.extend(check_output(['pkg-config', '--libs', 'libmapnik']).split(' '))
++linkflags.extend(['-l%s' % i for i in get_boost_library_names()])
+ 
+ # Dynamically make the mapnik/paths.py file
+ f_paths = open('mapnik/paths.py', 'w')
+ f_paths.write('import os\n')
+ f_paths.write('\n')
+ 
+-input_plugin_path = check_output([mapnik_config, '--input-plugins'])
+-font_path = check_output([mapnik_config, '--fonts'])
++input_plugin_path = check_output(['pkg-config', '--variable=plugins_dir', 'libmapnik'])
++font_path = check_output(['pkg-config', '--variable=fonts_dir', 'libmapnik'])
+ 
+ if mason_build:
+     try:
+@@ -225,7 +218,7 @@ if mason_build:
+         except shutil.Error:
+             pass
+ 
+-extra_comp_args = check_output([mapnik_config, '--cflags']).split(' ')
++extra_comp_args = check_output(['pkg-config', '--cflags', 'libmapnik']).split(' ')
+ 
+ extra_comp_args = list(filter(lambda arg: arg != "-fvisibility=hidden", extra_comp_args))
+ 
+@@ -253,9 +246,12 @@ else:
+     linkflags.append('-Wl,-rpath=$ORIGIN/lib')
+ 
+ if os.environ.get("CC", False) == False:
+-    os.environ["CC"] = check_output([mapnik_config, '--cxx'])
++    os.environ["CC"] = 'c++'
+ if os.environ.get("CXX", False) == False:
+-    os.environ["CXX"] = check_output([mapnik_config, '--cxx'])
++    os.environ["CXX"] = 'c++'
++
++extra_comp_args = list(filter(lambda arg: arg != "", extra_comp_args))
++linkflags = list(filter(lambda arg: arg != "", linkflags))
+ 
+ setup(
+     name="mapnik",
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/series python-mapnik-0.0~20200224-7da019cf9/debian/patches/series
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/series	2023-11-16 19:29:14.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/series	2024-04-16 13:31:27.000000000 +0200
@@ -1,6 +1,2 @@
-skip-tests-for-missing-data.patch
-boost1.71.patch
-proj6-apis.patch
-proj6-syntax.patch
-no-distutils.patch
-python-3.12.patch
+upstream-master.patch
+mapnik-4.0.patch
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/boost1.71.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/boost1.71.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/boost1.71.patch	2023-02-20 15:06:28.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/boost1.71.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,19 +0,0 @@
-Description: Fix libboost_python detection for boost1.71.
-Author: Bas Couwenberg <sebastic at debian.org>
-Forwarded: https://github.com/mapnik/python-mapnik/pull/227
-Applied-Upstream: https://github.com/mapnik/python-mapnik/commit/739276ff57d28c4b4e6eca741854048aa12414be
-
---- a/setup.py
-+++ b/setup.py
-@@ -40,8 +40,10 @@ def find_boost_library(_id):
-         # Debian naming convention for versions installed in parallel
-         suffixes.insert(0, "-py%d%d" % (sys.version_info.major,
-                                         sys.version_info.minor))
-+        suffixes.insert(1, "%d%d" % (sys.version_info.major,
-+                                     sys.version_info.minor))
-         # standard suffix for Python3
--        suffixes.insert(1, sys.version_info.major)
-+        suffixes.insert(2, sys.version_info.major)
-     for suf in suffixes:
-         name = "%s%s" % (_id, suf)
-         lib = find_library(name)
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/no-distutils.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/no-distutils.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/no-distutils.patch	2023-02-20 15:06:40.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/no-distutils.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,66 +0,0 @@
-Description: Don't use deprecated distutils module.
-Author: Bas Couwenberg <sebastic at debian.org>
-Forwarded: https://github.com/mapnik/python-mapnik/pull/267
-
---- a/build.py
-+++ b/build.py
-@@ -1,7 +1,7 @@
- import glob
-+import sysconfig
- import os
- from subprocess import Popen, PIPE
--from distutils import sysconfig
- 
- Import('env')
- 
-@@ -14,11 +14,15 @@ def call(cmd, silent=True):
- 
- 
- prefix = env['PREFIX']
--target_path = os.path.normpath(sysconfig.get_python_lib() + os.path.sep + env['MAPNIK_NAME'])
-+if "deb_system" in sysconfig.get_scheme_names():
-+    python_modules_dir = sysconfig.get_path("purelib", "deb_system")
-+else:
-+    python_modules_dir = sysconfig.get_path("purelib")
-+target_path = os.path.normpath(python_modules_dir + os.path.sep + env['MAPNIK_NAME'])
- 
- py_env = env.Clone()
- 
--py_env.Append(CPPPATH = sysconfig.get_python_inc())
-+py_env.Append(CPPPATH = sysconfig.get_path('include'))
- 
- py_env.Append(CPPDEFINES = env['LIBMAPNIK_DEFINES'])
- 
---- a/setup.py
-+++ b/setup.py
-@@ -7,7 +7,6 @@ import shutil
- import subprocess
- import sys
- import glob
--from distutils import sysconfig
- from ctypes.util import find_library
- 
- from setuptools import Command, Extension, setup
-@@ -84,22 +83,6 @@ class WhichBoostCommand(Command):
-         print("\n".join(get_boost_library_names()))
- 
- 
--cflags = sysconfig.get_config_var('CFLAGS')
--sysconfig._config_vars['CFLAGS'] = re.sub(
--    ' +', ' ', cflags.replace('-g ', '').replace('-Os', '').replace('-arch i386', ''))
--opt = sysconfig.get_config_var('OPT')
--sysconfig._config_vars['OPT'] = re.sub(
--    ' +', ' ', opt.replace('-g ', '').replace('-Os', ''))
--ldshared = sysconfig.get_config_var('LDSHARED')
--sysconfig._config_vars['LDSHARED'] = re.sub(
--    ' +', ' ', ldshared.replace('-g ', '').replace('-Os', '').replace('-arch i386', ''))
--ldflags = sysconfig.get_config_var('LDFLAGS')
--sysconfig._config_vars['LDFLAGS'] = re.sub(
--    ' +', ' ', ldflags.replace('-g ', '').replace('-Os', '').replace('-arch i386', ''))
--pycflags = sysconfig.get_config_var('PY_CFLAGS')
--sysconfig._config_vars['PY_CFLAGS'] = re.sub(
--    ' +', ' ', pycflags.replace('-g ', '').replace('-Os', '').replace('-arch i386', ''))
--sysconfig._config_vars['CFLAGSFORSHARED'] = ''
- os.environ['ARCHFLAGS'] = ''
- 
- if os.environ.get("MASON_BUILD", "false") == "true":
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-apis.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-apis.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-apis.patch	2023-02-20 15:06:28.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-apis.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,87 +0,0 @@
-Description: Upgrade to use new APIs
-Author: Artem Pavlenko <artem at mapnik.org>
-Origin: https://github.com/mapnik/python-mapnik/commit/feec9afa66131b074c40359529e498eab0d79a02
-Forwarded: not-needed
-
---- a/src/mapnik_proj_transform.cpp
-+++ b/src/mapnik_proj_transform.cpp
-@@ -48,7 +48,7 @@ struct proj_transform_pickle_suite : boo
-     getinitargs(const proj_transform& p)
-     {
-         using namespace boost::python;
--        return boost::python::make_tuple(p.source(),p.dest());
-+        return boost::python::make_tuple(p.definition());
-     }
- };
- 
-@@ -62,7 +62,7 @@ mapnik::coord2d forward_transform_c(mapn
-     if (!t.forward(x,y,z)) {
-         std::ostringstream s;
-         s << "Failed to forward project "
--          << "from " << t.source().params() << " to: " << t.dest().params();
-+          << t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return mapnik::coord2d(x,y);
-@@ -76,7 +76,7 @@ mapnik::coord2d backward_transform_c(map
-     if (!t.backward(x,y,z)) {
-         std::ostringstream s;
-         s << "Failed to back project "
--          << "from " <<  t.dest().params() << " to: " << t.source().params();
-+         << t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return mapnik::coord2d(x,y);
-@@ -88,7 +88,7 @@ mapnik::box2d<double> forward_transform_
-     if (!t.forward(new_box)) {
-         std::ostringstream s;
-         s << "Failed to forward project "
--          << "from " << t.source().params() << " to: " << t.dest().params();
-+          << t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return new_box;
-@@ -100,7 +100,7 @@ mapnik::box2d<double> backward_transform
-     if (!t.backward(new_box)){
-         std::ostringstream s;
-         s << "Failed to back project "
--          << "from " <<  t.dest().params() << " to: " << t.source().params();
-+          << t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return new_box;
-@@ -112,7 +112,7 @@ mapnik::box2d<double> forward_transform_
-     if (!t.forward(new_box,points)) {
-         std::ostringstream s;
-         s << "Failed to forward project "
--          << "from " << t.source().params() << " to: " << t.dest().params();
-+          << t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return new_box;
-@@ -124,7 +124,7 @@ mapnik::box2d<double> backward_transform
-     if (!t.backward(new_box,points)){
-         std::ostringstream s;
-         s << "Failed to back project "
--          << "from " <<  t.dest().params() << " to: " << t.source().params();
-+          <<  t.definition();
-         throw std::runtime_error(s.str());
-     }
-     return new_box;
-@@ -136,7 +136,7 @@ void export_proj_transform ()
- {
-     using namespace boost::python;
- 
--    class_<proj_transform, boost::noncopyable>("ProjTransform", init< projection const&, projection const& >())
-+    class_<proj_transform, boost::noncopyable>("ProjTransform", init<projection const&, projection const&>())
-         .def_pickle(proj_transform_pickle_suite())
-         .def("forward", forward_transform_c)
-         .def("backward",backward_transform_c)
-@@ -144,6 +144,7 @@ void export_proj_transform ()
-         .def("backward",backward_transform_env)
-         .def("forward", forward_transform_env_p)
-         .def("backward",backward_transform_env_p)
-+        .def("definition",&proj_transform::definition)
-         ;
- 
- }
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-syntax.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-syntax.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-syntax.patch	2023-02-20 15:06:28.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/proj6-syntax.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,537 +0,0 @@
-Description: Update to use libproj >=6 projection initialisation syntax
-Author: Artem Pavlenko <artem at mapnik.org>
-Origin: https://github.com/mapnik/python-mapnik/commit/ca66af65204d68a5496a94d36d69bf61144daf3b
-Forwarded: not-needed
-
---- a/mapnik/__init__.py
-+++ b/mapnik/__init__.py
-@@ -156,7 +156,7 @@ class _Coord(Coord, _injector()):
-         Example: Project the geographic coordinates of the
-                  city center of Stuttgart into the local
-                  map projection (GK Zone 3/DHDN, EPSG 31467)
--        >>> p = Projection('+init=epsg:31467')
-+        >>> p = Projection('epsg:31467')
-         >>> Coord(9.1, 48.7).forward(p)
-         Coord(3507360.12813,5395719.2749)
-         """
-@@ -176,7 +176,7 @@ class _Coord(Coord, _injector()):
-                  city center of Stuttgart in the local
-                  map projection (GK Zone 3/DHDN, EPSG 31467)
-                  into geographic coordinates:
--        >>> p = Projection('+init=epsg:31467')
-+        >>> p = Projection('epsg:31467')
-         >>> Coord(3507360.12813,5395719.2749).inverse(p)
-         Coord(9.1, 48.7)
-         """
---- a/src/mapnik_layer.cpp
-+++ b/src/mapnik_layer.cpp
-@@ -146,13 +146,13 @@ void export_layer()
-     class_<layer>("Layer", "A Mapnik map layer.", init<std::string const&,optional<std::string const&> >(
-                       "Create a Layer with a named string and, optionally, an srs string.\n"
-                       "\n"
--                      "The srs can be either a Proj.4 epsg code ('+init=epsg:<code>') or\n"
--                      "of a Proj.4 literal ('+proj=<literal>').\n"
--                      "If no srs is specified it will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
-+                      "The srs can be either a Proj epsg code ('epsg:<code>') or\n"
-+                      "of a Proj literal ('+proj=<literal>').\n"
-+                      "If no srs is specified it will default to 'epsg:4326'\n"
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr\n"
-                       "<mapnik._mapnik.Layer object at 0x6a270>\n"
-                       ))
-@@ -166,7 +166,7 @@ void export_layer()
-              "\n"
-              "Usage:\n"
-              ">>> from mapnik import Layer\n"
--             ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+             ">>> lyr = Layer('My Layer','epsg:4326')\n"
-              ">>> lyr.envelope()\n"
-              "box2d(-1.0,-1.0,0.0,0.0) # default until a datasource is loaded\n"
-             )
-@@ -183,7 +183,7 @@ void export_layer()
-              "\n"
-              "Usage:\n"
-              ">>> from mapnik import Layer\n"
--             ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+             ">>> lyr = Layer('My Layer','epsg:4326')\n"
-              ">>> lyr.visible(1.0/1000000)\n"
-              "True\n"
-              ">>> lyr.active = False\n"
-@@ -198,7 +198,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.active\n"
-                       "True # Active by default\n"
-                       ">>> lyr.active = False # set False to disable layer rendering\n"
-@@ -213,7 +213,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.status\n"
-                       "True # Active by default\n"
-                       ">>> lyr.status = False # set False to disable layer rendering\n"
-@@ -250,7 +250,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer, Datasource\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.datasource = Datasource(type='shape',file='world_borders')\n"
-                       ">>> lyr.datasource\n"
-                       "<mapnik.Datasource object at 0x65470>\n"
-@@ -285,7 +285,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.maximum_scale_denominator\n"
-                       "1.7976931348623157e+308 # default is the numerical maximum\n"
-                       ">>> lyr.maximum_scale_denominator = 1.0/1000000\n"
-@@ -300,7 +300,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.minimum_scale_denominator # default is 0\n"
-                       "0.0\n"
-                       ">>> lyr.minimum_scale_denominator = 1.0/1000000\n"
-@@ -315,7 +315,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import Layer\n"
--                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
-                       ">>> lyr.name\n"
-                       "'My Layer'\n"
-                       ">>> lyr.name = 'New Name'\n"
-@@ -330,7 +330,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import layer\n"
--                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = layer('My layer','epsg:4326')\n"
-                       ">>> lyr.queryable\n"
-                       "False # Not queryable by default\n"
-                       ">>> lyr.queryable = True\n"
-@@ -345,12 +345,12 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import layer\n"
--                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = layer('My layer','epsg:4326')\n"
-                       ">>> lyr.srs\n"
--                      "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n"
--                      ">>> # set to google mercator with Proj.4 literal\n"
-+                      "'epsg:4326' # The default srs if not initialized with custom srs\n"
-+                      ">>> # set to google mercator with Proj literal\n"
-                       "... \n"
--                      ">>> lyr.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n"
-+                      ">>> lyr.srs = 'epsg:3857'\n"
-             )
- 
-         .add_property("group_by",
-@@ -367,7 +367,7 @@ void export_layer()
-                       "\n"
-                       "Usage:\n"
-                       ">>> from mapnik import layer\n"
--                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
-+                      ">>> lyr = layer('My layer','epsg:4326')\n"
-                       ">>> lyr.styles\n"
-                       "<mapnik._mapnik.Names object at 0x6d3e8>\n"
-                       ">>> len(lyr.styles)\n"
---- a/src/mapnik_map.cpp
-+++ b/src/mapnik_map.cpp
-@@ -165,9 +165,9 @@ void export_map()
-     class_<Map>("Map","The map object.",init<int,int,optional<std::string const&> >(
-                     ( arg("width"),arg("height"),arg("srs") ),
-                     "Create a Map with a width and height as integers and, optionally,\n"
--                    "an srs string either with a Proj.4 epsg code ('+init=epsg:<code>')\n"
--                    "or with a Proj.4 literal ('+proj=<literal>').\n"
--                    "If no srs is specified the map will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
-+                    "an srs string either with a Proj epsg code ('epsg:<code>')\n"
-+                    "or with a Proj literal ('+proj=<literal>').\n"
-+                    "If no srs is specified the map will default to 'epsg:4326'\n"
-                     "\n"
-                     "Usage:\n"
-                     ">>> from mapnik import Map\n"
-@@ -175,7 +175,7 @@ void export_map()
-                     ">>> m\n"
-                     "<mapnik._mapnik.Map object at 0x6a240>\n"
-                     ">>> m.srs\n"
--                    "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
-+                    "'epsg:4326'\n"
-                     ))
- 
-         .def("append_style",insert_style,
-@@ -502,22 +502,22 @@ void export_map()
-         .add_property("srs",
-                       make_function(&Map::srs,return_value_policy<copy_const_reference>()),
-                       &Map::set_srs,
--                      "Spatial reference in Proj.4 format.\n"
-+                      "Spatial reference in Proj format.\n"
-                       "Either an epsg code or proj literal.\n"
-                       "For example, a proj literal:\n"
--                      "\t'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
-+                      "\t'epsg:4326'\n"
-                       "and a proj epsg code:\n"
--                      "\t'+init=epsg:4326'\n"
-+                      "\t'epsg:4326'\n"
-                       "\n"
-                       "Note: using epsg codes requires the installation of\n"
--                      "the Proj.4 'epsg' data file normally found in '/usr/local/share/proj'\n"
-+                      "the Proj 'epsg' data file normally found in '/usr/local/share/proj'\n"
-                       "\n"
-                       "Usage:\n"
-                       ">>> m.srs\n"
--                      "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n"
-+                      "'epsg:4326' # The default srs if not initialized with custom srs\n"
-                       ">>> # set to google mercator with Proj.4 literal\n"
-                       "... \n"
--                      ">>> m.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n"
-+                      ">>> m.srs = 'epsg:3857'\n"
-             )
- 
-         .add_property("width",
---- a/src/mapnik_projection.cpp
-+++ b/src/mapnik_projection.cpp
-@@ -95,8 +95,8 @@ void export_projection ()
-     using namespace boost::python;
- 
-     class_<projection>("Projection", "Represents a map projection.",init<std::string const&>(
--                           (arg("proj4_string")),
--                           "Constructs a new projection from its PROJ.4 string representation.\n"
-+                           (arg("proj_string")),
-+                           "Constructs a new projection from its PROJ string representation.\n"
-                            "\n"
-                            "The constructor will throw a RuntimeError in case the projection\n"
-                            "cannot be initialized.\n"
-@@ -105,9 +105,9 @@ void export_projection ()
-         .def_pickle(projection_pickle_suite())
-         .def ("params", make_function(&projection::params,
-                                       return_value_policy<copy_const_reference>()),
--              "Returns the PROJ.4 string for this projection.\n")
-+              "Returns the PROJ string for this projection.\n")
-         .def ("expanded",&projection::expanded,
--              "normalize PROJ.4 definition by expanding +init= syntax\n")
-+              "normalize PROJ definition by expanding epsg:XXXX syntax\n")
-         .add_property ("geographic", &projection::is_geographic,
-                        "This property is True if the projection is a geographic projection\n"
-                        "(i.e. it uses lon/lat coordinates)\n")
---- a/src/mapnik_python.cpp
-+++ b/src/mapnik_python.cpp
-@@ -598,9 +598,9 @@ std::string mapnik_version_string()
-     return MAPNIK_VERSION_STRING;
- }
- 
--bool has_proj4()
-+bool has_proj()
- {
--#if defined(MAPNIK_USE_PROJ4)
-+#if defined(MAPNIK_USE_PROJ)
-     return true;
- #else
-     return false;
-@@ -1035,8 +1035,8 @@ BOOST_PYTHON_MODULE(_mapnik)
-   ">>> m = Map(256,256)\n"
-   ">>> load_map(m,'mapfile_wgs84.xml')\n"
-   ">>> m.srs\n"
--  "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
--  ">>> m.srs = '+init=espg:3395'\n"
-+  "'epsg:4326'\n"
-+  ">>> m.srs = 'espg:3395'\n"
-   ">>> save_map(m,'mapfile_mercator.xml')\n"
-   "\n"
-   );
-@@ -1045,7 +1045,7 @@ BOOST_PYTHON_MODULE(_mapnik)
-     def("save_map_to_string", &save_map_to_string, save_map_to_string_overloads());
-     def("mapnik_version", &mapnik_version,"Get the Mapnik version number");
-     def("mapnik_version_string", &mapnik_version_string,"Get the Mapnik version string");
--    def("has_proj4", &has_proj4, "Get proj4 status");
-+    def("has_proj", &has_proj, "Get proj status");
-     def("has_jpeg", &has_jpeg, "Get jpeg read/write support status");
-     def("has_png", &has_png, "Get png read/write support status");
-     def("has_tiff", &has_tiff, "Get tiff read/write support status");
---- a/test/python_tests/agg_rasterizer_integer_overflow_test.py
-+++ b/test/python_tests/agg_rasterizer_integer_overflow_test.py
-@@ -27,7 +27,7 @@ geojson = {"type": "Feature",
- 
- def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_memory():
-     expected_color = mapnik.Color('white')
--    projection = '+init=epsg:4326'
-+    projection = 'epsg:4326'
-     ds = mapnik.MemoryDatasource()
-     context = mapnik.Context()
-     feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
-@@ -57,7 +57,7 @@ def test_that_coordinates_do_not_overflo
- 
- def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_csv():
-     expected_color = mapnik.Color('white')
--    projection = '+init=epsg:4326'
-+    projection = 'epsg:4326'
-     ds = mapnik.MemoryDatasource()
-     context = mapnik.Context()
-     feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
---- a/test/python_tests/datasource_test.py
-+++ b/test/python_tests/datasource_test.py
-@@ -30,7 +30,7 @@ def test_that_datasources_exist():
- 
- @raises(RuntimeError)
- def test_vrt_referring_to_missing_files():
--    srs = '+init=epsg:32630'
-+    srs = 'epsg:32630'
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-         lyr = mapnik.Layer('dataraster')
-         lyr.datasource = mapnik.Gdal(
---- a/test/python_tests/layer_modification_test.py
-+++ b/test/python_tests/layer_modification_test.py
-@@ -58,8 +58,8 @@ def test_adding_datasource_to_layer():
- 
-         # also note that since the srs was black it defaulted to wgs84
-         eq_(m.layers[0].srs,
--            '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
--        eq_(lyr.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-+            'epsg:4326')
-+        eq_(lyr.srs, 'epsg:4326')
- 
-         # now add a datasource one...
-         ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
---- a/test/python_tests/layer_test.py
-+++ b/test/python_tests/layer_test.py
-@@ -14,7 +14,7 @@ from .utilities import run_all
- def test_layer_init():
-     l = mapnik.Layer('test')
-     eq_(l.name, 'test')
--    eq_(l.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-+    eq_(l.srs, 'epsg:4326')
-     eq_(l.envelope(), mapnik.Box2d())
-     eq_(l.clear_label_cache, False)
-     eq_(l.cache_features, False)
---- a/test/python_tests/multi_tile_raster_test.py
-+++ b/test/python_tests/multi_tile_raster_test.py
-@@ -16,7 +16,7 @@ def setup():
- 
- 
- def test_multi_tile_policy():
--    srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
-+    srs = 'epsg:4326'
-     lyr = mapnik.Layer('raster')
-     if 'raster' in mapnik.DatasourceCache.plugin_names():
-         lyr.datasource = mapnik.Raster(
---- a/test/python_tests/object_test.py
-+++ b/test/python_tests/object_test.py
-@@ -331,7 +331,7 @@
- 
- #     eq_(m.width, 256)
- #     eq_(m.height, 256)
--#     eq_(m.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-+#     eq_(m.srs, 'epsg:4326')
- #     eq_(m.base, '')
- #     eq_(m.maximum_extent, None)
- #     eq_(m.background_image, None)
-@@ -361,7 +361,7 @@
- 
- # # Map initialization from string
- # def test_map_init_from_string():
--#     map_string = '''<Map background-color="steelblue" base="./" srs="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs">
-+#     map_string = '''<Map background-color="steelblue" base="./" srs="epsg:4326">
- #      <Style name="My Style">
- #       <Rule>
- #        <PolygonSymbolizer fill="#f2eff9"/>
---- a/test/python_tests/ogr_test.py
-+++ b/test/python_tests/ogr_test.py
-@@ -36,7 +36,7 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         assert_almost_equal(e.maxy, 1649661.267, places=3)
-         meta = ds.describe()
-         eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
--        eq_('+proj=lcc' in meta['proj4'], True)
-+        eq_('+proj=lcc' in meta['proj'], True)
- 
-     # Shapefile properties
-     def test_shapefile_properties():
-@@ -111,7 +111,7 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         eq_(e.maxy, 1)
-         meta = ds.describe()
-         eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
--        eq_('+proj=merc' in meta['proj4'], True)
-+        eq_('+proj=merc' in meta['proj'], True)
- 
-     def test_ogr_reading_gpx_waypoint():
-         if not os.path.exists('../data/gpx/empty.gpx'):
-@@ -125,7 +125,7 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         eq_(e.maxy, 48)
-         meta = ds.describe()
-         eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
--        eq_('+proj=longlat' in meta['proj4'], True)
-+        eq_('+proj=longlat' in meta['proj'], True)
- 
-     def test_ogr_empty_data_should_not_throw():
-         if not os.path.exists('../data/gpx/empty.gpx'):
-@@ -144,7 +144,7 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         mapnik.logger.set_severity(default_logging_severity)
-         meta = ds.describe()
-         eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
--        eq_('+proj=longlat' in meta['proj4'], True)
-+        eq_('+proj=longlat' in meta['proj'], True)
- 
-     # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member.
-     # def test_handling_of_null_features():
-@@ -164,7 +164,7 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         assert_almost_equal(e.maxy, 45.0, places=1)
-         meta = ds.describe()
-         eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
--        #eq_('+proj=longlat' in meta['proj4'],True)
-+        #eq_('+proj=longlat' in meta['proj'],True)
-         fs = ds.featureset()
-         feat = fs.next()
-         actual = json.loads(feat.to_geojson())
---- a/test/python_tests/projection_test.py
-+++ b/test/python_tests/projection_test.py
-@@ -16,14 +16,14 @@ if PYTHON3:
- 
- 
- def test_normalizing_definition():
--    p = mapnik.Projection('+init=epsg:4326')
-+    p = mapnik.Projection('epsg:4326')
-     expanded = p.expanded()
-     eq_('+proj=longlat' in expanded, True)
- 
- 
- # Trac Ticket #128
- def test_wgs84_inverse_forward():
--    p = mapnik.Projection('+init=epsg:4326')
-+    p = mapnik.Projection('epsg:4326')
- 
-     c = mapnik.Coord(3.01331418311, 43.3333092669)
-     e = mapnik.Box2d(-122.54345245, 45.12312553, 68.2335581353, 48.231231233)
-@@ -78,7 +78,7 @@ def merc2wgs(x, y):
-         y = -85.0511
-     return [x, y]
- 
--# echo -109 37 | cs2cs -f "%.10f" +init=epsg:4326 +to +init=epsg:3857
-+# echo -109 37 | cs2cs -f "%.10f" epsg:4326 +to epsg:3857
- #-12133824.4964668211    4439106.7872505859 0.0000000000
- 
- # todo
-@@ -89,12 +89,12 @@ def merc2wgs(x, y):
- 
- 
- def test_proj_transform_between_init_and_literal():
--    one = mapnik.Projection('+init=epsg:4326')
--    two = mapnik.Projection('+init=epsg:3857')
-+    one = mapnik.Projection('epsg:4326')
-+    two = mapnik.Projection('epsg:3857')
-     tr1 = mapnik.ProjTransform(one, two)
-     tr1b = mapnik.ProjTransform(two, one)
--    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
--    merc = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
-+    wgs84 = 'epsg:4326'
-+    merc = 'epsg:3857'
-     src = mapnik.Projection(wgs84)
-     dest = mapnik.Projection(merc)
-     tr2 = mapnik.ProjTransform(src, dest)
-@@ -133,8 +133,8 @@ def test_proj_antimeridian_bbox():
-     # this is logic from feature_style_processor::prepare_layer()
-     PROJ_ENVELOPE_POINTS = 20  # include/mapnik/config.hpp
- 
--    prjGeog = mapnik.Projection('+init=epsg:4326')
--    prjProj = mapnik.Projection('+init=epsg:2193')
-+    prjGeog = mapnik.Projection('epsg:4326')
-+    prjProj = mapnik.Projection('epsg:2193')
-     prj_trans_fwd = mapnik.ProjTransform(prjProj, prjGeog)
-     prj_trans_rev = mapnik.ProjTransform(prjGeog, prjProj)
- 
---- a/test/python_tests/query_tolerance_test.py
-+++ b/test/python_tests/query_tolerance_test.py
-@@ -20,7 +20,7 @@ if 'shape' in mapnik.DatasourceCache.plu
-         if not os.path.exists('../data/shp/arrows.shp'):
-             raise SkipTest
- 
--        srs = '+init=epsg:4326'
-+        srs = 'epsg:4326'
-         lyr = mapnik.Layer('test')
-         ds = mapnik.Shapefile(file='../data/shp/arrows.shp')
-         lyr.datasource = ds
---- a/test/python_tests/render_test.py
-+++ b/test/python_tests/render_test.py
-@@ -170,7 +170,7 @@ def test_render_points():
-     s.rules.append(r)
-     lyr = mapnik.Layer(
-         'Places',
--        '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-+        'epsg:4326')
-     lyr.datasource = ds
-     lyr.styles.append('places_labels')
-     # latlon bounding box corners
-@@ -178,8 +178,8 @@ def test_render_points():
-     lr_lonlat = mapnik.Coord(143.40, -38.80)
-     # render for different projections
-     projs = {
--        'google': '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over',
--        'latlon': '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
-+        'google': 'epsg:3857',
-+        'latlon': 'epsg:4326',
-         'merc': '+proj=merc +datum=WGS84 +k=1.0 +units=m +over +no_defs',
-         'utm': '+proj=utm +zone=54 +datum=WGS84'
-     }
-@@ -188,7 +188,7 @@ def test_render_points():
-         m.append_style('places_labels', s)
-         m.layers.append(lyr)
-         dest_proj = mapnik.Projection(projs[projdescr])
--        src_proj = mapnik.Projection('+init=epsg:4326')
-+        src_proj = mapnik.Projection('epsg:4326')
-         tr = mapnik.ProjTransform(src_proj, dest_proj)
-         m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat, lr_lonlat)))
-         # Render to SVG so that it can be checked how many points are there
---- a/test/python_tests/raster_symbolizer_test.py
-+++ b/test/python_tests/raster_symbolizer_test.py
-@@ -20,7 +20,7 @@ def test_dataraster_coloring():
-     if not os.path.exists('../data/raster/dataraster.tif'):
-         raise SkipTest
- 
--    srs = '+init=epsg:32630'
-+    srs = 'epsg:32630'
-     lyr = mapnik.Layer('dataraster')
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-         lyr.datasource = mapnik.Gdal(
-@@ -79,7 +79,7 @@ def test_dataraster_query_point():
-     if not os.path.exists('../data/raster/dataraster.tif'):
-         raise SkipTest
- 
--    srs = '+init=epsg:32630'
-+    srs = 'epsg:32630'
-     lyr = mapnik.Layer('dataraster')
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-         lyr.datasource = mapnik.Gdal(
-@@ -173,8 +173,8 @@ def test_raster_warping():
-     if not os.path.exists('../data/raster/dataraster.tif'):
-         raise SkipTest
- 
--    lyrSrs = "+init=epsg:32630"
--    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
-+    lyrSrs = "epsg:32630"
-+    mapSrs = 'epsg:4326'
-     lyr = mapnik.Layer('dataraster', lyrSrs)
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-         lyr.datasource = mapnik.Gdal(
-@@ -217,8 +217,8 @@ def test_raster_warping_does_not_overcli
-     if not os.path.exists('../data/raster/dataraster.tif'):
-         raise SkipTest
- 
--    lyrSrs = "+init=epsg:32630"
--    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
-+    lyrSrs = "epsg:32630"
-+    mapSrs = 'epsg:4326'
-     lyr = mapnik.Layer('dataraster', lyrSrs)
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-         lyr.datasource = mapnik.Gdal(
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/python-3.12.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/python-3.12.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/python-3.12.patch	2023-11-16 19:30:04.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/python-3.12.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,79 +0,0 @@
-Descriptiopn: Fix python 3.12 compatibility issues
-Author: Tom Hughes <tom at compton.nu>
-Origin: https://src.fedoraproject.org/rpms/python-mapnik/c/462f98a385e0833630212110931be21a3d7c64c5?branch=rawhide
-Forwarded: not-needed
-
---- a/src/python_grid_utils.cpp
-+++ b/src/python_grid_utils.cpp
-@@ -42,6 +42,7 @@
- #include "python_grid_utils.hpp"
- 
- // stl
-+#include <cwchar>
- #include <stdexcept>
- 
- namespace mapnik {
-@@ -67,7 +68,7 @@ void grid2utf(T const& grid_type,
-     for (std::size_t y = 0; y < data.height(); ++y)
-     {
-         std::uint16_t idx = 0;
--        const std::unique_ptr<Py_UNICODE[]> line(new Py_UNICODE[array_size]);
-+        const std::unique_ptr<wchar_t[]> line(new wchar_t[array_size]);
-         typename T::value_type const* row = data.get_row(y);
-         for (std::size_t x = 0; x < data.width(); ++x)
-         {
-@@ -93,19 +94,19 @@ void grid2utf(T const& grid_type,
-                         keys[val] = codepoint;
-                         key_order.push_back(val);
-                     }
--                    line[idx++] = static_cast<Py_UNICODE>(codepoint);
-+                    line[idx++] = static_cast<wchar_t>(codepoint);
-                     ++codepoint;
-                 }
-                 else
-                 {
--                    line[idx++] = static_cast<Py_UNICODE>(key_pos->second);
-+                    line[idx++] = static_cast<wchar_t>(key_pos->second);
-                 }
-             }
-             // else, shouldn't get here...
-         }
-         l.append(boost::python::object(
-                      boost::python::handle<>(
--                         PyUnicode_FromUnicode(line.get(), array_size))));
-+                         PyUnicode_FromWideChar(line.get(), array_size))));
-     }
- }
- 
-@@ -130,7 +131,7 @@ void grid2utf(T const& grid_type,
-     for (unsigned y = 0; y < grid_type.height(); y=y+resolution)
-     {
-         std::uint16_t idx = 0;
--        const std::unique_ptr<Py_UNICODE[]> line(new Py_UNICODE[array_size]);
-+        const std::unique_ptr<wchar_t[]> line(new wchar_t[array_size]);
-         mapnik::grid::value_type const* row = grid_type.get_row(y);
-         for (unsigned x = 0; x < grid_type.width(); x=x+resolution)
-         {
-@@ -156,19 +157,19 @@ void grid2utf(T const& grid_type,
-                         keys[val] = codepoint;
-                         key_order.push_back(val);
-                     }
--                    line[idx++] = static_cast<Py_UNICODE>(codepoint);
-+                    line[idx++] = static_cast<wchar_t>(codepoint);
-                     ++codepoint;
-                 }
-                 else
-                 {
--                    line[idx++] = static_cast<Py_UNICODE>(key_pos->second);
-+                    line[idx++] = static_cast<wchar_t>(key_pos->second);
-                 }
-             }
-             // else, shouldn't get here...
-         }
-         l.append(boost::python::object(
-                      boost::python::handle<>(
--                         PyUnicode_FromUnicode(line.get(), array_size))));
-+                         PyUnicode_FromWideChar(line.get(), array_size))));
-     }
- }
- 
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/skip-tests-for-missing-data.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/skip-tests-for-missing-data.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/skip-tests-for-missing-data.patch	2023-02-20 15:06:28.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/skip-tests-for-missing-data.patch	1970-01-01 01:00:00.000000000 +0100
@@ -1,1508 +0,0 @@
-Description: Skip tests for missing data.
-Author: Bas Couwenberg <sebastic at debian.org>
-Forwarded: https://github.com/mapnik/python-mapnik/pull/107
-
---- a/test/python_tests/sqlite_rtree_test.py
-+++ b/test/python_tests/sqlite_rtree_test.py
-@@ -5,6 +5,7 @@ import sys
- import threading
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -30,6 +31,9 @@ def create_ds(test_db, table):
- if 'sqlite' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_rtree_creation():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         test_db = '../data/sqlite/world.sqlite'
-         index = test_db + '.index'
-         table = 'world_merc'
---- a/test/python_tests/cairo_test.py
-+++ b/test/python_tests/cairo_test.py
-@@ -6,6 +6,7 @@ import os
- import shutil
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -178,6 +179,9 @@ if mapnik.has_pycairo():
- 
-     if 'sqlite' in mapnik.DatasourceCache.plugin_names():
-         def _pycairo_surface(type, sym):
-+            if not os.path.exists('../data/good_maps/%s_symbolizer.xml' % sym):
-+                raise SkipTest
-+
-             test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % (
-                 sym, type)
-             expected_cairo_file = './images/pycairo/cairo-surface-expected.%s.%s' % (
---- a/test/python_tests/compositing_test.py
-+++ b/test/python_tests/compositing_test.py
-@@ -5,6 +5,7 @@ from __future__ import print_function
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -182,6 +183,9 @@ def test_pre_multiply_status_of_map2():
- 
- if 'shape' in mapnik.DatasourceCache.plugin_names():
-     def test_style_level_comp_op():
-+        if not os.path.exists('../data/good_maps/style_level_comp_op.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/style_level_comp_op.xml')
-         m.zoom_all()
-@@ -218,6 +222,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         eq_(len(fails), 0, '\n' + '\n'.join(fails))
- 
-     def test_style_level_opacity():
-+        if not os.path.exists('../data/good_maps/style_level_opacity_and_blur.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(
-             m, '../data/good_maps/style_level_opacity_and_blur.xml')
-@@ -235,6 +242,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
- 
- def test_rounding_and_color_expectations():
-+    if not os.path.exists('../data/images/stripes_pattern.png'):
-+        raise SkipTest
-+
-     m = mapnik.Map(1, 1)
-     m.background = mapnik.Color('rgba(255,255,255,.4999999)')
-     im = mapnik.Image(m.width, m.height)
-@@ -260,6 +270,9 @@ def test_rounding_and_color_expectations
- 
- 
- def test_background_image_and_background_color():
-+    if not os.path.exists('../data/images/stripes_pattern.png'):
-+        raise SkipTest
-+
-     m = mapnik.Map(8, 8)
-     m.background = mapnik.Color('rgba(255,255,255,.5)')
-     m.background_image = '../data/images/stripes_pattern.png'
-@@ -269,6 +282,9 @@ def test_background_image_and_background
- 
- 
- def test_background_image_with_alpha_and_background_color():
-+    if not os.path.exists('../data/images/yellow_half_trans.png'):
-+        raise SkipTest
-+
-     m = mapnik.Map(10, 10)
-     m.background = mapnik.Color('rgba(255,255,255,.5)')
-     m.background_image = '../data/images/yellow_half_trans.png'
-@@ -278,6 +294,9 @@ def test_background_image_with_alpha_and
- 
- 
- def test_background_image_with_alpha_and_background_color_against_composited_control():
-+    if not os.path.exists('../data/images/yellow_half_trans.png'):
-+        raise SkipTest
-+
-     m = mapnik.Map(10, 10)
-     m.background = mapnik.Color('rgba(255,255,255,.5)')
-     m.background_image = '../data/images/yellow_half_trans.png'
---- a/test/python_tests/csv_test.py
-+++ b/test/python_tests/csv_test.py
-@@ -7,6 +7,7 @@ import glob
- import os
- 
- from nose.tools import eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -31,10 +32,16 @@ def teardown():
- if 'csv' in mapnik.DatasourceCache.plugin_names():
- 
-     def get_csv_ds(filename):
-+        if not os.path.exists('../data/csv/'):
-+            raise SkipTest
-+
-         return mapnik.Datasource(
-             type='csv', file=os.path.join('../data/csv/', filename))
- 
-     def test_broken_files(visual=False):
-+        if not os.path.exists('../data/csv/'):
-+            raise SkipTest
-+
-         broken = glob.glob("../data/csv/fails/*.*")
-         broken.extend(glob.glob("../data/csv/warns/*.*"))
- 
-@@ -50,6 +57,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-                     print('\x1b[1;32m? \x1b[0m', csv)
- 
-     def test_good_files(visual=False):
-+        if not os.path.exists('../data/csv/'):
-+            raise SkipTest
-+
-         good_files = glob.glob("../data/csv/*.*")
-         good_files.extend(glob.glob("../data/csv/warns/*.*"))
-         ignorable = os.path.join('..', 'data', 'csv', 'long_lat.vrt')
-@@ -508,6 +518,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-         get_csv_ds('more_column_values_than_headers.csv')
- 
-     def test_that_feature_id_only_incremented_for_valid_rows(**kwargs):
-+        if not os.path.exists(os.path.join('../data/csv/warns', 'feature_id_counting.csv')):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(type='csv',
-                                file=os.path.join('../data/csv/warns', 'feature_id_counting.csv'))
-         eq_(len(ds.fields()), 3)
-@@ -529,6 +542,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-         eq_(len(list(ds.all_features())), 2)
- 
-     def test_dynamically_defining_headers1(**kwargs):
-+        if not os.path.exists(os.path.join('../data/csv/fails', 'needs_headers_two_lines.csv')):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(type='csv',
-                                file=os.path.join(
-                                    '../data/csv/fails', 'needs_headers_two_lines.csv'),
-@@ -546,6 +562,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-         eq_(len(list(ds.all_features())), 2)
- 
-     def test_dynamically_defining_headers2(**kwargs):
-+        if not os.path.exists(os.path.join('../data/csv/fails', 'needs_headers_one_line.csv')):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(type='csv',
-                                file=os.path.join(
-                                    '../data/csv/fails', 'needs_headers_one_line.csv'),
-@@ -563,6 +582,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-         eq_(len(list(ds.all_features())), 1)
- 
-     def test_dynamically_defining_headers3(**kwargs):
-+        if not os.path.exists(os.path.join('../data/csv/fails', 'needs_headers_one_line_no_newline.csv')):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(type='csv',
-                                file=os.path.join(
-                                    '../data/csv/fails', 'needs_headers_one_line_no_newline.csv'),
---- a/test/python_tests/datasource_test.py
-+++ b/test/python_tests/datasource_test.py
-@@ -4,6 +4,7 @@ import sys
- from itertools import groupby
- 
- from nose.tools import eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -69,6 +70,9 @@ def test_vrt_referring_to_missing_files(
- 
- def test_field_listing():
-     if 'shape' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/shp/poly.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
-         fields = ds.fields()
-         eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
-@@ -81,6 +85,9 @@ def test_field_listing():
- 
- def test_total_feature_count_shp():
-     if 'shape' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/shp/poly.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
-         features = ds.all_features()
-         num_feats = len(list(features))
-@@ -89,6 +96,9 @@ def test_total_feature_count_shp():
- 
- def test_total_feature_count_json():
-     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/json/points.geojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0)
-         desc = ds.describe()
-         eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
-@@ -102,6 +112,9 @@ def test_total_feature_count_json():
- 
- def test_sqlite_reading():
-     if 'sqlite' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(
-             file='../data/sqlite/world.sqlite',
-             table_by_index=0)
-@@ -116,6 +129,9 @@ def test_sqlite_reading():
- 
- 
- def test_reading_json_from_string():
-+    if not os.path.exists('../data/json/points.geojson'):
-+        raise SkipTest
-+
-     with open('../data/json/points.geojson', 'r') as f:
-         json = f.read()
-     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-@@ -127,6 +143,9 @@ def test_reading_json_from_string():
- 
- def test_feature_envelope():
-     if 'shape' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/shp/poly.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
-         features = ds.all_features()
-         for feat in features:
-@@ -139,6 +158,9 @@ def test_feature_envelope():
- 
- def test_feature_attributes():
-     if 'shape' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/shp/poly.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
-         features = list(ds.all_features())
-         feat = features[0]
-@@ -150,6 +172,9 @@ def test_feature_attributes():
- 
- def test_ogr_layer_by_sql():
-     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-+        if not os.path.exists('../data/shp/poly.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/shp/poly.shp',
-                         layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
-         features = ds.all_features()
-@@ -164,6 +189,9 @@ def test_hit_grid():
-         return ["%d:%s" % (len(list(group)), name)
-                 for name, group in groupby(l)]
- 
-+    if not os.path.exists('../data/good_maps/agg_poly_gamma_map.xml'):
-+        raise SkipTest
-+
-     m = mapnik.Map(256, 256)
-     try:
-         mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
---- a/test/python_tests/extra_map_props_test.py
-+++ b/test/python_tests/extra_map_props_test.py
-@@ -4,6 +4,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -17,6 +18,9 @@ def setup():
- 
- 
- def test_arbitrary_parameters_attached_to_map():
-+    if not os.path.exists('../data/good_maps/extra_arbitary_map_parameters.xml'):
-+        raise SkipTest
-+
-     m = mapnik.Map(256, 256)
-     mapnik.load_map(m, '../data/good_maps/extra_arbitary_map_parameters.xml')
-     eq_(len(m.parameters), 5)
---- a/test/python_tests/feature_id_test.py
-+++ b/test/python_tests/feature_id_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -21,6 +22,9 @@ def setup():
- 
- 
- def compare_shape_between_mapnik_and_ogr(shapefile, query=None):
-+    if not os.path.exists(shapefile):
-+        raise SkipTest
-+
-     plugins = mapnik.DatasourceCache.plugin_names()
-     if 'shape' in plugins and 'ogr' in plugins:
-         ds1 = mapnik.Ogr(file=shapefile, layer_by_index=0)
-@@ -49,6 +53,9 @@ def test_shapefile_polygon_featureset_id
- 
- 
- def test_shapefile_polygon_feature_query_id():
-+    if not os.path.exists('../data/shp/world_merc.shp'):
-+        raise SkipTest
-+
-     bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
-     query = mapnik.Query(mapnik.Box2d(*bbox))
-     if 'ogr' in mapnik.DatasourceCache.plugin_names():
---- a/test/python_tests/fontset_test.py
-+++ b/test/python_tests/fontset_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -16,6 +17,9 @@ def setup():
- 
- 
- def test_loading_fontset_from_map():
-+    if not os.path.exists('../data/good_maps/fontset.xml'):
-+        raise SkipTest
-+
-     m = mapnik.Map(256, 256)
-     mapnik.load_map(m, '../data/good_maps/fontset.xml', True)
-     fs = m.find_fontset('book-fonts')
---- a/test/python_tests/geojson_plugin_test.py
-+++ b/test/python_tests/geojson_plugin_test.py
-@@ -4,6 +4,7 @@
- import os
- 
- from nose.tools import assert_almost_equal, eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -18,6 +19,9 @@ def setup():
- if 'geojson' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_geojson_init():
-+        if not os.path.exists('../data/json/escaped.geojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='geojson',
-             file='../data/json/escaped.geojson')
-@@ -28,6 +32,9 @@ if 'geojson' in mapnik.DatasourceCache.p
-         assert_almost_equal(e.maxy, 41.480573, places=3)
- 
-     def test_geojson_properties():
-+        if not os.path.exists('../data/json/escaped.geojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='geojson',
-             file='../data/json/escaped.geojson')
-@@ -64,6 +71,9 @@ if 'geojson' in mapnik.DatasourceCache.p
-         eq_(f['NOM_FR'], u'Qu?bec')
- 
-     def test_large_geojson_properties():
-+        if not os.path.exists('../data/json/escaped.geojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='geojson',
-             file='../data/json/escaped.geojson',
-@@ -115,6 +125,9 @@ if 'geojson' in mapnik.DatasourceCache.p
- 
- #    @raises(RuntimeError)
-     def test_that_nonexistant_query_field_throws(**kwargs):
-+        if not os.path.exists('../data/json/escaped.geojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='geojson',
-             file='../data/json/escaped.geojson')
-@@ -131,6 +144,9 @@ if 'geojson' in mapnik.DatasourceCache.p
- #        fs = ds.features(query)
- 
-     def test_parsing_feature_collection_with_top_level_properties():
-+        if not os.path.exists('../data/json/feature_collection_level_properties.json'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='geojson',
-             file='../data/json/feature_collection_level_properties.json')
---- a/test/python_tests/image_filters_test.py
-+++ b/test/python_tests/image_filters_test.py
-@@ -4,6 +4,7 @@ import os
- import re
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -31,6 +32,9 @@ def test_append():
- 
- if 'shape' in mapnik.DatasourceCache.plugin_names():
-     def test_style_level_image_filter():
-+        if not os.path.exists('../data/good_maps/style_level_image_filter.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/style_level_image_filter.xml')
-         m.zoom_all()
---- a/test/python_tests/image_test.py
-+++ b/test/python_tests/image_test.py
-@@ -5,6 +5,7 @@ import os
- import sys
- 
- from nose.tools import assert_almost_equal, eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -370,6 +371,9 @@ def test_png_round_trip():
- 
- 
- def test_image_open_from_string():
-+    if not os.path.exists('../data/images/dummy.png'):
-+        raise SkipTest
-+
-     filepath = '../data/images/dummy.png'
-     im1 = mapnik.Image.open(filepath)
-     with open(filepath, READ_FLAGS) as f:
---- a/test/python_tests/image_tiff_test.py
-+++ b/test/python_tests/image_tiff_test.py
-@@ -5,6 +5,7 @@ import hashlib
- import os
- 
- from nose.tools import assert_not_equal, eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -208,6 +209,9 @@ def test_tiff_round_trip_tiled():
- 
- 
- def test_tiff_rgb8_compare():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_rgb8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_rgb8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-rgb8.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -223,6 +227,9 @@ def test_tiff_rgb8_compare():
- 
- 
- def test_tiff_rgba8_compare_scanline():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_rgba8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-rgba8-scanline.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -239,6 +246,9 @@ def test_tiff_rgba8_compare_scanline():
- 
- 
- def test_tiff_rgba8_compare_stripped():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_rgba8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-rgba8-stripped.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -255,6 +265,9 @@ def test_tiff_rgba8_compare_stripped():
- 
- 
- def test_tiff_rgba8_compare_tiled():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_rgba8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-rgba8-tiled.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -271,6 +284,9 @@ def test_tiff_rgba8_compare_tiled():
- 
- 
- def test_tiff_gray8_compare_scanline():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray8-scanline.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -287,6 +303,9 @@ def test_tiff_gray8_compare_scanline():
- 
- 
- def test_tiff_gray8_compare_stripped():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray8-stripped.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -303,6 +322,9 @@ def test_tiff_gray8_compare_stripped():
- 
- 
- def test_tiff_gray8_compare_tiled():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray8_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray8-tiled.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -319,6 +341,9 @@ def test_tiff_gray8_compare_tiled():
- 
- 
- def test_tiff_gray16_compare_scanline():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray16_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray16-scanline.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -335,6 +360,9 @@ def test_tiff_gray16_compare_scanline():
- 
- 
- def test_tiff_gray16_compare_stripped():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray16_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray16-stripped.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -351,6 +379,9 @@ def test_tiff_gray16_compare_stripped():
- 
- 
- def test_tiff_gray16_compare_tiled():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray16_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray16-tiled.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -367,6 +398,9 @@ def test_tiff_gray16_compare_tiled():
- 
- 
- def test_tiff_gray32f_compare_scanline():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray32f_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray32f-scanline.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -383,6 +417,9 @@ def test_tiff_gray32f_compare_scanline()
- 
- 
- def test_tiff_gray32f_compare_stripped():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray32f_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray32f-stripped.tiff'
-     im = mapnik.Image.open(filepath1)
-@@ -399,6 +436,9 @@ def test_tiff_gray32f_compare_stripped()
- 
- 
- def test_tiff_gray32f_compare_tiled():
-+    if not os.path.exists('../data/tiff/ndvi_256x256_gray32f_striped.tif'):
-+        raise SkipTest
-+
-     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
-     filepath2 = '/tmp/mapnik-tiff-gray32f-tiled.tiff'
-     im = mapnik.Image.open(filepath1)
---- a/test/python_tests/layer_buffer_size_test.py
-+++ b/test/python_tests/layer_buffer_size_test.py
-@@ -2,6 +2,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -19,6 +20,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     # override the postive map buffer leading
-     # only one point to be rendered in the map
-     def test_layer_buffer_size_1():
-+        if not os.path.exists('../data/good_maps/layer_buffer_size_reduction.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         eq_(m.buffer_size, 0)
-         mapnik.load_map(m, '../data/good_maps/layer_buffer_size_reduction.xml')
---- a/test/python_tests/layer_modification_test.py
-+++ b/test/python_tests/layer_modification_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -16,6 +17,9 @@ def setup():
- 
- 
- def test_adding_datasource_to_layer():
-+    if not os.path.exists('../data/shp/world_merc.shp'):
-+        raise SkipTest
-+
-     map_string = '''<?xml version="1.0" encoding="utf-8"?>
- <Map>
- 
---- a/test/python_tests/map_query_test.py
-+++ b/test/python_tests/map_query_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import assert_almost_equal, eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -43,6 +44,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-     # map has never been zoomed (even with data)
-     @raises(RuntimeError)
-     def test_map_query_throw4():
-+        if not os.path.exists('../data/good_maps/agg_poly_gamma_map.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
-         m.query_point(0, 0, 0)
-@@ -50,12 +54,18 @@ if 'shape' in mapnik.DatasourceCache.plu
-     # invalid coords in general (do not intersect)
-     @raises(RuntimeError)
-     def test_map_query_throw5():
-+        if not os.path.exists('../data/good_maps/agg_poly_gamma_map.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
-         m.zoom_all()
-         m.query_point(0, 9999999999999999, 9999999999999999)
- 
-     def test_map_query_works1():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
-         merc_bounds = mapnik.Box2d(-20037508.34, -
-@@ -68,6 +78,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         eq_(feat.attributes['NAME_FORMA'], u'United States of America')
- 
-     def test_map_query_works2():
-+        if not os.path.exists('../data/good_maps/merc2wgs84_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
-         wgs84_bounds = mapnik.Box2d(-179.999999975, -
-@@ -87,6 +100,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         eq_(feat.attributes['NAME'], u'United States')
- 
-     def test_map_query_in_pixels_works1():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
-         merc_bounds = mapnik.Box2d(-20037508.34, -
-@@ -98,6 +114,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         eq_(feat.attributes['NAME_FORMA'], u'United States of America')
- 
-     def test_map_query_in_pixels_works2():
-+        if not os.path.exists('../data/good_maps/merc2wgs84_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
-         wgs84_bounds = mapnik.Box2d(-179.999999975, -
---- a/test/python_tests/markers_complex_rendering_test.py
-+++ b/test/python_tests/markers_complex_rendering_test.py
-@@ -2,6 +2,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -15,6 +16,9 @@ def setup():
- 
- if 'csv' in mapnik.DatasourceCache.plugin_names():
-     def test_marker_ellipse_render1():
-+        if not os.path.exists('../data/good_maps/marker_ellipse_transform.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform.xml')
-         m.zoom_all()
-@@ -32,6 +36,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
-                                                                 'test/python_tests/' + expected))
- 
-     def test_marker_ellipse_render2():
-+        if not os.path.exists('../data/good_maps/marker_ellipse_transform2.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform2.xml')
-         m.zoom_all()
---- a/test/python_tests/ogr_and_shape_geometries_test.py
-+++ b/test/python_tests/ogr_and_shape_geometries_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -30,6 +31,9 @@ plugins = mapnik.DatasourceCache.plugin_
- if 'shape' in plugins and 'ogr' in plugins:
- 
-     def ensure_geometries_are_interpreted_equivalently(filename):
-+        if not os.path.exists(filename):
-+            raise SkipTest
-+
-         ds1 = mapnik.Ogr(file=filename, layer_by_index=0)
-         ds2 = mapnik.Shapefile(file=filename)
-         fs1 = ds1.featureset()
---- a/test/python_tests/ogr_test.py
-+++ b/test/python_tests/ogr_test.py
-@@ -4,6 +4,7 @@
- import os
- 
- from nose.tools import assert_almost_equal, eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -24,6 +25,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
- 
-     # Shapefile initialization
-     def test_shapefile_init():
-+        if not os.path.exists('../data/shp/boundaries.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
-         e = ds.envelope()
-         assert_almost_equal(e.minx, -11121.6896651, places=7)
-@@ -36,6 +40,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
- 
-     # Shapefile properties
-     def test_shapefile_properties():
-+        if not os.path.exists('../data/shp/boundaries.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
-         f = list(ds.features_at_point(ds.envelope().center(), 0.001))[0]
-         eq_(ds.geometry_type(), mapnik.DataGeometryType.Polygon)
-@@ -56,6 +63,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
- 
-     @raises(RuntimeError)
-     def test_that_nonexistant_query_field_throws(**kwargs):
-+        if not os.path.exists('../data/shp/world_merc.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0)
-         eq_(len(ds.fields()), 11)
-         eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
-@@ -87,6 +97,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
- 
-     # OGR plugin extent parameter
-     def test_ogr_extent_parameter():
-+        if not os.path.exists('../data/shp/world_merc.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(
-             file='../data/shp/world_merc.shp',
-             layer_by_index=0,
-@@ -101,6 +114,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         eq_('+proj=merc' in meta['proj4'], True)
- 
-     def test_ogr_reading_gpx_waypoint():
-+        if not os.path.exists('../data/gpx/empty.gpx'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer='waypoints')
-         e = ds.envelope()
-         eq_(e.minx, -122)
-@@ -112,6 +128,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-         eq_('+proj=longlat' in meta['proj4'], True)
- 
-     def test_ogr_empty_data_should_not_throw():
-+        if not os.path.exists('../data/gpx/empty.gpx'):
-+            raise SkipTest
-+
-         default_logging_severity = mapnik.logger.get_severity()
-         mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
-         # use logger to silence expected warnings
-@@ -134,6 +153,9 @@ if 'ogr' in mapnik.DatasourceCache.plugi
-     #    eq_(len(fs),1)
- 
-     def test_geometry_type():
-+        if not os.path.exists('../data/csv/wkt.csv'):
-+            raise SkipTest
-+
-         ds = mapnik.Ogr(file='../data/csv/wkt.csv', layer_by_index=0)
-         e = ds.envelope()
-         assert_almost_equal(e.minx, 1.0, places=1)
---- a/test/python_tests/palette_test.py
-+++ b/test/python_tests/palette_test.py
-@@ -5,6 +5,7 @@ import os
- import sys
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -26,6 +27,9 @@ expected_rgb = '[Palette 2 colors #ff00f
- 
- 
- def test_reading_palettes():
-+    if not os.path.exists('../data/palettes/palette64.act') or not os.path.exists('../data/palettes/palette256.act'):
-+        raise SkipTest
-+
-     with open('../data/palettes/palette64.act', 'rb') as act:
-         palette = mapnik.Palette(act.read(), 'act')
-     eq_(palette.to_string(), expected_64)
-@@ -41,6 +45,9 @@ def test_reading_palettes():
- if 'shape' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_render_with_palette():
-+        if not os.path.exists('../data/good_maps/agg_poly_gamma_map.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(600, 400)
-         mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
-         m.zoom_all()
---- a/test/python_tests/png_encoding_test.py
-+++ b/test/python_tests/png_encoding_test.py
-@@ -4,6 +4,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -152,6 +153,9 @@ if mapnik.has_png():
-         eq_(t0_len < t1_len < t2_len, True)
- 
-     def test_transparency_levels_aerial():
-+        if not os.path.exists('../data/images/12_654_1580.png'):
-+            raise SkipTest
-+
-         im = mapnik.Image.open('../data/images/12_654_1580.png')
-         im_in = mapnik.Image.open(
-             './images/support/transparency/aerial_rgba.png')
---- a/test/python_tests/pngsuite_test.py
-+++ b/test/python_tests/pngsuite_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import assert_raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -26,6 +27,9 @@ def assert_good_file(fname):
- 
- 
- def get_pngs(good):
-+    if not os.path.exists(datadir):
-+        raise SkipTest
-+
-     files = [x for x in os.listdir(datadir) if x.endswith('.png')]
-     return [os.path.join(datadir, x)
-             for x in files if good != x.startswith('x')]
---- a/test/python_tests/query_tolerance_test.py
-+++ b/test/python_tests/query_tolerance_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -16,6 +17,9 @@ def setup():
- 
- if 'shape' in mapnik.DatasourceCache.plugin_names():
-     def test_query_tolerance():
-+        if not os.path.exists('../data/shp/arrows.shp'):
-+            raise SkipTest
-+
-         srs = '+init=epsg:4326'
-         lyr = mapnik.Layer('test')
-         ds = mapnik.Shapefile(file='../data/shp/arrows.shp')
---- a/test/python_tests/raster_symbolizer_test.py
-+++ b/test/python_tests/raster_symbolizer_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -16,6 +17,9 @@ def setup():
- 
- 
- def test_dataraster_coloring():
-+    if not os.path.exists('../data/raster/dataraster.tif'):
-+        raise SkipTest
-+
-     srs = '+init=epsg:32630'
-     lyr = mapnik.Layer('dataraster')
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-@@ -72,6 +76,9 @@ def test_dataraster_coloring():
- 
- 
- def test_dataraster_query_point():
-+    if not os.path.exists('../data/raster/dataraster.tif'):
-+        raise SkipTest
-+
-     srs = '+init=epsg:32630'
-     lyr = mapnik.Layer('dataraster')
-     if 'gdal' in mapnik.DatasourceCache.plugin_names():
-@@ -106,6 +113,9 @@ def test_dataraster_query_point():
- 
- 
- def test_load_save_map():
-+    if not os.path.exists('../data/good_maps/raster_symbolizer.xml'):
-+        raise SkipTest
-+
-     map = mapnik.Map(256, 256)
-     in_map = "../data/good_maps/raster_symbolizer.xml"
-     try:
-@@ -122,6 +132,9 @@ def test_load_save_map():
- 
- 
- def test_raster_with_alpha_blends_correctly_with_background():
-+    if not os.path.exists('../data/raster/white-alpha.png'):
-+        raise SkipTest
-+
-     WIDTH = 500
-     HEIGHT = 500
- 
-@@ -157,6 +170,9 @@ def test_raster_with_alpha_blends_correc
- 
- 
- def test_raster_warping():
-+    if not os.path.exists('../data/raster/dataraster.tif'):
-+        raise SkipTest
-+
-     lyrSrs = "+init=epsg:32630"
-     mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
-     lyr = mapnik.Layer('dataraster', lyrSrs)
-@@ -198,6 +214,9 @@ def test_raster_warping():
- 
- 
- def test_raster_warping_does_not_overclip_source():
-+    if not os.path.exists('../data/raster/dataraster.tif'):
-+        raise SkipTest
-+
-     lyrSrs = "+init=epsg:32630"
-     mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
-     lyr = mapnik.Layer('dataraster', lyrSrs)
---- a/test/python_tests/render_test.py
-+++ b/test/python_tests/render_test.py
-@@ -6,6 +6,7 @@ import sys
- import tempfile
- 
- from nose.tools import eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -126,6 +127,9 @@ def get_paired_images(w, h, mapfile):
- 
- 
- def test_render_from_serialization():
-+    if not os.path.exists('../data/good_maps/building_symbolizer.xml') or not os.path.exists('../data/good_maps/polygon_symbolizer.xml'):
-+        raise SkipTest
-+
-     try:
-         im, im2 = get_paired_images(
-             100, 100, '../data/good_maps/building_symbolizer.xml')
-@@ -263,6 +267,9 @@ def test_render_with_detector():
- if 'shape' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_render_with_scale_factor():
-+        if not os.path.exists('../data/good_maps/marker-text-line.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(256, 256)
-         mapnik.load_map(m, '../data/good_maps/marker-text-line.xml')
-         m.zoom_all()
---- a/test/python_tests/reprojection_test.py
-+++ b/test/python_tests/reprojection_test.py
-@@ -2,6 +2,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -17,11 +18,17 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     #@raises(RuntimeError)
-     def test_zoom_all_will_fail():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
-         m.zoom_all()
- 
-     def test_zoom_all_will_work_with_max_extent():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
-         merc_bounds = mapnik.Box2d(-20037508.34, -
-@@ -39,6 +46,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         # eq_(m.envelope(),merc_bounds)
- 
-     def test_visual_zoom_all_rendering1():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
-         merc_bounds = mapnik.Box2d(-20037508.34, -
-@@ -57,6 +67,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-                                                                 'test/python_tests/' + expected))
- 
-     def test_visual_zoom_all_rendering2():
-+        if not os.path.exists('../data/good_maps/wgs842merc_reprojection.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
-         m.zoom_all()
-@@ -73,6 +86,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # maximum-extent read from map.xml
-     def test_visual_zoom_all_rendering3():
-+        if not os.path.exists('../data/good_maps/bounds_clipping.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/bounds_clipping.xml')
-         m.zoom_all()
-@@ -89,6 +105,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # no maximum-extent
-     def test_visual_zoom_all_rendering4():
-+        if not os.path.exists('../data/good_maps/bounds_clipping.xml'):
-+            raise SkipTest
-+
-         m = mapnik.Map(512, 512)
-         mapnik.load_map(m, '../data/good_maps/bounds_clipping.xml')
-         m.maximum_extent = None
---- a/test/python_tests/save_map_test.py
-+++ b/test/python_tests/save_map_test.py
-@@ -5,6 +5,7 @@ import os
- import tempfile
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -70,6 +71,9 @@ def compare_map(xml):
- 
- 
- def test_compare_map():
-+    if not os.path.exists('../data/good_maps/'):
-+        raise SkipTest
-+
-     good_maps = glob.glob("../data/good_maps/*.xml")
-     good_maps = [os.path.normpath(p) for p in good_maps]
-     # remove one map that round trips CDATA differently, but this is okay
-@@ -85,6 +89,9 @@ def test_compare_map():
- 
- 
- def test_compare_map_deprecations():
-+    if not os.path.exists('../data/deprecated_maps/'):
-+        raise SkipTest
-+
-     dep = glob.glob("../data/deprecated_maps/*.xml")
-     dep = [os.path.normpath(p) for p in dep]
-     for m in dep:
---- a/test/python_tests/shapefile_test.py
-+++ b/test/python_tests/shapefile_test.py
-@@ -4,6 +4,7 @@
- import os
- 
- from nose.tools import assert_almost_equal, eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -19,6 +20,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # Shapefile initialization
-     def test_shapefile_init():
-+        if not os.path.exists('../data/shp/boundaries'):
-+            raise SkipTest
-+
-         s = mapnik.Shapefile(file='../data/shp/boundaries')
- 
-         e = s.envelope()
-@@ -30,6 +34,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # Shapefile properties
-     def test_shapefile_properties():
-+        if not os.path.exists('../data/shp/boundaries'):
-+            raise SkipTest
-+
-         s = mapnik.Shapefile(file='../data/shp/boundaries', encoding='latin1')
-         f = list(s.features_at_point(s.envelope().center()))[0]
- 
-@@ -45,6 +52,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     @raises(RuntimeError)
-     def test_that_nonexistant_query_field_throws(**kwargs):
-+        if not os.path.exists('../data/shp/world_merc'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/world_merc')
-         eq_(len(ds.fields()), 11)
-         eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
-@@ -69,6 +79,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         ds.features(query)
- 
-     def test_dbf_logical_field_is_boolean():
-+        if not os.path.exists('../data/shp/long_lat'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/long_lat')
-         eq_(len(ds.fields()), 7)
-         eq_(ds.fields(), ['LONG', 'LAT', 'LOGICAL_TR',
-@@ -90,6 +103,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # created by hand in qgis 1.8.0
-     def test_shapefile_point2d_from_qgis():
-+        if not os.path.exists('../data/shp/points/qgis.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/points/qgis.shp')
-         eq_(len(ds.fields()), 2)
-         eq_(ds.fields(), ['id', 'name'])
-@@ -99,6 +115,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-     # ogr2ogr tests/data/shp/3dpoint/ogr_zfield.shp
-     # tests/data/shp/3dpoint/qgis.shp -zfield id
-     def test_shapefile_point_z_from_qgis():
-+        if not os.path.exists('../data/shp/points/ogr_zfield.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/points/ogr_zfield.shp')
-         eq_(len(ds.fields()), 2)
-         eq_(ds.fields(), ['id', 'name'])
-@@ -106,6 +125,9 @@ if 'shape' in mapnik.DatasourceCache.plu
-         eq_(len(list(ds.all_features())), 3)
- 
-     def test_shapefile_multipoint_from_qgis():
-+        if not os.path.exists('../data/shp/points/qgis_multi.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/points/qgis_multi.shp')
-         eq_(len(ds.fields()), 2)
-         eq_(ds.fields(), ['id', 'name'])
-@@ -114,6 +136,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # pointzm from arcinfo
-     def test_shapefile_point_zm_from_arcgis():
-+        if not os.path.exists('../data/shp/points/poi.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/points/poi.shp')
-         eq_(len(ds.fields()), 7)
-         eq_(ds.fields(),
-@@ -130,6 +155,9 @@ if 'shape' in mapnik.DatasourceCache.plu
- 
-     # copy of the above with ogr2ogr that makes m record 14 instead of 18
-     def test_shapefile_point_zm_from_ogr():
-+        if not os.path.exists('../data/shp/points/poi_ogr.shp'):
-+            raise SkipTest
-+
-         ds = mapnik.Shapefile(file='../data/shp/points/poi_ogr.shp')
-         eq_(len(ds.fields()), 7)
-         eq_(ds.fields(),
---- a/test/python_tests/shapeindex_test.py
-+++ b/test/python_tests/shapeindex_test.py
-@@ -6,6 +6,7 @@ import shutil
- from subprocess import PIPE, Popen
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -19,6 +20,9 @@ def setup():
- 
- 
- def test_shapeindex():
-+    if not os.path.exists('../data/shp/'):
-+        raise SkipTest
-+
-     # first copy shapefiles to tmp directory
-     source_dir = '../data/shp/'
-     working_dir = '/tmp/mapnik-shp-tmp/'
---- a/test/python_tests/sqlite_test.py
-+++ b/test/python_tests/sqlite_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_, raises
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -23,6 +24,9 @@ def teardown():
- if 'sqlite' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_attachdb_with_relative_file():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         # The point table and index is in the qgis_spatiallite.sqlite
-         # database.  If either is not found, then this fails
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-@@ -36,6 +40,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_relative_file.requires_data = True
- 
-     def test_attachdb_with_multiple_files():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='attachedtest',
-                            attachdb='scratch1@:memory:,scratch2@:memory:',
-@@ -57,6 +64,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_multiple_files.requires_data = True
- 
-     def test_attachdb_with_absolute_file():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         # The point table and index is in the qgis_spatiallite.sqlite
-         # database.  If either is not found, then this fails
-         ds = mapnik.SQLite(file=os.getcwd() + '/../data/sqlite/world.sqlite',
-@@ -70,6 +80,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_absolute_file.requires_data = True
- 
-     def test_attachdb_with_index():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='attachedtest',
-                            attachdb='scratch@:memory:',
-@@ -91,6 +104,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_index.requires_data = True
- 
-     def test_attachdb_with_explicit_index():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='attachedtest',
-                            index_table='myindex',
-@@ -112,6 +128,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_explicit_index.requires_data = True
- 
-     def test_attachdb_with_sql_join():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
-                            attachdb='busines at business.sqlite'
-@@ -223,6 +242,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join.requires_data = True
- 
-     def test_attachdb_with_sql_join_count():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
-                            attachdb='busines at business.sqlite'
-@@ -293,6 +315,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join_count.requires_data = True
- 
-     def test_attachdb_with_sql_join_count2():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         '''
-         sqlite3 world.sqlite
-         attach database 'business.sqlite' as business;
-@@ -368,6 +393,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join_count2.requires_data = True
- 
-     def test_attachdb_with_sql_join_count3():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         '''
-         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
-         '''
-@@ -441,6 +469,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join_count3.requires_data = True
- 
-     def test_attachdb_with_sql_join_count4():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         '''
-         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
-         '''
-@@ -514,6 +545,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join_count4.requires_data = True
- 
-     def test_attachdb_with_sql_join_count5():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         '''
-         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
-         '''
-@@ -555,6 +589,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_attachdb_with_sql_join_count5.requires_data = True
- 
-     def test_subqueries():
-+        if not os.path.exists('../data/sqlite/world.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-                            table='world_merc',
-                            )
-@@ -629,6 +666,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_subqueries.requires_data = True
- 
-     def test_empty_db():
-+        if not os.path.exists('../data/sqlite/empty.db'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-                            table='empty',
-                            )
-@@ -644,6 +684,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
- 
-     @raises(RuntimeError)
-     def test_that_nonexistant_query_field_throws(**kwargs):
-+        if not os.path.exists('../data/sqlite/empty.db'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-                            table='empty',
-                            )
-@@ -710,6 +753,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_that_nonexistant_query_field_throws.requires_data = True
- 
-     def test_intersects_token1():
-+        if not os.path.exists('../data/sqlite/empty.db'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-                            table='(select * from empty where !intersects!)',
-                            )
-@@ -724,6 +770,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_intersects_token1.requires_data = True
- 
-     def test_intersects_token2():
-+        if not os.path.exists('../data/sqlite/empty.db'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-                            table='(select * from empty where "a"!="b" and !intersects!)',
-                            )
-@@ -738,6 +787,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-     test_intersects_token2.requires_data = True
- 
-     def test_intersects_token3():
-+        if not os.path.exists('../data/sqlite/empty.db'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-                            table='(select * from empty where "a"!="b" and !intersects!)',
-                            )
-@@ -815,6 +867,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
-         eq_(ds.field_types(), ['int', 'str', 'int'])
- 
-     def test_that_64bit_int_fields_work():
-+        if not os.path.exists('../data/sqlite/64bit_int.sqlite'):
-+            raise SkipTest
-+
-         ds = mapnik.SQLite(file='../data/sqlite/64bit_int.sqlite',
-                            table='int_table',
-                            use_spatial_index=False
---- a/test/python_tests/topojson_plugin_test.py
-+++ b/test/python_tests/topojson_plugin_test.py
-@@ -5,6 +5,7 @@ from __future__ import absolute_import,
- import os
- 
- from nose.tools import assert_almost_equal, eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- 
-@@ -19,6 +20,9 @@ def setup():
- if 'topojson' in mapnik.DatasourceCache.plugin_names():
- 
-     def test_topojson_init():
-+        if not os.path.exists('../data/topojson/escaped.topojson'):
-+            raise SkipTest
-+
-         # topojson tests/data/json/escaped.geojson -o tests/data/topojson/escaped.topojson --properties
-         # topojson version 1.4.2
-         ds = mapnik.Datasource(
-@@ -31,6 +35,9 @@ if 'topojson' in mapnik.DatasourceCache.
-         assert_almost_equal(e.maxy, 41.480573, places=3)
- 
-     def test_topojson_properties():
-+        if not os.path.exists('../data/topojson/escaped.topojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='topojson',
-             file='../data/topojson/escaped.topojson')
-@@ -67,6 +74,9 @@ if 'topojson' in mapnik.DatasourceCache.
-         eq_(f['NOM_FR'], u'Qu?bec')
- 
-     def test_geojson_from_in_memory_string():
-+        if not os.path.exists('../data/topojson/escaped.topojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='topojson',
-             inline=open(
-@@ -89,6 +99,9 @@ if 'topojson' in mapnik.DatasourceCache.
- 
- #    @raises(RuntimeError)
-     def test_that_nonexistant_query_field_throws(**kwargs):
-+        if not os.path.exists('../data/topojson/escaped.topojson'):
-+            raise SkipTest
-+
-         ds = mapnik.Datasource(
-             type='topojson',
-             file='../data/topojson/escaped.topojson')
---- a/test/python_tests/pdf_printing_test.py
-+++ b/test/python_tests/pdf_printing_test.py
-@@ -3,6 +3,7 @@
- import os
- 
- from nose.tools import eq_
-+from nose.plugins.skip import SkipTest
- 
- import mapnik
- from .utilities import execution_path, run_all
-@@ -34,6 +35,9 @@ if mapnik.has_pycairo():
- 	import mapnik.printing
- 
- 	def test_pdf_printing():
-+                if not os.path.exists('../data/good_maps/marker-text-line.xml'):
-+                    raise SkipTest
-+
- 		source_xml = '../data/good_maps/marker-text-line.xml'.encode('utf-8')
- 		m = make_map_from_xml(source_xml)
- 
diff -Nru python-mapnik-0.0~20200224-7da019cf9/debian/patches/upstream-master.patch python-mapnik-0.0~20200224-7da019cf9/debian/patches/upstream-master.patch
--- python-mapnik-0.0~20200224-7da019cf9/debian/patches/upstream-master.patch	1970-01-01 01:00:00.000000000 +0100
+++ python-mapnik-0.0~20200224-7da019cf9/debian/patches/upstream-master.patch	2024-04-16 13:31:27.000000000 +0200
@@ -0,0 +1,14357 @@
+--- a/README.md
++++ b/README.md
+@@ -10,7 +10,7 @@ this currently does not work though. So
+ 
+ ### Create a virtual environment
+ 
+-It is highly suggested that you [a python virtualenv](http://docs.python-guide.org/en/latest/dev/virtualenvs/) when developing
++It is highly suggested that you have [a python virtualenv](http://docs.python-guide.org/en/latest/dev/virtualenvs/) when developing
+ on mapnik.
+ 
+ ### Building from Mason
+@@ -53,6 +53,10 @@ If you need Pycairo, make sure that PYCA
+ PYCAIRO=true python setup.py develop
+ ```
+ 
++### Building against Mapnik 3.0.x
++
++The `master` branch is no longer compatible with `3.0.x` series of Mapnik. To build against Mapnik 3.0.x, use [`v3.0.x`](https://github.com/mapnik/python-mapnik/tree/v3.0.x) branch.
++
+ ## Testing
+ 
+ Once you have installed you can test the package by running:
+@@ -74,10 +78,29 @@ Fatal Python error: PyThreadState_Get: n
+ Abort trap: 6
+ ```
+ 
+-That means you likely have built python-mapnik is linked against a differ python version than what you are running. To solve this try running:
++That means you likely have built python-mapnik linked against a different python version than what you are running. To solve this try running:
+ 
+ ```
+ /usr/bin/python <your script.py>
+ ```
+ 
++If you hit an error like the following when building with mason:
++
++```
++EnvironmentError: 
++Missing boost_python boost library, try to add its name with BOOST_PYTHON_LIB environment var.
++```
++
++Try to set `export BOOST_PYTHON_LIB=boost_python` before build.
++Also, if `boost_thread` or `boost_system` is missing, do likewise:
++
++```
++export BOOST_SYSTEM_LIB=boost_system
++export BOOST_THREAD_LIB=boost_thread
++```
++
+ If you still hit a problem create an issue and we'll try to help.
++
++## Tutorials
++
++- [Getting started with Python bindings](docs/getting-started.md)
+--- a/bootstrap.sh
++++ b/bootstrap.sh
+@@ -14,7 +14,7 @@ function install() {
+ ICU_VERSION="57.1"
+ 
+ function install_mason_deps() {
+-    install mapnik 3.0.15
++    install mapnik 3be9ce8fa
+     install jpeg_turbo 1.5.1
+     install libpng 1.6.28
+     install libtiff 4.0.7
+@@ -27,16 +27,16 @@ function install_mason_deps() {
+     install cairo 1.14.8
+     install webp 0.6.0
+     install libgdal 2.1.3
+-    install boost 1.63.0
+-    install boost_libsystem 1.63.0
+-    install boost_libfilesystem 1.63.0
+-    install boost_libprogram_options 1.63.0
+-    install boost_libregex_icu57 1.63.0
++    install boost 1.66.0
++    install boost_libsystem 1.66.0
++    install boost_libfilesystem 1.66.0
++    install boost_libprogram_options 1.66.0
++    install boost_libregex_icu57 1.66.0
+     install freetype 2.7.1
+     install harfbuzz 1.4.2-ft
+     # deps needed by python-mapnik (not mapnik core)
+-    install boost_libthread 1.63.0
+-    install boost_libpython 1.63.0
++    install boost_libthread 1.66.0
++    install boost_libpython 1.66.0
+     install postgis 2.3.2-1
+ }
+ 
+@@ -84,4 +84,4 @@ function main() {
+ main
+ 
+ set +eu
+-set +o pipefail
+\ No newline at end of file
++set +o pipefail
+--- a/demo/python/rundemo.py
++++ b/demo/python/rundemo.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ #
+ #
+@@ -55,7 +55,7 @@ m.background = mapnik.Color('white')
+ 
+ provpoly_lyr = mapnik.Layer('Provinces')
+ provpoly_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs"
+-provpoly_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/boundaries'), encoding='latin1')
++provpoly_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/boundaries'))
+ 
+ # We then define a style for the layer.  A layer can have one or many styles.
+ # Styles are named, so they can be shared across different layers.
+@@ -280,7 +280,7 @@ m.layers.append(roads1_lyr)
+ 
+ popplaces_lyr = mapnik.Layer('Populated Places')
+ popplaces_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs"
+-popplaces_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/popplaces'),encoding='latin1')
++popplaces_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/popplaces'))
+ 
+ popplaces_style = mapnik.Style()
+ popplaces_rule = mapnik.Rule()
+--- /dev/null
++++ b/docs/getting-started.md
+@@ -0,0 +1,238 @@
++# Getting started with Python bindings
++
++## Overview
++
++This tutorial will ensure that Mapnik and its Python bindings are properly installed and introduce you to some of the basic programming concepts for Mapnik.
++
++## Step 1: check installation
++
++Make sure you have mapnik installed. You should be able to open a terminal and type:
++
++```sh
++mapnik-config -v # should return a version number.
++```
++
++Next test the Python bindings. You should be able to open a terminal and type:
++
++```sh
++python -c "import mapnik;print mapnik.__file__" # should return the path to the python bindings and no errors
++```
++
++If the above does not work (e.g. throws an `ImportError`) then please go back and ensure [Mapnik](https://github.com/mapnik/mapnik/wiki/Mapnik-Installation) and the [Mapnik Python bindings](/README.md) are properly installed.
++## Step 2
++
++Now, we need some data to render. Let's use a shapefile of world border polygons from [naturalearthdata.com](http://naturalearthdata.com) ([direct link](http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_countries.zip)). Unzip the archive in an easily accessible location of your choosing. In *Step 3* we will be referencing the path to this shapefile in Python code, so make sure you know where you put it.
++
++Once unzipped, you should see four files like:
++
++```sh
++ne_110m_admin_0_countries.shp
++ne_110m_admin_0_countries.shx
++ne_110m_admin_0_countries.dbf
++ne_110m_admin_0_countries.prj
++```
++
++To download and unzip on the command line, do:
++
++```sh
++wget http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_countries.zip
++unzip ne_110m_admin_0_countries.zip
++```
++
++## Step 3
++
++Now we're going to program in Python and Mapnik, using sample code and the Python interpreter.
++
++The idea here is not that you have to interact with Mapnik via Python, but that this is a good way to build foundational skills for how Mapnik works.
++
++So, let's begin! Open a Python interpreter simply by typing in your terminal:
++
++```sh
++python
++```
++
++The code below can be pasted into your interpreter. Ideally paste line by line so you can confirm each step is working. The commented lines (#) should be able to be pasted without trouble, but depending on your interpreter setting may cause errors.
++
++### Import Mapnik
++
++Import the Mapnik Python bindings:
++
++```python
++import mapnik
++```
++
++### Create a Map
++
++```python
++m = mapnik.Map(600,300) # create a map with a given width and height in pixels
++# note: m.srs will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
++# the 'map.srs' is the target projection of the map and can be whatever you wish
++m.background = mapnik.Color('steelblue') # set background colour to 'steelblue'.
++```
++
++### Create a Style
++
++Create the Styles which determines how the data is rendered:
++
++```python
++s = mapnik.Style() # style object to hold rules
++r = mapnik.Rule() # rule object to hold symbolizers
++# to fill a polygon we create a PolygonSymbolizer
++polygon_symbolizer = mapnik.PolygonSymbolizer()
++polygon_symbolizer.fill = mapnik.Color('#f2eff9')
++r.symbols.append(polygon_symbolizer) # add the symbolizer to the rule object
++
++# to add outlines to a polygon we create a LineSymbolizer
++line_symbolizer = mapnik.LineSymbolizer()
++line_symbolizer.stroke = mapnik.Color('rgb(50%,50%,50%)')
++line_symbolizer.stroke_width = 0.1
++r.symbols.append(line_symbolizer) # add the symbolizer to the rule object
++s.rules.append(r) # now add the rule to the style and we're done
++```
++
++And add the Style to the Map:
++
++```python
++m.append_style('My Style',s) # Styles are given names only as they are applied to the map
++```
++
++### Create a Datasource
++
++In *Step 2* above you should have downloaded a sample shapefile of polygons of world countries. We are now going to load that into a `mapnik.Datasource` object in Python.
++
++If your Python interpreter was launched from the same directory as you downloaded the natural earth shapefile to you should be able to use a relative path to create the datasource like:
++
++``` python
++ds = mapnik.Shapefile(file='ne_110m_admin_0_countries.shp')
++```
++
++Otherwise use an absolute path (exchanging `/Users/dane/Downloads/` for the correct path on your machine):
++
++``` python
++ds = mapnik.Shapefile(file='/Users/dane/Downloads/ne_110m_admin_0_countries.shp')
++```
++
++Note: optionally (to learn about your data) you can call the `envelope()` function off the datasource object to see the full coordinate bounds of the data:
++
++``` python
++>>> ds.envelope()
++Box2d(-180.0,-90.0,180.0,83.64513)
++```
++
++That shows the minx, miny, maxx, and maxy of the data. Because the above coordinates are between -180 and 180 for the x or longitude values and -90 and 90 for the y or latitude values we know this data is in *geographic* coordinates and uses degrees for units - a pretty good indication this is `WGS84 (aka EPSG:4326)`. This specific shapefile also stores this projection information as a `WKT` string in the `ne_110m_admin_0_countries.prj` file. See the `layer.srs` value below for why this matters.
++
++
++### Create a Layer
++
++Mapnik Layers are basically containers around datasources, that store useful properties. Lets now create a Layer object and add the datasource to it.
++
++``` python
++layer = mapnik.Layer('world') # new layer called 'world' (we could name it anything)
++# note: layer.srs will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
++```
++
++Note: the 'layer.srs' is the source projection of the Datasource and *must* match the projection of the coordinates of that data or else your map will likely be blank. Mapnik uses [Proj.4](http://trac.osgeo.org/proj/wiki/FAQ) strings to specify the spatial references system. In this case, the default `srs` Mapnik assumes (`+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs`) happens to match the projection of the data. When this is not the case you must set the layer.srs to the correct value (which is beyond the scope of this tutorial).
++
++Now attach the datasource to the layer, and reference:
++
++```python
++layer.datasource = ds
++```
++
++Lastly, we need to make sure the style we created above (and attached to the map) is also applied to the layer, by its string reference:
++
++```python
++layer.styles.append('My Style')
++```
++
++### Prepare the Map for rendering
++
++This step is critical. Finally add the layer to the map and zoom to the full extent of the data layer (using `zoom_all` which will calculate the cumulative extent of all layers attached to the map). If you do not zoom the Map to the extent of the layer(s), then the rendered output will be blank.
++
++```python
++m.layers.append(layer)
++m.zoom_all()
++```
++
++### Render your map
++
++Finish up by rendering your map image:
++
++```python
++# Write the data to a png image called world.png in the current directory
++mapnik.render_to_file(m,'world.png', 'png')
++
++# Exit the Python interpreter
++exit() # or ctrl-d
++```
++
++Then back in your normal shell type:
++
++```sh
++# On a mac
++open world.png
++# On windows
++start world.png
++```
++
++Or navigate to your base directory and open `world.png` and the result should look like this:
++
++![The world map](images/world.png)
++
++### Step 4
++
++The next logical step is to run that same code all at once as a Python script from your shell/terminal (rather than pasted into the Python interpreter line-by-line). This way you will be able to modify and experiment with the settings, then simply re-run the script.
++
++So, create a blank text file called `world.py`.
++
++Make it executable:
++
++    chmod +x world.py
++
++Then add a line at the top of the script like:
++
++```sh
++#!/usr/bin/env python
++```
++
++Finally, append the entire text below and save the file.
++
++```python
++import mapnik
++m = mapnik.Map(600,300)
++m.background = mapnik.Color('steelblue')
++s = mapnik.Style()
++r = mapnik.Rule()
++polygon_symbolizer = mapnik.PolygonSymbolizer()
++polygon_symbolizer.fill = mapnik.Color('#f2eff9')
++r.symbols.append(polygon_symbolizer)
++
++line_symbolizer = mapnik.LineSymbolizer()
++line_symbolizer.stroke = mapnik.Color('rgb(50%,50%,50%)')
++line_symbolizer.stroke_width = 0.1
++
++r.symbols.append(line_symbolizer)
++s.rules.append(r)
++m.append_style('My Style',s)
++ds = mapnik.Shapefile(file='ne_110m_admin_0_countries.shp')
++layer = mapnik.Layer('world')
++layer.datasource = ds
++layer.styles.append('My Style')
++m.layers.append(layer)
++m.zoom_all()
++mapnik.render_to_file(m,'world.png', 'png')
++print "rendered image to 'world.png'"
++```
++
++ * Don't forget to ensure the correct path to your `ne_110m_admin_0_countries.shp` shapefile.
++ * Mapnik accepts both the absolute path to your data as well as the relative path (Same goes for the path to where you want to save your file)
++
++Finally run the script with the command:
++
++
++```sh
++./world.py # You must be in the same directory as you saved the script
++```
++
++ * Note: if you re-run this script it will re-write over the world.png map.
++ * Now you can easily open the script in a separate text editor and try changing the dimensions, colors, or datasource (remember to use the correct `srs` if you change the datasource).
+--- a/mapnik/__init__.py
++++ b/mapnik/__init__.py
+@@ -108,7 +108,7 @@ class Envelope(Box2d):
+         Box2d.__init__(self, *args, **kwargs)
+ 
+ 
+-class _Coord(Coord, _injector()):
++class Coord(_mapnik.Coord, _injector()):
+     """
+     Represents a point with two coordinates (either lon/lat or x/y).
+ 
+@@ -156,7 +156,7 @@ class _Coord(Coord, _injector()):
+         Example: Project the geographic coordinates of the
+                  city center of Stuttgart into the local
+                  map projection (GK Zone 3/DHDN, EPSG 31467)
+-        >>> p = Projection('+init=epsg:31467')
++        >>> p = Projection('epsg:31467')
+         >>> Coord(9.1, 48.7).forward(p)
+         Coord(3507360.12813,5395719.2749)
+         """
+@@ -176,14 +176,14 @@ class _Coord(Coord, _injector()):
+                  city center of Stuttgart in the local
+                  map projection (GK Zone 3/DHDN, EPSG 31467)
+                  into geographic coordinates:
+-        >>> p = Projection('+init=epsg:31467')
++        >>> p = Projection('epsg:31467')
+         >>> Coord(3507360.12813,5395719.2749).inverse(p)
+         Coord(9.1, 48.7)
+         """
+         return inverse_(self, projection)
+ 
+ 
+-class _Box2d(Box2d, _injector()):
++class Box2d(_mapnik.Box2d, _injector()):
+     """
+     Represents a spatial envelope (i.e. bounding box).
+ 
+@@ -238,7 +238,7 @@ class _Box2d(Box2d, _injector()):
+         return inverse_(self, projection)
+ 
+ 
+-class _Projection(Projection, _injector()):
++class Projection(_mapnik.Projection, _injector()):
+ 
+     def __repr__(self):
+         return "Projection('%s')" % self.params()
+@@ -266,15 +266,15 @@ class _Projection(Projection, _injector(
+         return inverse_(obj, self)
+ 
+ 
+-class _Feature(Feature, _injector()):
++class Feature(_mapnik.Feature, _injector()):
+     __geo_interface__ = property(lambda self: json.loads(self.to_geojson()))
+ 
+ 
+-class _Geometry(Geometry, _injector()):
++class Geometry(_mapnik.Geometry, _injector()):
+     __geo_interface__ = property(lambda self: json.loads(self.to_geojson()))
+ 
+ 
+-class _Datasource(Datasource, _injector()):
++class Datasource(_mapnik.Datasource, _injector()):
+ 
+     def featureset(self, fields = None, variables = {}):
+         query = Query(self.envelope())
+@@ -291,13 +291,13 @@ class _Datasource(Datasource, _injector(
+         return self.__iter__(fields, variables)
+ 
+ 
+-class _Color(Color, _injector()):
++class Color(_mapnik.Color, _injector()):
+ 
+     def __repr__(self):
+         return "Color(R=%d,G=%d,B=%d,A=%d)" % (self.r, self.g, self.b, self.a)
+ 
+ 
+-class _SymbolizerBase(SymbolizerBase, _injector()):
++class SymbolizerBase(_mapnik.SymbolizerBase, _injector()):
+     # back compatibility
+ 
+     @property
+@@ -804,7 +804,7 @@ class PythonDatasource(object):
+         return itertools.imap(make_it, features, itertools.count(1))
+ 
+ 
+-class _TextSymbolizer(TextSymbolizer, _injector()):
++class TextSymbolizer(_mapnik.TextSymbolizer, _injector()):
+ 
+     @property
+     def name(self):
+--- a/mapnik/printing/__init__.py
++++ b/mapnik/printing/__init__.py
+@@ -2,12 +2,9 @@
+ 
+ """Mapnik classes to assist in creating printable maps."""
+ 
+-from __future__ import absolute_import, print_function
+-
+ import logging
+ import math
+-
+-from mapnik import Box2d, Coord, Geometry, Layer, Map, Projection, Style, render
++from mapnik import Box2d, Coord, Geometry, Layer, Map, Projection, ProjTransform, Style, render
+ from mapnik.printing.conversions import m2pt, m2px
+ from mapnik.printing.formats import pagesizes
+ from mapnik.printing.scales import any_scale, default_scale, deg_min_sec_scale, sequence_scale
+@@ -25,12 +22,12 @@ except ImportError:
+     HAS_PANGOCAIRO_MODULE = False
+ 
+ try:
+-    from PyPDF2 import PdfFileReader, PdfFileWriter
+-    from PyPDF2.generic import (ArrayObject, DecodedStreamObject, DictionaryObject, FloatObject, NameObject,
+-        NumberObject, TextStringObject)
+-    HAS_PYPDF2 = True
++    from pypdf import PdfReader, PdfWriter
++    from pypdf.generic import (ArrayObject, DecodedStreamObject, DictionaryObject, FloatObject, NameObject,
++                                 NumberObject, TextStringObject)
++    HAS_PYPDF = True
+ except ImportError:
+-    HAS_PYPDF2 = False
++    HAS_PYPDF = False
+ 
+ """
+ Style of centering to use with the map.
+@@ -90,7 +87,7 @@ class PDFPrinter(object):
+         Args:
+             pagesize: tuple of page size in meters, see predefined sizes in mapnik.formats module
+             margin: page margin in meters
+-            box: the box to render the map into. Must be within page area, margin excluded. 
++            box: the box to render the map into. Must be within page area, margin excluded.
+                 This should be a Mapnik Box2d object. Default is the full page without margin.
+             percent_box: similar to box argument but specified as a percent (0->1) of the full page size.
+                 If both box and percent_box are specified percent_box will be used.
+@@ -104,7 +101,7 @@ class PDFPrinter(object):
+                 be a value from the mapnik.utils.centering class. The default is to center on the maps constrained
+                 axis. Typically this will be horizontal for portrait pages and vertical for landscape pages.
+             is_latlon: whether the map is in lat lon degrees or not.
+-            use_ocg_layers: create OCG layers in the PDF, requires PyPDF2
++            use_ocg_layers: create OCG layers in the PDF, requires pypdf
+             font_name: the font name used each time text is written (e.g., legend titles, representative fraction, etc.)
+         """
+         self._pagesize = pagesize
+@@ -563,7 +560,7 @@ class PDFPrinter(object):
+ 
+         Args:
+             m: the Map object to render the scale for
+-            ctx: A cairo context to render the scale into. If this is None, we create a context and find out 
++            ctx: A cairo context to render the scale into. If this is None, we create a context and find out
+                 the best location for the scale bar
+             width: the width of area available for rendering the scale bar (in meters)
+             num_divisions: the number of divisions for the scale bar
+@@ -737,7 +734,7 @@ class PDFPrinter(object):
+ 
+         # renders the vertical graticule axes
+         self._render_graticule_axes_and_text(
+-            m, 
++            m,
+             p2,
+             latlon_bounds,
+             latlon_buffer,
+@@ -1119,7 +1116,7 @@ class PDFPrinter(object):
+         Takes a multi pages PDF as input and converts each page to a layer in a single page PDF.
+ 
+         Note:
+-            requires PyPDF2 to be available
++            requires pypdf to be available
+ 
+         Args:
+             layer_names should be a sequence of the user visible names of the layers, if not given
+@@ -1128,17 +1125,17 @@ class PDFPrinter(object):
+             if output_name is not provided a temporary file will be used for the conversion which
+             will then be copied back over the source file.
+         """
+-        if not HAS_PYPDF2:
+-            raise RuntimeError("PyPDF2 not available; PyPDF2 required to convert pdf pages to layers")
++        if not HAS_PYPDF:
++            raise RuntimeError("pypdf not available; pypdf required to convert pdf pages to layers")
+ 
+         with open(filename, "rb+") as f:
+-            file_reader = PdfFileReader(f)
+-            file_writer = PdfFileWriter()
++            file_reader = PdfReader(f)
++            file_writer = PdfWriter()
+ 
+-            template_page_size = file_reader.pages[0].mediaBox
+-            output_pdf = file_writer.addBlankPage(
+-                width=template_page_size.getWidth(),
+-                height=template_page_size.getHeight())
++            template_page_size = file_reader.pages[0].mediabox
++            output_pdf = file_writer.add_blank_page(
++                width=template_page_size.width,
++                height=template_page_size.height)
+ 
+             content_key = NameObject('/Contents')
+             output_pdf[content_key] = ArrayObject()
+@@ -1149,15 +1146,15 @@ class PDFPrinter(object):
+             (properties, ocgs) = self._make_ocg_layers(file_reader, file_writer, output_pdf, layer_names)
+ 
+             properties_key = NameObject('/Properties')
+-            output_pdf[resource_key][properties_key] = file_writer._addObject(properties)
++            output_pdf[resource_key][properties_key] = file_writer._add_object(properties)
+ 
+             ocproperties = DictionaryObject()
+             ocproperties[NameObject('/OCGs')] = ocgs
+ 
+             default_view = self._get_pdf_default_view(ocgs, reverse_all_but_last)
+-            ocproperties[NameObject('/D')] = file_writer._addObject(default_view)
++            ocproperties[NameObject('/D')] = file_writer._add_object(default_view)
+ 
+-            file_writer._root_object[NameObject('/OCProperties')] = file_writer._addObject(ocproperties)
++            file_writer._root_object[NameObject('/OCProperties')] = file_writer._add_object(ocproperties)
+ 
+             f.seek(0)
+             file_writer.write(f)
+@@ -1189,7 +1186,7 @@ class PDFPrinter(object):
+                 page[NameObject(
+                     '/Contents')] = ArrayObject((ocgs_start, page['/Contents'], ocg_end))
+ 
+-            output_pdf.mergePage(page)
++            output_pdf.merge_page(page)
+ 
+             ocg = DictionaryObject()
+             ocg[NameObject('/Type')] = NameObject('/OCG')
+@@ -1199,7 +1196,7 @@ class PDFPrinter(object):
+             else:
+                 ocg[NameObject('/Name')] = TextStringObject('Layer %d' % (idx + 1))
+ 
+-            indirect_ocg = file_writer._addObject(ocg)
++            indirect_ocg = file_writer._add_object(ocg)
+             properties[ocg_name] = indirect_ocg
+             ocgs.append(indirect_ocg)
+ 
+@@ -1238,20 +1235,20 @@ class PDFPrinter(object):
+             The epsg code or the wkt text of the projection must be provided.
+             Must be called *after* the page has had .finish() called.
+         """
+-        if not HAS_PYPDF2:
+-            raise RuntimeError("PyPDF2 not available; PyPDF2 required to add geospatial header to PDF")
++        if not HAS_PYPDF:
++            raise RuntimeError("pypdf not available; pypdf required to add geospatial header to PDF")
+ 
+         if not any((epsg,wkt)):
+             raise RuntimeError("EPSG or WKT required to add geospatial header to PDF")
+ 
+         with open(filename, "rb+") as f:
+-            file_reader = PdfFileReader(f)
+-            file_writer = PdfFileWriter()
++            file_reader = PdfReader(f)
++            file_writer = PdfWriter()
+ 
+             # preserve OCProperties at document root if we have one
+             if NameObject('/OCProperties') in file_reader.trailer['/Root']:
+                 file_writer._root_object[NameObject('/OCProperties')] = file_reader.trailer[
+-                    '/Root'].getObject()[NameObject('/OCProperties')]
++                    '/Root'].get_object()[NameObject('/OCProperties')]
+ 
+             for page in file_reader.pages:
+                 gcs = DictionaryObject()
+@@ -1265,7 +1262,7 @@ class PDFPrinter(object):
+                 measure = self._get_pdf_measure(m, gcs)
+                 page[NameObject('/VP')] = self._get_pdf_vp(measure)
+ 
+-                file_writer.addPage(page)
++                file_writer.add_page(page)
+ 
+             f.seek(0)
+             file_writer.write(f)
+@@ -1318,11 +1315,11 @@ class PDFPrinter(object):
+         """
+         gpts = ArrayObject()
+ 
+-        proj = Projection(m.srs)
++        tr = ProjTransform(Projection(m.srs), Projection("epsg:4326"))
+         env = m.envelope()
+-        for x in ((env.minx, env.miny), (env.minx, env.maxy),
++        for p in ((env.minx, env.miny), (env.minx, env.maxy),
+                   (env.maxx, env.maxy), (env.maxx, env.miny)):
+-            latlon_corner = proj.inverse(Coord(*x))
++            latlon_corner = tr.forward(Coord(*p))
+             # these are in lat,lon order according to the specification
+             gpts.append(FloatObject(str(latlon_corner.y)))
+             gpts.append(FloatObject(str(latlon_corner.x)))
+--- /dev/null
++++ b/scripts/mason.sh
+@@ -0,0 +1,189 @@
++#!/usr/bin/env bash
++
++# Mason Client Version 1.0.0
++
++# See below for `set -euo pipefail`
++
++# Print file + line number when not in CLI mode
++if [[ "$0" != "$BASH_SOURCE" ]]; then
++function mason_error {
++    local _LINE _FN _FILE
++    read _LINE _FN _FILE <<< "`caller 1`"
++    if [ -t 1 ]; then
++        >&2 echo -e "\033[1m\033[31m$@ in ${_FILE} on line ${_LINE}\033[0m"
++    else
++        >&2 echo "$@ in ${_FILE} on line ${_LINE}"
++    fi
++}
++else
++function mason_error {
++    if [ -t 1 ]; then
++        >&2 echo -e "\033[1m\033[31m$@\033[0m"
++    else
++        >&2 echo "$@"
++    fi
++}
++fi
++
++function mason_info {
++    if [ -t 1 ]; then
++        >&2 echo -e "\033[1m\033[36m$@\033[0m"
++    else
++        >&2 echo "$@"
++    fi
++}
++
++function mason_detect_platform {
++    # Determine platform
++    if [[ -z "${MASON_PLATFORM:-}" ]]; then
++        if [[ "`uname -s`" = 'Darwin' ]]; then
++            MASON_PLATFORM="osx"
++        else
++            MASON_PLATFORM="linux"
++        fi
++    fi
++
++    # Determine platform version string
++    if [[ -z "${MASON_PLATFORM_VERSION:-}" ]]; then
++        MASON_PLATFORM_VERSION="`uname -m`"
++    fi
++}
++
++function mason_trim {
++    local _TMP="${1#"${1%%[![:space:]]*}"}"
++    echo -n "${_TMP%"${_TMP##*[![:space:]]}"}"
++}
++
++function mason_uppercase {
++    echo -n "$1" | tr "[a-z]" "[A-Z]"
++}
++
++function mason_use {
++    local _HEADER_ONLY=false _PACKAGE _SAFE_PACKAGE _VERSION _PLATFORM_ID _SLUG _INSTALL_PATH _INSTALL_PATH_RELATIVE
++
++    while [[ $# -gt 0 ]]; do
++        if [[ $1 == "--header-only" ]]; then
++            _HEADER_ONLY=true
++        elif [[ -z "${_PACKAGE:-}" ]]; then
++            _PACKAGE="$1"
++        elif [[ -z "${_VERSION:-}" ]]; then
++            _VERSION="$1"
++        else
++            mason_error "[Mason] mason_use() called with unrecognized arguments: '$@'"
++            exit 1
++        fi
++        shift
++    done
++
++    if [[ -z "${_PACKAGE:-}" ]]; then
++        mason_error "[Mason] No package name given"
++        exit 1
++    fi
++
++    # Create a package name that we can use as shell variable names.
++    _SAFE_PACKAGE="${_PACKAGE//[![:alnum:]]/_}"
++
++    if [[ -z "${_VERSION:-}" ]]; then
++        mason_error "[Mason] Specifying a version is required"
++        exit 1
++    fi
++
++    _PLATFORM_ID="${MASON_PLATFORM}-${MASON_PLATFORM_VERSION}"
++    if [[ "${_HEADER_ONLY}" = true ]] ; then
++        _PLATFORM_ID="headers"
++    fi
++
++    _SLUG="${_PLATFORM_ID}/${_PACKAGE}/${_VERSION}"
++    _INSTALL_PATH="${MASON_PACKAGE_DIR}/${_SLUG}"
++    _INSTALL_PATH_RELATIVE="${_INSTALL_PATH#`pwd`/}"
++
++    if [[ ! -d "${_INSTALL_PATH}" ]]; then
++        local _CACHE_PATH _URL _CACHE_DIR _ERROR
++        _CACHE_PATH="${MASON_PACKAGE_DIR}/.binaries/${_SLUG}.tar.gz"
++        if [ ! -f "${_CACHE_PATH}" ]; then
++            # Download the package
++            _URL="${MASON_REPOSITORY}/${_SLUG}.tar.gz"
++            mason_info "[Mason] Downloading package ${_URL}..."
++            _CACHE_DIR="`dirname "${_CACHE_PATH}"`"
++            mkdir -p "${_CACHE_DIR}"
++            if ! _ERROR=$(curl --retry 3 --silent --fail --show-error --location "${_URL}" --output "${_CACHE_PATH}.tmp" 2>&1); then
++                mason_error "[Mason] ${_ERROR}"
++                exit 1
++            else
++                # We downloaded to a temporary file to prevent half-finished downloads
++                mv "${_CACHE_PATH}.tmp" "${_CACHE_PATH}"
++            fi
++        fi
++
++        # Unpack the package
++        mason_info "[Mason] Unpacking package to ${_INSTALL_PATH_RELATIVE}..."
++        mkdir -p "${_INSTALL_PATH}"
++        tar xzf "${_CACHE_PATH}" -C "${_INSTALL_PATH}"
++    fi
++
++    # Error out if there is no config file.
++    if [[ ! -f "${_INSTALL_PATH}/mason.ini" ]]; then
++        mason_error "[Mason] Could not find mason.ini for package ${_PACKAGE} ${_VERSION}"
++        exit 1
++    fi
++
++    # We use this instead of declare, since it declare makes local variables when run in a function.
++    read "MASON_PACKAGE_${_SAFE_PACKAGE}_PREFIX" <<< "${_INSTALL_PATH}"
++
++    # Load the configuration from the ini file
++    local _LINE _KEY _VALUE
++    while read _LINE; do
++        _KEY="`mason_trim "${_LINE%%=*}"`"
++        if [[ "${_KEY}" =~ ^[a-z_]+$ ]]; then
++            _KEY="`mason_uppercase "${_KEY}"`" # Convert to uppercase
++            _LINE="${_LINE%%;*}" # Trim trailing comments
++            _VALUE="`mason_trim "${_LINE#*=}"`"
++            _VALUE="${_VALUE//\{prefix\}/${_INSTALL_PATH}}" # Replace {prefix}
++            read "MASON_PACKAGE_${_SAFE_PACKAGE}_${_KEY}" <<< "${_VALUE}"
++        fi
++    done < "${_INSTALL_PATH}/mason.ini"
++
++    # We're using the fact that this variable is declared to pass back the package name we parsed
++    # from the argument string to avoid polluting the global namespace.
++    if [ ! -z ${_MASON_SAFE_PACKAGE_NAME+x} ]; then
++        _MASON_SAFE_PACKAGE_NAME="${_SAFE_PACKAGE}"
++    fi
++}
++
++function mason_cli {
++    local _MASON_SAFE_PACKAGE_NAME= _PROP _VAR
++    if [[ $# -lt 1 ]]; then
++        mason_error "[Mason] Usage: $0 <property> [--header-only] <name> <version>"
++        mason_error "[Mason] <property> is one of 'include_dirs', 'definitions', 'options', 'ldflags', 'static_libs', or any custom variables in the package's mason.ini."
++        exit 1
++    fi
++
++    # Store first argument and pass the remaining arguments to mason_use
++    _PROP="`mason_uppercase "$1"`"
++    shift
++    mason_use "$@"
++
++    # Optionally print variables
++    _VAR="MASON_PACKAGE_${_MASON_SAFE_PACKAGE_NAME}_${_PROP}"
++    if [[ ! -z "${!_VAR:-}" ]]; then
++        echo "${!_VAR}"
++    fi
++}
++
++# Directory where Mason packages are located; typically ends with mason_packages
++if [[ -z "${MASON_PACKAGE_DIR:-}" ]]; then
++    MASON_PACKAGE_DIR="`pwd`/mason_packages"
++fi
++
++# URL prefix of where packages are located.
++if [[ -z "${MASON_REPOSITORY:-}" ]]; then
++    MASON_REPOSITORY="https://mason-binaries.s3.amazonaws.com"
++fi
++
++mason_detect_platform
++
++# Print variables if this shell script is invoked directly.
++if [[ "$0" = "$BASH_SOURCE" ]]; then
++    set -euo pipefail
++    mason_cli "$@"
++fi
+--- a/scripts/setup_mason.sh
++++ b/scripts/setup_mason.sh
+@@ -4,7 +4,7 @@ set -eu
+ set -o pipefail
+ 
+ # we pin the mason version to avoid changes in mason breaking builds
+-MASON_VERSION="1150c38"
++MASON_VERSION="751b5c5d"
+ 
+ function setup_mason() {
+     mkdir -p ./mason
+@@ -19,4 +19,4 @@ function setup_mason() {
+ setup_mason
+ 
+ set +eu
+-set +o pipefail
+\ No newline at end of file
++set +o pipefail
+--- a/setup.py
++++ b/setup.py
+@@ -7,6 +7,8 @@ import shutil
+ import subprocess
+ import sys
+ import glob
++import pkg_resources
++
+ from distutils import sysconfig
+ from ctypes.util import find_library
+ 
+@@ -40,8 +42,10 @@ def find_boost_library(_id):
+         # Debian naming convention for versions installed in parallel
+         suffixes.insert(0, "-py%d%d" % (sys.version_info.major,
+                                         sys.version_info.minor))
++        suffixes.insert(1, "%d%d" % (sys.version_info.major,
++                                     sys.version_info.minor))
+         # standard suffix for Python3
+-        suffixes.insert(1, sys.version_info.major)
++        suffixes.insert(2, sys.version_info.major)
+     for suf in suffixes:
+         name = "%s%s" % (_id, suf)
+         lib = find_library(name)
+@@ -228,19 +232,21 @@ extra_comp_args = list(filter(lambda arg
+ if os.environ.get("PYCAIRO", "false") == "true":
+     try:
+         extra_comp_args.append('-DHAVE_PYCAIRO')
+-        print("-I%s/include/pycairo".format(sys.exec_prefix))
+-        extra_comp_args.append("-I{0}/include/pycairo".format(sys.exec_prefix))
++        dist = pkg_resources.get_distribution('pycairo')
++        print(dist.location)
++        print("-I{0}/cairo/include".format(dist.location))
++        extra_comp_args.append("-I{0}/cairo/include".format(dist.location))
+         #extra_comp_args.extend(check_output(["pkg-config", '--cflags', 'pycairo']).strip().split(' '))
+         #linkflags.extend(check_output(["pkg-config", '--libs', 'pycairo']).strip().split(' '))
+     except:
+         raise Exception("Failed to find compiler options for pycairo")
+ 
+ if sys.platform == 'darwin':
+-    extra_comp_args.append('-mmacosx-version-min=10.11')
++    extra_comp_args.append('-mmacosx-version-min=13.0')
+     # silence warning coming from boost python macros which
+     # would is hard to silence via pragma
+     extra_comp_args.append('-Wno-parentheses-equality')
+-    linkflags.append('-mmacosx-version-min=10.11')
++    linkflags.append('-mmacosx-version-min=13.0')
+ else:
+     linkflags.append('-lrt')
+     linkflags.append('-Wl,-z,origin')
+@@ -253,7 +259,7 @@ if os.environ.get("CXX", False) == False
+ 
+ setup(
+     name="mapnik",
+-    version="3.0.23",
++    version="4.0.0",
+     packages=['mapnik','mapnik.printing'],
+     author="Blake Thompson",
+     author_email="flippmoke at gmail.com",
+--- a/src/mapnik_datasource.cpp
++++ b/src/mapnik_datasource.cpp
+@@ -34,7 +34,7 @@
+ #include <vector>
+ 
+ // mapnik
+-#include <mapnik/box2d.hpp>
++#include <mapnik/geometry/box2d.hpp>
+ #include <mapnik/datasource.hpp>
+ #include <mapnik/datasource_cache.hpp>
+ #include <mapnik/feature_layer_desc.hpp>
+--- a/src/mapnik_datasource_cache.cpp
++++ b/src/mapnik_datasource_cache.cpp
+@@ -28,7 +28,7 @@
+ #include <boost/noncopyable.hpp>
+ #pragma GCC diagnostic pop
+ 
+-#include <mapnik/value_types.hpp>
++#include <mapnik/value/types.hpp>
+ #include <mapnik/params.hpp>
+ #include <mapnik/datasource.hpp>
+ #include <mapnik/datasource_cache.hpp>
+--- a/src/mapnik_enumeration.hpp
++++ b/src/mapnik_enumeration.hpp
+@@ -68,7 +68,7 @@ private:
+             using namespace boost::python::converter;
+             return base_type::base::to_python(
+                 registered<native_type>::converters.m_class_object
+-                ,  static_cast<long>( v ));
++                ,  static_cast<long>(native_type(v)));
+ 
+         }
+     };
+@@ -76,11 +76,9 @@ private:
+     void init() {
+         boost::python::implicitly_convertible<native_type, EnumWrapper>();
+         boost::python::to_python_converter<EnumWrapper, converter >();
+-
+-        for (unsigned i = 0; i < EnumWrapper::MAX; ++i)
++        for (auto const& kv : EnumWrapper::lookupMap())
+         {
+-            // Register the strings already defined for this enum.
+-            base_type::value( EnumWrapper::get_string( i ), native_type( i ) );
++            base_type::value(kv.second.c_str(), kv.first);
+         }
+     }
+ 
+--- a/src/mapnik_envelope.cpp
++++ b/src/mapnik_envelope.cpp
+@@ -30,8 +30,8 @@
+ #pragma GCC diagnostic pop
+ 
+ // mapnik
+-#include <mapnik/box2d.hpp>
+-#include <mapnik/value_error.hpp>
++#include <mapnik/geometry/box2d.hpp>
++#include <mapnik/value/error.hpp>
+ 
+ using mapnik::coord;
+ using mapnik::box2d;
+--- a/src/mapnik_feature.cpp
++++ b/src/mapnik_feature.cpp
+@@ -34,7 +34,7 @@
+ #pragma GCC diagnostic pop
+ 
+ // mapnik
+-#include <mapnik/value_types.hpp>
++#include <mapnik/value/types.hpp>
+ #include <mapnik/feature.hpp>
+ #include <mapnik/feature_factory.hpp>
+ #include <mapnik/feature_kv_iterator.hpp>
+--- a/src/mapnik_gamma_method.cpp
++++ b/src/mapnik_gamma_method.cpp
+@@ -36,11 +36,11 @@ void export_gamma_method()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::gamma_method_e>("gamma_method")
+-        .value("POWER", mapnik::GAMMA_POWER)
+-        .value("LINEAR",mapnik::GAMMA_LINEAR)
+-        .value("NONE", mapnik::GAMMA_NONE)
+-        .value("THRESHOLD", mapnik::GAMMA_THRESHOLD)
+-        .value("MULTIPLY", mapnik::GAMMA_MULTIPLY)
++        .value("POWER", mapnik::gamma_method_enum::GAMMA_POWER)
++        .value("LINEAR",mapnik::gamma_method_enum::GAMMA_LINEAR)
++        .value("NONE", mapnik::gamma_method_enum::GAMMA_NONE)
++        .value("THRESHOLD", mapnik::gamma_method_enum::GAMMA_THRESHOLD)
++        .value("MULTIPLY", mapnik::gamma_method_enum::GAMMA_MULTIPLY)
+         ;
+ 
+ }
+--- a/src/mapnik_geometry.cpp
++++ b/src/mapnik_geometry.cpp
+@@ -35,13 +35,13 @@
+ 
+ // mapnik
+ #include <mapnik/geometry.hpp>
+-#include <mapnik/geometry_type.hpp>
+-#include <mapnik/geometry_envelope.hpp>
+-#include <mapnik/geometry_is_valid.hpp>
+-#include <mapnik/geometry_is_simple.hpp>
+-#include <mapnik/geometry_is_empty.hpp>
+-#include <mapnik/geometry_correct.hpp>
+-#include <mapnik/geometry_centroid.hpp>
++#include <mapnik/geometry/geometry_type.hpp>
++#include <mapnik/geometry/envelope.hpp>
++#include <mapnik/geometry/is_valid.hpp>
++#include <mapnik/geometry/is_simple.hpp>
++#include <mapnik/geometry/is_empty.hpp>
++#include <mapnik/geometry/correct.hpp>
++#include <mapnik/geometry/centroid.hpp>
+ 
+ #include <mapnik/wkt/wkt_factory.hpp> // from_wkt
+ #include <mapnik/json/geometry_parser.hpp> // from_geojson
+@@ -117,12 +117,12 @@ PyObject* to_wkb_impl(mapnik::geometry::
+ 
+ std::string to_geojson_impl(mapnik::geometry::geometry<double> const& geom)
+ {
+-    std::string json;
+-    if (!mapnik::util::to_geojson(json, geom))
++    std::string wkt;
++    if (!mapnik::util::to_geojson(wkt, geom))
+     {
+         throw std::runtime_error("Generate JSON failed");
+     }
+-    return json;
++    return wkt;
+ }
+ 
+ std::string to_wkt_impl(mapnik::geometry::geometry<double> const& geom)
+@@ -168,14 +168,29 @@ void geometry_correct_impl(mapnik::geome
+     mapnik::geometry::correct(geom);
+ }
+ 
+-void polygon_set_exterior_impl(mapnik::geometry::polygon<double> & poly, mapnik::geometry::linear_ring<double> const& ring)
++void line_string_add_coord_impl1(mapnik::geometry::line_string<double> & l, double x, double y)
++{
++    l.emplace_back(x, y);
++}
++
++void line_string_add_coord_impl2(mapnik::geometry::line_string<double> & l, mapnik::geometry::point<double> const& p)
++{
++    l.push_back(p);
++}
++
++void linear_ring_add_coord_impl1(mapnik::geometry::linear_ring<double> & l, double x, double y)
++{
++    l.emplace_back(x, y);
++}
++
++void linear_ring_add_coord_impl2(mapnik::geometry::linear_ring<double> & l, mapnik::geometry::point<double> const& p)
+ {
+-    poly.exterior_ring = ring; // copy
++    l.push_back(p);
+ }
+ 
+-void polygon_add_hole_impl(mapnik::geometry::polygon<double> & poly, mapnik::geometry::linear_ring<double> const& ring)
++void polygon_add_ring_impl(mapnik::geometry::polygon<double> & poly, mapnik::geometry::linear_ring<double> const& ring)
+ {
+-    poly.interior_rings.push_back(ring); // copy
++    poly.push_back(ring); // copy
+ }
+ 
+ mapnik::geometry::point<double> geometry_centroid_impl(mapnik::geometry::geometry<double> const& geom)
+@@ -230,7 +245,8 @@ void export_geometry()
+ 
+     class_<line_string<double> >("LineString", init<>(
+                       "Constructs a new LineString object\n"))
+-        .def("add_coord", &line_string<double>::add_coord, "Adds coord")
++        .def("add_coord", &line_string_add_coord_impl1, "Adds coord x,y")
++        .def("add_point", &line_string_add_coord_impl2, "Adds point")
+ #if BOOST_VERSION >= 105800
+         .def("is_valid", &geometry_is_valid_impl)
+         .def("is_simple", &geometry_is_simple_impl)
+@@ -242,14 +258,14 @@ void export_geometry()
+ 
+     class_<linear_ring<double> >("LinearRing", init<>(
+                             "Constructs a new LinearRtring object\n"))
+-        .def("add_coord", &linear_ring<double>::add_coord, "Adds coord")
++        .def("add_coord", &linear_ring_add_coord_impl1, "Adds coord x,y")
++        .def("add_point", &linear_ring_add_coord_impl2, "Adds point")
+         ;
+ 
+     class_<polygon<double> >("Polygon", init<>(
+                         "Constructs a new Polygon object\n"))
+-        .add_property("exterior_ring", &polygon<double>::exterior_ring , "Exterior ring")
+-        .def("add_hole", &polygon_add_hole_impl, "Add interior ring")
+-        .def("num_rings", polygon_set_exterior_impl, "Number of rings (at least 1)")
++        .def("add_ring", &polygon_add_ring_impl, "Add ring")
++        .def("num_rings", &polygon<double>::size, "Number of rings")
+ #if BOOST_VERSION >= 105800
+         .def("is_valid", &geometry_is_valid_impl)
+         .def("is_simple", &geometry_is_simple_impl)
+--- a/src/mapnik_image.cpp
++++ b/src/mapnik_image.cpp
+@@ -250,22 +250,36 @@ std::shared_ptr<image_any> fromstring(st
+     {
+         return std::make_shared<image_any>(reader->read(0,0,reader->width(), reader->height()));
+     }
+-    throw mapnik::image_reader_exception("Failed to load image from buffer" );
++    throw mapnik::image_reader_exception("Failed to load image from String" );
++}
++
++namespace {
++struct view_release
++{
++    view_release(Py_buffer & view)
++        : view_(view) {}
++    ~view_release()
++    {
++        PyBuffer_Release(&view_);
++    }
++    Py_buffer & view_;
++};
+ }
+ 
+ std::shared_ptr<image_any> frombuffer(PyObject * obj)
+ {
+-    void const* buffer=0;
+-    Py_ssize_t buffer_len;
+-    if (PyObject_AsReadBuffer(obj, &buffer, &buffer_len) == 0)
++    Py_buffer view; view.obj = nullptr; // release below must be a no-op if GetBuffer fails or is skipped
++    view_release helper(view);
++    if (obj != nullptr && PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) == 0)
+     {
+-        std::unique_ptr<image_reader> reader(get_image_reader(reinterpret_cast<char const*>(buffer),buffer_len));
++        std::unique_ptr<image_reader> reader
++            (get_image_reader(reinterpret_cast<char const*>(view.buf), view.len));
+         if (reader.get())
+         {
+             return std::make_shared<image_any>(reader->read(0,0,reader->width(),reader->height()));
+         }
+     }
+-    throw mapnik::image_reader_exception("Failed to load image from buffer" );
++    throw mapnik::image_reader_exception("Failed to load image from Buffer" );
+ }
+ 
+ void set_grayscale_to_alpha(image_any & im)
+--- a/src/mapnik_layer.cpp
++++ b/src/mapnik_layer.cpp
+@@ -146,13 +146,13 @@ void export_layer()
+     class_<layer>("Layer", "A Mapnik map layer.", init<std::string const&,optional<std::string const&> >(
+                       "Create a Layer with a named string and, optionally, an srs string.\n"
+                       "\n"
+-                      "The srs can be either a Proj.4 epsg code ('+init=epsg:<code>') or\n"
+-                      "of a Proj.4 literal ('+proj=<literal>').\n"
+-                      "If no srs is specified it will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
++                      "The srs can be either a Proj epsg code ('epsg:<code>') or\n"
++                      "of a Proj literal ('+proj=<literal>').\n"
++                      "If no srs is specified it will default to 'epsg:4326'\n"
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr\n"
+                       "<mapnik._mapnik.Layer object at 0x6a270>\n"
+                       ))
+@@ -166,7 +166,7 @@ void export_layer()
+              "\n"
+              "Usage:\n"
+              ">>> from mapnik import Layer\n"
+-             ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++             ">>> lyr = Layer('My Layer','epsg:4326')\n"
+              ">>> lyr.envelope()\n"
+              "box2d(-1.0,-1.0,0.0,0.0) # default until a datasource is loaded\n"
+             )
+@@ -183,7 +183,7 @@ void export_layer()
+              "\n"
+              "Usage:\n"
+              ">>> from mapnik import Layer\n"
+-             ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++             ">>> lyr = Layer('My Layer','epsg:4326')\n"
+              ">>> lyr.visible(1.0/1000000)\n"
+              "True\n"
+              ">>> lyr.active = False\n"
+@@ -198,7 +198,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.active\n"
+                       "True # Active by default\n"
+                       ">>> lyr.active = False # set False to disable layer rendering\n"
+@@ -213,7 +213,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.status\n"
+                       "True # Active by default\n"
+                       ">>> lyr.status = False # set False to disable layer rendering\n"
+@@ -250,7 +250,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer, Datasource\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.datasource = Datasource(type='shape',file='world_borders')\n"
+                       ">>> lyr.datasource\n"
+                       "<mapnik.Datasource object at 0x65470>\n"
+@@ -285,7 +285,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.maximum_scale_denominator\n"
+                       "1.7976931348623157e+308 # default is the numerical maximum\n"
+                       ">>> lyr.maximum_scale_denominator = 1.0/1000000\n"
+@@ -300,7 +300,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.minimum_scale_denominator # default is 0\n"
+                       "0.0\n"
+                       ">>> lyr.minimum_scale_denominator = 1.0/1000000\n"
+@@ -315,7 +315,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import Layer\n"
+-                      ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = Layer('My Layer','epsg:4326')\n"
+                       ">>> lyr.name\n"
+                       "'My Layer'\n"
+                       ">>> lyr.name = 'New Name'\n"
+@@ -330,7 +330,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import layer\n"
+-                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = layer('My layer','epsg:4326')\n"
+                       ">>> lyr.queryable\n"
+                       "False # Not queryable by default\n"
+                       ">>> lyr.queryable = True\n"
+@@ -345,12 +345,12 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import layer\n"
+-                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = layer('My layer','epsg:4326')\n"
+                       ">>> lyr.srs\n"
+-                      "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n"
+-                      ">>> # set to google mercator with Proj.4 literal\n"
++                      "'epsg:4326' # The default srs if not initialized with custom srs\n"
++                      ">>> # set to google mercator with Proj literal\n"
+                       "... \n"
+-                      ">>> lyr.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n"
++                      ">>> lyr.srs = 'epsg:3857'\n"
+             )
+ 
+         .add_property("group_by",
+@@ -367,7 +367,7 @@ void export_layer()
+                       "\n"
+                       "Usage:\n"
+                       ">>> from mapnik import layer\n"
+-                      ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n"
++                      ">>> lyr = layer('My layer','epsg:4326')\n"
+                       ">>> lyr.styles\n"
+                       "<mapnik._mapnik.Names object at 0x6d3e8>\n"
+                       ">>> len(lyr.styles)\n"
+--- a/src/mapnik_map.cpp
++++ b/src/mapnik_map.cpp
+@@ -165,9 +165,9 @@ void export_map()
+     class_<Map>("Map","The map object.",init<int,int,optional<std::string const&> >(
+                     ( arg("width"),arg("height"),arg("srs") ),
+                     "Create a Map with a width and height as integers and, optionally,\n"
+-                    "an srs string either with a Proj.4 epsg code ('+init=epsg:<code>')\n"
+-                    "or with a Proj.4 literal ('+proj=<literal>').\n"
+-                    "If no srs is specified the map will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
++                    "an srs string either with a Proj epsg code ('epsg:<code>')\n"
++                    "or with a Proj literal ('+proj=<literal>').\n"
++                    "If no srs is specified the map will default to 'epsg:4326'\n"
+                     "\n"
+                     "Usage:\n"
+                     ">>> from mapnik import Map\n"
+@@ -175,7 +175,7 @@ void export_map()
+                     ">>> m\n"
+                     "<mapnik._mapnik.Map object at 0x6a240>\n"
+                     ">>> m.srs\n"
+-                    "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
++                    "'epsg:4326'\n"
+                     ))
+ 
+         .def("append_style",insert_style,
+@@ -502,22 +502,22 @@ void export_map()
+         .add_property("srs",
+                       make_function(&Map::srs,return_value_policy<copy_const_reference>()),
+                       &Map::set_srs,
+-                      "Spatial reference in Proj.4 format.\n"
++                      "Spatial reference in Proj format.\n"
+                       "Either an epsg code or proj literal.\n"
+                       "For example, a proj literal:\n"
+-                      "\t'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
++                      "\t'epsg:4326'\n"
+                       "and a proj epsg code:\n"
+-                      "\t'+init=epsg:4326'\n"
++                      "\t'epsg:4326'\n"
+                       "\n"
+                       "Note: using epsg codes requires the installation of\n"
+-                      "the Proj.4 'epsg' data file normally found in '/usr/local/share/proj'\n"
++                      "the Proj 'epsg' data file normally found in '/usr/local/share/proj'\n"
+                       "\n"
+                       "Usage:\n"
+                       ">>> m.srs\n"
+-                      "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n"
++                      "'epsg:4326' # The default srs if not initialized with custom srs\n"
+                       ">>> # set to google mercator with Proj.4 literal\n"
+                       "... \n"
+-                      ">>> m.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n"
++                      ">>> m.srs = 'epsg:3857'\n"
+             )
+ 
+         .add_property("width",
+--- a/src/mapnik_parameters.cpp
++++ b/src/mapnik_parameters.cpp
+@@ -33,7 +33,7 @@
+ #include <mapnik/params.hpp>
+ #include <mapnik/unicode.hpp>
+ #include <mapnik/value.hpp>
+-#include <mapnik/value_types.hpp>
++#include <mapnik/value/types.hpp>
+ // stl
+ #include <iterator>
+ 
+--- a/src/mapnik_proj_transform.cpp
++++ b/src/mapnik_proj_transform.cpp
+@@ -33,7 +33,7 @@
+ #include <mapnik/proj_transform.hpp>
+ #include <mapnik/projection.hpp>
+ #include <mapnik/coord.hpp>
+-#include <mapnik/box2d.hpp>
++#include <mapnik/geometry/box2d.hpp>
+ 
+ // stl
+ #include <stdexcept>
+@@ -48,7 +48,7 @@ struct proj_transform_pickle_suite : boo
+     getinitargs(const proj_transform& p)
+     {
+         using namespace boost::python;
+-        return boost::python::make_tuple(p.source(),p.dest());
++        return boost::python::make_tuple(p.definition());
+     }
+ };
+ 
+@@ -62,7 +62,7 @@ mapnik::coord2d forward_transform_c(mapn
+     if (!t.forward(x,y,z)) {
+         std::ostringstream s;
+         s << "Failed to forward project "
+-          << "from " << t.source().params() << " to: " << t.dest().params();
++          << t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return mapnik::coord2d(x,y);
+@@ -76,7 +76,7 @@ mapnik::coord2d backward_transform_c(map
+     if (!t.backward(x,y,z)) {
+         std::ostringstream s;
+         s << "Failed to back project "
+-          << "from " <<  t.dest().params() << " to: " << t.source().params();
++         << t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return mapnik::coord2d(x,y);
+@@ -88,7 +88,7 @@ mapnik::box2d<double> forward_transform_
+     if (!t.forward(new_box)) {
+         std::ostringstream s;
+         s << "Failed to forward project "
+-          << "from " << t.source().params() << " to: " << t.dest().params();
++          << t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return new_box;
+@@ -100,7 +100,7 @@ mapnik::box2d<double> backward_transform
+     if (!t.backward(new_box)){
+         std::ostringstream s;
+         s << "Failed to back project "
+-          << "from " <<  t.dest().params() << " to: " << t.source().params();
++          << t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return new_box;
+@@ -112,7 +112,7 @@ mapnik::box2d<double> forward_transform_
+     if (!t.forward(new_box,points)) {
+         std::ostringstream s;
+         s << "Failed to forward project "
+-          << "from " << t.source().params() << " to: " << t.dest().params();
++          << t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return new_box;
+@@ -124,7 +124,7 @@ mapnik::box2d<double> backward_transform
+     if (!t.backward(new_box,points)){
+         std::ostringstream s;
+         s << "Failed to back project "
+-          << "from " <<  t.dest().params() << " to: " << t.source().params();
++          <<  t.definition();
+         throw std::runtime_error(s.str());
+     }
+     return new_box;
+@@ -136,7 +136,7 @@ void export_proj_transform ()
+ {
+     using namespace boost::python;
+ 
+-    class_<proj_transform, boost::noncopyable>("ProjTransform", init< projection const&, projection const& >())
++    class_<proj_transform, boost::noncopyable>("ProjTransform", init<projection const&, projection const&>())
+         .def_pickle(proj_transform_pickle_suite())
+         .def("forward", forward_transform_c)
+         .def("backward",backward_transform_c)
+@@ -144,6 +144,7 @@ void export_proj_transform ()
+         .def("backward",backward_transform_env)
+         .def("forward", forward_transform_env_p)
+         .def("backward",backward_transform_env_p)
++        .def("definition",&proj_transform::definition)
+         ;
+ 
+ }
+--- a/src/mapnik_projection.cpp
++++ b/src/mapnik_projection.cpp
+@@ -30,7 +30,7 @@
+ 
+ // mapnik
+ #include <mapnik/coord.hpp>
+-#include <mapnik/box2d.hpp>
++#include <mapnik/geometry/box2d.hpp>
+ #include <mapnik/projection.hpp>
+ 
+ using mapnik::projection;
+@@ -95,8 +95,8 @@ void export_projection ()
+     using namespace boost::python;
+ 
+     class_<projection>("Projection", "Represents a map projection.",init<std::string const&>(
+-                           (arg("proj4_string")),
+-                           "Constructs a new projection from its PROJ.4 string representation.\n"
++                           (arg("proj_string")),
++                           "Constructs a new projection from its PROJ string representation.\n"
+                            "\n"
+                            "The constructor will throw a RuntimeError in case the projection\n"
+                            "cannot be initialized.\n"
+@@ -105,9 +105,11 @@ void export_projection ()
+         .def_pickle(projection_pickle_suite())
+         .def ("params", make_function(&projection::params,
+                                       return_value_policy<copy_const_reference>()),
+-              "Returns the PROJ.4 string for this projection.\n")
+-        .def ("expanded",&projection::expanded,
+-              "normalize PROJ.4 definition by expanding +init= syntax\n")
++              "Returns the PROJ string for this projection.\n")
++        .def ("definition",&projection::definition,
++              "Return projection definition\n")
++        .def ("description", &projection::description,
++              "Returns projection description")
+         .add_property ("geographic", &projection::is_geographic,
+                        "This property is True if the projection is a geographic projection\n"
+                        "(i.e. it uses lon/lat coordinates)\n")
+--- a/src/mapnik_python.cpp
++++ b/src/mapnik_python.cpp
+@@ -100,7 +100,7 @@ void export_logger();
+ #include <mapnik/image_util.hpp>
+ #include <mapnik/image_any.hpp>
+ #include <mapnik/load_map.hpp>
+-#include <mapnik/value_error.hpp>
++#include <mapnik/value/error.hpp>
+ #include <mapnik/value.hpp>
+ #include <mapnik/save_map.hpp>
+ #include <mapnik/scale_denominator.hpp>
+@@ -598,9 +598,9 @@ std::string mapnik_version_string()
+     return MAPNIK_VERSION_STRING;
+ }
+ 
+-bool has_proj4()
++bool has_proj()
+ {
+-#if defined(MAPNIK_USE_PROJ4)
++#if defined(MAPNIK_USE_PROJ)
+     return true;
+ #else
+     return false;
+@@ -1035,8 +1035,8 @@ BOOST_PYTHON_MODULE(_mapnik)
+   ">>> m = Map(256,256)\n"
+   ">>> load_map(m,'mapfile_wgs84.xml')\n"
+   ">>> m.srs\n"
+-  "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n"
+-  ">>> m.srs = '+init=espg:3395'\n"
++  "'epsg:4326'\n"
++  ">>> m.srs = 'epsg:3395'\n"
+   ">>> save_map(m,'mapfile_mercator.xml')\n"
+   "\n"
+   );
+@@ -1045,7 +1045,7 @@ BOOST_PYTHON_MODULE(_mapnik)
+     def("save_map_to_string", &save_map_to_string, save_map_to_string_overloads());
+     def("mapnik_version", &mapnik_version,"Get the Mapnik version number");
+     def("mapnik_version_string", &mapnik_version_string,"Get the Mapnik version string");
+-    def("has_proj4", &has_proj4, "Get proj4 status");
++    def("has_proj", &has_proj, "Get proj status");
+     def("has_jpeg", &has_jpeg, "Get jpeg read/write support status");
+     def("has_png", &has_png, "Get png read/write support status");
+     def("has_tiff", &has_tiff, "Get tiff read/write support status");
+--- a/src/mapnik_query.cpp
++++ b/src/mapnik_query.cpp
+@@ -32,7 +32,7 @@
+ 
+ // mapnik
+ #include <mapnik/query.hpp>
+-#include <mapnik/box2d.hpp>
++#include <mapnik/geometry/box2d.hpp>
+ 
+ #include <string>
+ #include <set>
+--- a/src/mapnik_raster_colorizer.cpp
++++ b/src/mapnik_raster_colorizer.cpp
+@@ -40,11 +40,6 @@ using mapnik::colorizer_stop;
+ using mapnik::colorizer_stops;
+ using mapnik::colorizer_mode_enum;
+ using mapnik::color;
+-using mapnik::COLORIZER_INHERIT;
+-using mapnik::COLORIZER_LINEAR;
+-using mapnik::COLORIZER_DISCRETE;
+-using mapnik::COLORIZER_EXACT;
+-
+ 
+ namespace {
+ void add_stop(raster_colorizer_ptr & rc, colorizer_stop & stop)
+@@ -196,10 +191,10 @@ void export_raster_colorizer()
+         ;
+ 
+     enum_<colorizer_mode_enum>("ColorizerMode")
+-        .value("COLORIZER_INHERIT", COLORIZER_INHERIT)
+-        .value("COLORIZER_LINEAR", COLORIZER_LINEAR)
+-        .value("COLORIZER_DISCRETE", COLORIZER_DISCRETE)
+-        .value("COLORIZER_EXACT", COLORIZER_EXACT)
++        .value("COLORIZER_INHERIT", colorizer_mode_enum::COLORIZER_INHERIT)
++        .value("COLORIZER_LINEAR", colorizer_mode_enum::COLORIZER_LINEAR)
++        .value("COLORIZER_DISCRETE", colorizer_mode_enum::COLORIZER_DISCRETE)
++        .value("COLORIZER_EXACT", colorizer_mode_enum::COLORIZER_EXACT)
+         .export_values()
+         ;
+ 
+--- a/src/mapnik_style.cpp
++++ b/src/mapnik_style.cpp
+@@ -30,7 +30,7 @@
+ #pragma GCC diagnostic pop
+ 
+ // mapnik
+-#include <mapnik/value_error.hpp>
++#include <mapnik/value/error.hpp>
+ #include <mapnik/rule.hpp>
+ #include "mapnik_enumeration.hpp"
+ #include <mapnik/feature_type_style.hpp>
+@@ -69,8 +69,8 @@ void export_style()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::filter_mode_e>("filter_mode")
+-        .value("ALL",mapnik::FILTER_ALL)
+-        .value("FIRST",mapnik::FILTER_FIRST)
++        .value("ALL",mapnik::filter_mode_enum::FILTER_ALL)
++        .value("FIRST",mapnik::filter_mode_enum::FILTER_FIRST)
+         ;
+ 
+     class_<rules>("Rules",init<>("default ctor"))
+--- a/src/mapnik_svg.hpp
++++ b/src/mapnik_svg.hpp
+@@ -23,9 +23,9 @@
+ #define MAPNIK_PYTHON_BINDING_SVG_INCLUDED
+ 
+ // mapnik
+-#include <mapnik/parse_transform.hpp>
++#include <mapnik/transform/parse_transform.hpp>
+ #include <mapnik/symbolizer.hpp>
+-#include <mapnik/value_error.hpp>
++#include <mapnik/value/error.hpp>
+ 
+ #pragma GCC diagnostic push
+ #include <mapnik/warning_ignore.hpp>
+--- a/src/mapnik_symbolizer.cpp
++++ b/src/mapnik_symbolizer.cpp
+@@ -40,7 +40,7 @@
+ #include "mapnik_enumeration.hpp"
+ #include "mapnik_svg.hpp"
+ #include <mapnik/expression_node.hpp>
+-#include <mapnik/value_error.hpp>
++#include <mapnik/value/error.hpp>
+ #include <mapnik/marker_cache.hpp> // for known_svg_prefix_
+ #include <mapnik/group/group_layout.hpp>
+ #include <mapnik/group/group_rule.hpp>
+@@ -87,6 +87,29 @@ struct value_to_target
+         case mapnik::property_types::target_double:
+             put(sym_, key, static_cast<mapnik::value_double>(val));
+             break;
++        case mapnik::property_types::target_pattern_alignment:
++        case mapnik::property_types::target_comp_op:
++        case mapnik::property_types::target_line_rasterizer:
++        case mapnik::property_types::target_scaling_method:
++        case mapnik::property_types::target_line_cap:
++        case mapnik::property_types::target_line_join:
++        case mapnik::property_types::target_smooth_algorithm:
++        case mapnik::property_types::target_simplify_algorithm:
++        case mapnik::property_types::target_halo_rasterizer:
++        case mapnik::property_types::target_markers_placement:
++        case mapnik::property_types::target_markers_multipolicy:
++        case mapnik::property_types::target_halo_comp_op:
++        case mapnik::property_types::target_text_transform:
++        case mapnik::property_types::target_horizontal_alignment:
++        case mapnik::property_types::target_justify_alignment:
++        case mapnik::property_types::target_vertical_alignment:
++        case mapnik::property_types::target_upright:
++        case mapnik::property_types::target_direction:
++        case mapnik::property_types::target_line_pattern:
++        {
++            put(sym_, key, mapnik::enumeration_wrapper(val));
++            break;
++        }
+         default:
+             put(sym_, key, val);
+             break;
+@@ -173,6 +196,16 @@ boost::python::object __getitem__(mapnik
+     return boost::python::object();
+ }
+ 
++boost::python::object symbolizer_keys(mapnik::symbolizer_base const& sym)
++{
++    boost::python::list keys;
++    for (auto const& kv : sym.properties)
++    {
++        std::string name = std::get<0>(mapnik::get_meta(kv.first));
++        keys.append(name);
++    }
++    return keys;
++}
+ /*
+ std::string __str__(mapnik::symbolizer const& sym)
+ {
+@@ -222,6 +255,7 @@ void export_symbolizer()
+     implicitly_convertible<std::string, mapnik::symbolizer_base::value_type>();
+     implicitly_convertible<mapnik::color, mapnik::symbolizer_base::value_type>();
+     implicitly_convertible<mapnik::expression_ptr, mapnik::symbolizer_base::value_type>();
++    implicitly_convertible<mapnik::path_expression_ptr, mapnik::symbolizer_base::value_type>();
+     implicitly_convertible<mapnik::enumeration_wrapper, mapnik::symbolizer_base::value_type>();
+     implicitly_convertible<std::shared_ptr<mapnik::group_symbolizer_properties>, mapnik::symbolizer_base::value_type>();
+ 
+@@ -245,6 +279,7 @@ void export_symbolizer()
+         .def("__setattr__",&__setitem__)
+         .def("__getitem__",&__getitem__)
+         .def("__getattr__",&__getitem__)
++        .def("keys", &symbolizer_keys)
+         //.def("__str__", &__str__)
+         .def(self == self) // __eq__
+         ;
+@@ -254,38 +289,38 @@ void export_text_symbolizer()
+ {
+     using namespace boost::python;
+     mapnik::enumeration_<mapnik::label_placement_e>("label_placement")
+-        .value("LINE_PLACEMENT", mapnik::LINE_PLACEMENT)
+-        .value("POINT_PLACEMENT", mapnik::POINT_PLACEMENT)
+-        .value("VERTEX_PLACEMENT", mapnik::VERTEX_PLACEMENT)
+-        .value("INTERIOR_PLACEMENT", mapnik::INTERIOR_PLACEMENT);
++        .value("LINE_PLACEMENT", mapnik::label_placement_enum::LINE_PLACEMENT)
++        .value("POINT_PLACEMENT", mapnik::label_placement_enum::POINT_PLACEMENT)
++        .value("VERTEX_PLACEMENT", mapnik::label_placement_enum::VERTEX_PLACEMENT)
++        .value("INTERIOR_PLACEMENT", mapnik::label_placement_enum::INTERIOR_PLACEMENT);
+ 
+     mapnik::enumeration_<mapnik::vertical_alignment_e>("vertical_alignment")
+-        .value("TOP", mapnik::V_TOP)
+-        .value("MIDDLE", mapnik::V_MIDDLE)
+-        .value("BOTTOM", mapnik::V_BOTTOM)
+-        .value("AUTO", mapnik::V_AUTO);
++        .value("TOP", mapnik::vertical_alignment_enum::V_TOP)
++        .value("MIDDLE", mapnik::vertical_alignment_enum::V_MIDDLE)
++        .value("BOTTOM", mapnik::vertical_alignment_enum::V_BOTTOM)
++        .value("AUTO", mapnik::vertical_alignment_enum::V_AUTO);
+ 
+     mapnik::enumeration_<mapnik::horizontal_alignment_e>("horizontal_alignment")
+-        .value("LEFT", mapnik::H_LEFT)
+-        .value("MIDDLE", mapnik::H_MIDDLE)
+-        .value("RIGHT", mapnik::H_RIGHT)
+-        .value("AUTO", mapnik::H_AUTO);
++        .value("LEFT", mapnik::horizontal_alignment_enum::H_LEFT)
++        .value("MIDDLE", mapnik::horizontal_alignment_enum::H_MIDDLE)
++        .value("RIGHT", mapnik::horizontal_alignment_enum::H_RIGHT)
++        .value("AUTO", mapnik::horizontal_alignment_enum::H_AUTO);
+ 
+     mapnik::enumeration_<mapnik::justify_alignment_e>("justify_alignment")
+-        .value("LEFT", mapnik::J_LEFT)
+-        .value("MIDDLE", mapnik::J_MIDDLE)
+-        .value("RIGHT", mapnik::J_RIGHT)
+-        .value("AUTO", mapnik::J_AUTO);
++        .value("LEFT", mapnik::justify_alignment_enum::J_LEFT)
++        .value("MIDDLE", mapnik::justify_alignment_enum::J_MIDDLE)
++        .value("RIGHT", mapnik::justify_alignment_enum::J_RIGHT)
++        .value("AUTO", mapnik::justify_alignment_enum::J_AUTO);
+ 
+     mapnik::enumeration_<mapnik::text_transform_e>("text_transform")
+-        .value("NONE", mapnik::NONE)
+-        .value("UPPERCASE", mapnik::UPPERCASE)
+-        .value("LOWERCASE", mapnik::LOWERCASE)
+-        .value("CAPITALIZE", mapnik::CAPITALIZE);
++        .value("NONE", mapnik::text_transform_enum::NONE)
++        .value("UPPERCASE", mapnik::text_transform_enum::UPPERCASE)
++        .value("LOWERCASE", mapnik::text_transform_enum::LOWERCASE)
++        .value("CAPITALIZE", mapnik::text_transform_enum::CAPITALIZE);
+ 
+     mapnik::enumeration_<mapnik::halo_rasterizer_e>("halo_rasterizer")
+-        .value("FULL", mapnik::HALO_RASTERIZER_FULL)
+-        .value("FAST", mapnik::HALO_RASTERIZER_FAST);
++        .value("FULL", mapnik::halo_rasterizer_enum::HALO_RASTERIZER_FULL)
++        .value("FAST", mapnik::halo_rasterizer_enum::HALO_RASTERIZER_FAST);
+ 
+     class_< text_symbolizer, bases<symbolizer_base> >("TextSymbolizer",
+                                                       init<>("Default ctor"))
+@@ -320,8 +355,8 @@ void export_polygon_pattern_symbolizer()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::pattern_alignment_e>("pattern_alignment")
+-        .value("LOCAL",mapnik::LOCAL_ALIGNMENT)
+-        .value("GLOBAL",mapnik::GLOBAL_ALIGNMENT)
++        .value("LOCAL",mapnik::pattern_alignment_enum::LOCAL_ALIGNMENT)
++        .value("GLOBAL",mapnik::pattern_alignment_enum::GLOBAL_ALIGNMENT)
+         ;
+ 
+     class_<polygon_pattern_symbolizer>("PolygonPatternSymbolizer",
+@@ -344,8 +379,8 @@ void export_point_symbolizer()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::point_placement_e>("point_placement")
+-        .value("CENTROID",mapnik::CENTROID_POINT_PLACEMENT)
+-        .value("INTERIOR",mapnik::INTERIOR_POINT_PLACEMENT)
++        .value("CENTROID",mapnik::point_placement_enum::CENTROID_POINT_PLACEMENT)
++        .value("INTERIOR",mapnik::point_placement_enum::INTERIOR_POINT_PLACEMENT)
+         ;
+ 
+     class_<point_symbolizer, bases<symbolizer_base> >("PointSymbolizer",
+@@ -359,15 +394,15 @@ void export_markers_symbolizer()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::marker_placement_e>("marker_placement")
+-        .value("POINT_PLACEMENT",mapnik::MARKER_POINT_PLACEMENT)
+-        .value("INTERIOR_PLACEMENT",mapnik::MARKER_INTERIOR_PLACEMENT)
+-        .value("LINE_PLACEMENT",mapnik::MARKER_LINE_PLACEMENT)
++        .value("POINT_PLACEMENT",mapnik::marker_placement_enum::MARKER_POINT_PLACEMENT)
++        .value("INTERIOR_PLACEMENT",mapnik::marker_placement_enum::MARKER_INTERIOR_PLACEMENT)
++        .value("LINE_PLACEMENT",mapnik::marker_placement_enum::MARKER_LINE_PLACEMENT)
+         ;
+ 
+     mapnik::enumeration_<mapnik::marker_multi_policy_e>("marker_multi_policy")
+-        .value("EACH",mapnik::MARKER_EACH_MULTI)
+-        .value("WHOLE",mapnik::MARKER_WHOLE_MULTI)
+-        .value("LARGEST",mapnik::MARKER_LARGEST_MULTI)
++        .value("EACH",mapnik::marker_multi_policy_enum::MARKER_EACH_MULTI)
++        .value("WHOLE",mapnik::marker_multi_policy_enum::MARKER_WHOLE_MULTI)
++        .value("LARGEST",mapnik::marker_multi_policy_enum::MARKER_LARGEST_MULTI)
+         ;
+ 
+     class_<markers_symbolizer, bases<symbolizer_base> >("MarkersSymbolizer",
+@@ -382,25 +417,25 @@ void export_line_symbolizer()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::line_rasterizer_e>("line_rasterizer")
+-        .value("FULL",mapnik::RASTERIZER_FULL)
+-        .value("FAST",mapnik::RASTERIZER_FAST)
++        .value("FULL",mapnik::line_rasterizer_enum::RASTERIZER_FULL)
++        .value("FAST",mapnik::line_rasterizer_enum::RASTERIZER_FAST)
+         ;
+ 
+     mapnik::enumeration_<mapnik::line_cap_e>("stroke_linecap",
+                              "The possible values for a line cap used when drawing\n"
+                              "with a stroke.\n")
+-        .value("BUTT_CAP",mapnik::BUTT_CAP)
+-        .value("SQUARE_CAP",mapnik::SQUARE_CAP)
+-        .value("ROUND_CAP",mapnik::ROUND_CAP)
++        .value("BUTT_CAP",mapnik::line_cap_enum::BUTT_CAP)
++        .value("SQUARE_CAP",mapnik::line_cap_enum::SQUARE_CAP)
++        .value("ROUND_CAP",mapnik::line_cap_enum::ROUND_CAP)
+         ;
+ 
+     mapnik::enumeration_<mapnik::line_join_e>("stroke_linejoin",
+                                       "The possible values for the line joining mode\n"
+                                       "when drawing with a stroke.\n")
+-        .value("MITER_JOIN",mapnik::MITER_JOIN)
+-        .value("MITER_REVERT_JOIN",mapnik::MITER_REVERT_JOIN)
+-        .value("ROUND_JOIN",mapnik::ROUND_JOIN)
+-        .value("BEVEL_JOIN",mapnik::BEVEL_JOIN)
++        .value("MITER_JOIN",mapnik::line_join_enum::MITER_JOIN)
++        .value("MITER_REVERT_JOIN",mapnik::line_join_enum::MITER_REVERT_JOIN)
++        .value("ROUND_JOIN",mapnik::line_join_enum::ROUND_JOIN)
++        .value("BEVEL_JOIN",mapnik::line_join_enum::BEVEL_JOIN)
+         ;
+ 
+ 
+@@ -425,8 +460,8 @@ void export_debug_symbolizer()
+     using namespace boost::python;
+ 
+     mapnik::enumeration_<mapnik::debug_symbolizer_mode_e>("debug_symbolizer_mode")
+-        .value("COLLISION",mapnik::DEBUG_SYM_MODE_COLLISION)
+-        .value("VERTEX",mapnik::DEBUG_SYM_MODE_VERTEX)
++        .value("COLLISION",mapnik::debug_symbolizer_mode_enum::DEBUG_SYM_MODE_COLLISION)
++        .value("VERTEX",mapnik::debug_symbolizer_mode_enum::DEBUG_SYM_MODE_VERTEX)
+         ;
+ 
+     class_<debug_symbolizer, bases<symbolizer_base> >("DebugSymbolizer",
+--- a/src/python_grid_utils.cpp
++++ b/src/python_grid_utils.cpp
+@@ -36,7 +36,7 @@
+ #include <mapnik/grid/grid_renderer.hpp>
+ #include <mapnik/grid/grid.hpp>
+ #include <mapnik/grid/grid_view.hpp>
+-#include <mapnik/value_error.hpp>
++#include <mapnik/value/error.hpp>
+ #include <mapnik/feature.hpp>
+ #include <mapnik/feature_kv_iterator.hpp>
+ #include "python_grid_utils.hpp"
+@@ -105,7 +105,7 @@ void grid2utf(T const& grid_type,
+         }
+         l.append(boost::python::object(
+                      boost::python::handle<>(
+-                         PyUnicode_FromUnicode(line.get(), array_size))));
++                         PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, line.get(), array_size))));
+     }
+ }
+ 
+@@ -168,7 +168,7 @@ void grid2utf(T const& grid_type,
+         }
+         l.append(boost::python::object(
+                      boost::python::handle<>(
+-                         PyUnicode_FromUnicode(line.get(), array_size))));
++                         PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, line.get(), array_size))));
+     }
+ }
+ 
+--- a/test/python_tests/agg_rasterizer_integer_overflow_test.py
++++ b/test/python_tests/agg_rasterizer_integer_overflow_test.py
+@@ -1,14 +1,6 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import json
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+ # geojson box of the world
+ geojson = {"type": "Feature",
+            "properties": {},
+@@ -24,10 +16,9 @@ geojson = {"type": "Feature",
+                                          [-17963313.143242701888084,
+                                           -6300857.11560364998877]]]}}
+ 
+-
+ def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_memory():
+     expected_color = mapnik.Color('white')
+-    projection = '+init=epsg:4326'
++    projection = 'epsg:4326'
+     ds = mapnik.MemoryDatasource()
+     context = mapnik.Context()
+     feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
+@@ -52,12 +43,11 @@ def test_that_coordinates_do_not_overflo
+     # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
+     im = mapnik.Image(256, 256)
+     mapnik.render(m, im)
+-    eq_(im.get_pixel(128, 128), expected_color.packed())
+-
++    assert im.get_pixel(128, 128) ==  expected_color.packed()
+ 
+ def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_csv():
+     expected_color = mapnik.Color('white')
+-    projection = '+init=epsg:4326'
++    projection = 'epsg:4326'
+     ds = mapnik.MemoryDatasource()
+     context = mapnik.Context()
+     feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
+@@ -84,7 +74,4 @@ def test_that_coordinates_do_not_overflo
+     # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
+     im = mapnik.Image(256, 256)
+     mapnik.render(m, im)
+-    eq_(im.get_pixel(128, 128), expected_color.packed())
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert im.get_pixel(128, 128) == expected_color.packed()
+--- a/test/python_tests/box2d_test.py
++++ b/test/python_tests/box2d_test.py
+@@ -1,184 +1,155 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-from nose.tools import assert_almost_equal, assert_false, assert_true, eq_
+-
+ import mapnik
+-
+-from .utilities import run_all
+-
++import pytest
+ 
+ def test_coord_init():
+     c = mapnik.Coord(100, 100)
+-
+-    eq_(c.x, 100)
+-    eq_(c.y, 100)
+-
++    assert c.x == 100
++    assert c.y == 100
+ 
+ def test_coord_multiplication():
+-    c = mapnik.Coord(100, 100)
+-    c *= 2
+-
+-    eq_(c.x, 200)
+-    eq_(c.y, 200)
+-
++     c = mapnik.Coord(100, 100)
++     c *= 2
++     assert c.x == 200
++     assert c.y == 200
+ 
+ def test_envelope_init():
+-    e = mapnik.Box2d(100, 100, 200, 200)
+-
+-    assert_true(e.contains(100, 100))
+-    assert_true(e.contains(100, 200))
+-    assert_true(e.contains(200, 200))
+-    assert_true(e.contains(200, 100))
+-
+-    assert_true(e.contains(e.center()))
+-
+-    assert_false(e.contains(99.9, 99.9))
+-    assert_false(e.contains(99.9, 200.1))
+-    assert_false(e.contains(200.1, 200.1))
+-    assert_false(e.contains(200.1, 99.9))
+-
+-    eq_(e.width(), 100)
+-    eq_(e.height(), 100)
+-
+-    eq_(e.minx, 100)
+-    eq_(e.miny, 100)
+-
+-    eq_(e.maxx, 200)
+-    eq_(e.maxy, 200)
+-
+-    eq_(e[0], 100)
+-    eq_(e[1], 100)
+-    eq_(e[2], 200)
+-    eq_(e[3], 200)
+-    eq_(e[0], e[-4])
+-    eq_(e[1], e[-3])
+-    eq_(e[2], e[-2])
+-    eq_(e[3], e[-1])
+-
+-    c = e.center()
+-
+-    eq_(c.x, 150)
+-    eq_(c.y, 150)
++     e = mapnik.Box2d(100, 100, 200, 200)
++     assert e.contains(100, 100)
++     assert e.contains(100, 200)
++     assert e.contains(200, 200)
++     assert e.contains(200, 100)
++     assert e.contains(e.center())
++     assert not e.contains(99.9, 99.9)
++     assert not e.contains(99.9, 200.1)
++     assert not e.contains(200.1, 200.1)
++     assert not e.contains(200.1, 99.9)
++     assert e.width() ==  100
++     assert e.height() == 100
++     assert e.minx ==  100
++     assert e.miny == 100
++     assert e.maxx == 200
++     assert e.maxy == 200
++     assert  e[0] == 100
++     assert  e[1] == 100
++     assert  e[2] == 200
++     assert  e[3] == 200
++     assert  e[0] == e[-4]
++     assert  e[1] == e[-3]
++     assert  e[2] == e[-2]
++     assert  e[3] == e[-1]
++     c = e.center()
++     assert  c.x == 150
++     assert  c.y == 150
+ 
+ 
+ def test_envelope_static_init():
+     e = mapnik.Box2d.from_string('100 100 200 200')
+     e2 = mapnik.Box2d.from_string('100,100,200,200')
+     e3 = mapnik.Box2d.from_string('100 , 100 , 200 , 200')
+-    eq_(e, e2)
+-    eq_(e, e3)
+ 
+-    assert_true(e.contains(100, 100))
+-    assert_true(e.contains(100, 200))
+-    assert_true(e.contains(200, 200))
+-    assert_true(e.contains(200, 100))
+-
+-    assert_true(e.contains(e.center()))
+-
+-    assert_false(e.contains(99.9, 99.9))
+-    assert_false(e.contains(99.9, 200.1))
+-    assert_false(e.contains(200.1, 200.1))
+-    assert_false(e.contains(200.1, 99.9))
+-
+-    eq_(e.width(), 100)
+-    eq_(e.height(), 100)
+-
+-    eq_(e.minx, 100)
+-    eq_(e.miny, 100)
+-
+-    eq_(e.maxx, 200)
+-    eq_(e.maxy, 200)
+-
+-    eq_(e[0], 100)
+-    eq_(e[1], 100)
+-    eq_(e[2], 200)
+-    eq_(e[3], 200)
+-    eq_(e[0], e[-4])
+-    eq_(e[1], e[-3])
+-    eq_(e[2], e[-2])
+-    eq_(e[3], e[-1])
++    assert  e == e2
++    assert  e == e3
++    assert e.contains(100, 100)
++    assert e.contains(100, 200)
++    assert e.contains(200, 200)
++    assert e.contains(200, 100)
++
++    assert e.contains(e.center())
++    assert not e.contains(99.9, 99.9)
++    assert not e.contains(99.9, 200.1)
++    assert not e.contains(200.1, 200.1)
++    assert not e.contains(200.1, 99.9)
++
++    assert  e.width() == 100
++    assert  e.height() == 100
++    assert  e.minx == 100
++    assert  e.miny == 100
++    assert  e.maxx == 200
++    assert  e.maxy == 200
++
++    assert  e[0] == 100
++    assert  e[1] == 100
++    assert  e[2] == 200
++    assert  e[3] == 200
++    assert  e[0] == e[-4]
++    assert  e[1] == e[-3]
++    assert  e[2] == e[-2]
++    assert  e[3] == e[-1]
+ 
+     c = e.center()
+-
+-    eq_(c.x, 150)
+-    eq_(c.y, 150)
+-
++    assert  c.x == 150
++    assert  c.y == 150
+ 
+ def test_envelope_multiplication():
+-    # no width then no impact of multiplication
+-    a = mapnik.Box2d(100, 100, 100, 100)
+-    a *= 5
+-    eq_(a.minx, 100)
+-    eq_(a.miny, 100)
+-    eq_(a.maxx, 100)
+-    eq_(a.maxy, 100)
+-
+-    a = mapnik.Box2d(100.0, 100.0, 100.0, 100.0)
+-    a *= 5
+-    eq_(a.minx, 100)
+-    eq_(a.miny, 100)
+-    eq_(a.maxx, 100)
+-    eq_(a.maxy, 100)
+-
+-    a = mapnik.Box2d(100.0, 100.0, 100.001, 100.001)
+-    a *= 5
+-    assert_almost_equal(a.minx, 99.9979, places=3)
+-    assert_almost_equal(a.miny, 99.9979, places=3)
+-    assert_almost_equal(a.maxx, 100.0030, places=3)
+-    assert_almost_equal(a.maxy, 100.0030, places=3)
+-
+-    e = mapnik.Box2d(100, 100, 200, 200)
+-    e *= 2
+-    eq_(e.minx, 50)
+-    eq_(e.miny, 50)
+-    eq_(e.maxx, 250)
+-    eq_(e.maxy, 250)
+-
+-    assert_true(e.contains(50, 50))
+-    assert_true(e.contains(50, 250))
+-    assert_true(e.contains(250, 250))
+-    assert_true(e.contains(250, 50))
+-
+-    assert_false(e.contains(49.9, 49.9))
+-    assert_false(e.contains(49.9, 250.1))
+-    assert_false(e.contains(250.1, 250.1))
+-    assert_false(e.contains(250.1, 49.9))
+-
+-    assert_true(e.contains(e.center()))
++     # no width then no impact of multiplication
++     a = mapnik.Box2d(100, 100, 100, 100)
++     a *= 5
++     assert a.minx == 100
++     assert a.miny == 100
++     assert a.maxx == 100
++     assert a.maxy == 100
++
++     a = mapnik.Box2d(100.0, 100.0, 100.0, 100.0)
++     a *= 5
++     assert  a.minx == 100
++     assert  a.miny == 100
++     assert  a.maxx == 100
++     assert  a.maxy == 100
++
++     a = mapnik.Box2d(100.0, 100.0, 100.001, 100.001)
++     a *= 5
++     assert a.minx == pytest.approx(99.9979, 1e-3)
++     assert a.miny == pytest.approx(99.9979, 1e-3)
++     assert a.maxx == pytest.approx(100.0030,1e-3)
++     assert a.maxy == pytest.approx(100.0030,1e-3)
++
++     e = mapnik.Box2d(100, 100, 200, 200)
++     e *= 2
++     assert  e.minx == 50
++     assert  e.miny == 50
++     assert  e.maxx == 250
++     assert  e.maxy == 250
++
++     assert e.contains(50, 50)
++     assert e.contains(50, 250)
++     assert e.contains(250, 250)
++     assert e.contains(250, 50)
++
++     assert not e.contains(49.9, 49.9)
++     assert not e.contains(49.9, 250.1)
++     assert not e.contains(250.1, 250.1)
++     assert not e.contains(250.1, 49.9)
++
++     c = e.center()
++     assert  c.x == 150
++     assert  c.y == 150
++
++     assert e.contains(c)
+ 
+-    eq_(e.width(), 200)
+-    eq_(e.height(), 200)
++     assert  e.width() == 200
++     assert  e.height()== 200
+ 
+-    eq_(e.minx, 50)
+-    eq_(e.miny, 50)
++     assert  e.minx == 50
++     assert  e.miny == 50
+ 
+-    eq_(e.maxx, 250)
+-    eq_(e.maxy, 250)
+-
+-    c = e.center()
+-
+-    eq_(c.x, 150)
+-    eq_(c.y, 150)
++     assert  e.maxx == 250
++     assert  e.maxy == 250
+ 
+ 
+ def test_envelope_clipping():
+-    e1 = mapnik.Box2d(-180, -90, 180, 90)
+-    e2 = mapnik.Box2d(-120, 40, -110, 48)
+-    e1.clip(e2)
+-    eq_(e1, e2)
+-
+-    # madagascar in merc
+-    e1 = mapnik.Box2d(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673)
+-    e2 = mapnik.Box2d(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520)
+-    e1.clip(e2)
+-    eq_(e1, e2)
+-
+-    # nz in lon/lat
+-    e1 = mapnik.Box2d(163.8062, -47.1897, 179.3628, -33.9069)
+-    e2 = mapnik.Box2d(173.7378, -39.6395, 174.4849, -38.9252)
+-    e1.clip(e2)
+-    eq_(e1, e2)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++     e1 = mapnik.Box2d(-180, -90, 180, 90)
++     e2 = mapnik.Box2d(-120, 40, -110, 48)
++     e1.clip(e2)
++     assert  e1 == e2
++
++     # madagascar in merc
++     e1 = mapnik.Box2d(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673)
++     e2 = mapnik.Box2d(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520)
++     e1.clip(e2)
++     assert  e1 == e2
++
++     # nz in lon/lat
++     e1 = mapnik.Box2d(163.8062, -47.1897, 179.3628, -33.9069)
++     e2 = mapnik.Box2d(173.7378, -39.6395, 174.4849, -38.9252)
++     e1.clip(e2)
++     assert  e1 == e2
+--- a/test/python_tests/buffer_clear_test.py
++++ b/test/python_tests/buffer_clear_test.py
+@@ -1,30 +1,17 @@
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+ def test_clearing_image_data():
+     im = mapnik.Image(256, 256)
+     # make sure it equals itself
+     bytes = im.tostring()
+-    eq_(im.tostring(), bytes)
++    assert im.tostring() == bytes
+     # set background, then clear
+     im.fill(mapnik.Color('green'))
+-    eq_(im.tostring() != bytes, True)
++    assert not im.tostring() == bytes
+     # clear image, should now equal original
+     im.clear()
+-    eq_(im.tostring(), bytes)
+-
++    assert im.tostring() == bytes
+ 
+ def make_map():
+     ds = mapnik.MemoryDatasource()
+@@ -56,14 +43,10 @@ if mapnik.has_grid_renderer():
+         g = mapnik.Grid(256, 256)
+         utf = g.encode()
+         # make sure it equals itself
+-        eq_(g.encode(), utf)
++        assert g.encode() == utf
+         m = make_map()
+         mapnik.render_layer(m, g, layer=0, fields=['__id__', 'Name'])
+-        eq_(g.encode() != utf, True)
++        assert g.encode() != utf
+         # clear grid, should now match original
+         g.clear()
+-        eq_(g.encode(), utf)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert g.encode() == utf
+--- a/test/python_tests/cairo_test.py
++++ b/test/python_tests/cairo_test.py
+@@ -1,21 +1,15 @@
+-#!/usr/bin/env python
+-
+-from __future__ import print_function
+-
+ import os
+ import shutil
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ 
+ def make_tmp_map():
+@@ -41,13 +35,12 @@ def make_tmp_map():
+     m.layers.append(lyr)
+     return m
+ 
+-
+ def draw_title(m, ctx, text, size=10, color=mapnik.Color('black')):
+     """ Draw a Map Title near the top of a page."""
+     middle = m.width / 2.0
+     ctx.set_source_rgba(*cairo_color(color))
+     ctx.select_font_face(
+-        "DejaVu Sans Book",
++        "Helvetica",
+         cairo.FONT_SLANT_NORMAL,
+         cairo.FONT_WEIGHT_NORMAL)
+     ctx.set_font_size(size)
+@@ -92,12 +85,12 @@ def cairo_color(c):
+ if mapnik.has_pycairo():
+     import cairo
+ 
+-    def test_passing_pycairo_context_svg():
++    def test_passing_pycairo_context_svg(setup):
+         m = make_tmp_map()
+         m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
+         test_cairo_file = '/tmp/mapnik-cairo-context-test.svg'
+         surface = cairo.SVGSurface(test_cairo_file, m.width, m.height)
+-        expected_cairo_file = './images/pycairo/cairo-cairo-expected.svg'
++        expected_cairo_file = 'images/pycairo/cairo-cairo-expected.svg'
+         context = cairo.Context(surface)
+         mapnik.render(m, context)
+         draw_title(m, context, "Hello Map", size=20)
+@@ -111,7 +104,7 @@ if mapnik.has_pycairo():
+             os.stat(test_cairo_file).st_size)
+         msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+             diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+-        eq_(diff < 1500, True, msg)
++        assert diff < 1500,  msg
+         os.remove(test_cairo_file)
+ 
+     def test_passing_pycairo_context_pdf():
+@@ -119,7 +112,7 @@ if mapnik.has_pycairo():
+         m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
+         test_cairo_file = '/tmp/mapnik-cairo-context-test.pdf'
+         surface = cairo.PDFSurface(test_cairo_file, m.width, m.height)
+-        expected_cairo_file = './images/pycairo/cairo-cairo-expected.pdf'
++        expected_cairo_file = 'images/pycairo/cairo-cairo-expected.pdf'
+         context = cairo.Context(surface)
+         mapnik.render(m, context)
+         draw_title(m, context, "Hello Map", size=20)
+@@ -133,7 +126,7 @@ if mapnik.has_pycairo():
+             os.stat(test_cairo_file).st_size)
+         msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+             diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+-        eq_(diff < 1500, True, msg)
++        assert diff < 1500, msg
+         os.remove(test_cairo_file)
+ 
+     def test_passing_pycairo_context_png():
+@@ -141,8 +134,8 @@ if mapnik.has_pycairo():
+         m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
+         test_cairo_file = '/tmp/mapnik-cairo-context-test.png'
+         surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, m.width, m.height)
+-        expected_cairo_file = './images/pycairo/cairo-cairo-expected.png'
+-        expected_cairo_file2 = './images/pycairo/cairo-cairo-expected-reduced.png'
++        expected_cairo_file = 'images/pycairo/cairo-cairo-expected.png'
++        expected_cairo_file2 = 'images/pycairo/cairo-cairo-expected-reduced.png'
+         context = cairo.Context(surface)
+         mapnik.render(m, context)
+         draw_title(m, context, "Hello Map", size=20)
+@@ -160,7 +153,7 @@ if mapnik.has_pycairo():
+             os.stat(test_cairo_file).st_size)
+         msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+             diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+-        eq_(diff < 500, True, msg)
++        assert diff < 500, msg
+         os.remove(test_cairo_file)
+         if not os.path.exists(
+                 expected_cairo_file2) or os.environ.get('UPDATE'):
+@@ -173,14 +166,14 @@ if mapnik.has_pycairo():
+             os.stat(reduced_color_image).st_size)
+         msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+             diff, reduced_color_image, 'tests/python_tests/' + expected_cairo_file2)
+-        eq_(diff < 500, True, msg)
++        assert diff < 500,  msg
+         os.remove(reduced_color_image)
+ 
+     if 'sqlite' in mapnik.DatasourceCache.plugin_names():
+         def _pycairo_surface(type, sym):
+             test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % (
+                 sym, type)
+-            expected_cairo_file = './images/pycairo/cairo-surface-expected.%s.%s' % (
++            expected_cairo_file = 'images/pycairo/cairo-surface-expected.%s.%s' % (
+                 sym, type)
+             m = mapnik.Map(256, 256)
+             mapnik.load_map(m, '../data/good_maps/%s_symbolizer.xml' % sym)
+@@ -207,9 +200,9 @@ if mapnik.has_pycairo():
+                 msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+                     diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+                 if os.uname()[0] == 'Darwin':
+-                    eq_(diff < 2100, True, msg)
++                    assert diff < 2100, msg
+                 else:
+-                    eq_(diff < 23000, True, msg)
++                    assert diff < 23000, msg
+                 os.remove(test_cairo_file)
+                 return True
+             else:
+@@ -219,23 +212,19 @@ if mapnik.has_pycairo():
+                 return True
+ 
+         def test_pycairo_svg_surface1():
+-            eq_(_pycairo_surface('svg', 'point'), True)
++            assert _pycairo_surface('svg', 'point')
+ 
+         def test_pycairo_svg_surface2():
+-            eq_(_pycairo_surface('svg', 'building'), True)
++            assert _pycairo_surface('svg', 'building')
+ 
+         def test_pycairo_svg_surface3():
+-            eq_(_pycairo_surface('svg', 'polygon'), True)
++            assert _pycairo_surface('svg', 'polygon')
+ 
+         def test_pycairo_pdf_surface1():
+-            eq_(_pycairo_surface('pdf', 'point'), True)
++            assert _pycairo_surface('pdf', 'point')
+ 
+         def test_pycairo_pdf_surface2():
+-            eq_(_pycairo_surface('pdf', 'building'), True)
++            assert _pycairo_surface('pdf', 'building')
+ 
+         def test_pycairo_pdf_surface3():
+-            eq_(_pycairo_surface('pdf', 'polygon'), True)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++            assert _pycairo_surface('pdf', 'polygon')
+--- a/test/python_tests/color_test.py
++++ b/test/python_tests/color_test.py
+@@ -1,121 +1,102 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+ def test_color_init():
+     c = mapnik.Color(12, 128, 255)
+-    eq_(c.r, 12)
+-    eq_(c.g, 128)
+-    eq_(c.b, 255)
+-    eq_(c.a, 255)
+-    eq_(False, c.get_premultiplied())
++    assert c.r == 12
++    assert c.g == 128
++    assert c.b == 255
++    assert c.a == 255
++    assert not c.get_premultiplied()
+     c = mapnik.Color(16, 32, 64, 128)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(False, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert not c.get_premultiplied()
+     c = mapnik.Color(16, 32, 64, 128, True)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(True, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert c.get_premultiplied()
+     c = mapnik.Color('rgba(16,32,64,0.5)')
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(False, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert not c.get_premultiplied()
+     c = mapnik.Color('rgba(16,32,64,0.5)', True)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(True, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert c.get_premultiplied()
+     hex_str = '#10204080'
+     c = mapnik.Color(hex_str)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(hex_str, c.to_hex_string())
+-    eq_(False, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert hex_str == c.to_hex_string()
++    assert not c.get_premultiplied()
+     c = mapnik.Color(hex_str, True)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(hex_str, c.to_hex_string())
+-    eq_(True, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert hex_str == c.to_hex_string()
++    assert c.get_premultiplied()
+     rgba_int = 2151686160
+     c = mapnik.Color(rgba_int)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(rgba_int, c.packed())
+-    eq_(False, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert rgba_int == c.packed()
++    assert not c.get_premultiplied()
+     c = mapnik.Color(rgba_int, True)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
+-    eq_(rgba_int, c.packed())
+-    eq_(True, c.get_premultiplied())
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
++    assert rgba_int == c.packed()
++    assert c.get_premultiplied()
+ 
+ 
+ def test_color_properties():
+     c = mapnik.Color(16, 32, 64, 128)
+-    eq_(c.r, 16)
+-    eq_(c.g, 32)
+-    eq_(c.b, 64)
+-    eq_(c.a, 128)
++    assert c.r == 16
++    assert c.g == 32
++    assert c.b == 64
++    assert c.a == 128
+     c.r = 17
+-    eq_(c.r, 17)
++    assert c.r == 17
+     c.g = 33
+-    eq_(c.g, 33)
++    assert c.g == 33
+     c.b = 65
+-    eq_(c.b, 65)
++    assert c.b == 65
+     c.a = 128
+-    eq_(c.a, 128)
++    assert c.a == 128
+ 
+ 
+ def test_color_premultiply():
+     c = mapnik.Color(16, 33, 255, 128)
+-    eq_(c.premultiply(), True)
+-    eq_(c.r, 8)
+-    eq_(c.g, 17)
+-    eq_(c.b, 128)
+-    eq_(c.a, 128)
++    assert c.premultiply()
++    assert c.r == 8
++    assert c.g == 17
++    assert c.b == 128
++    assert c.a == 128
+     # Repeating it again should do nothing
+-    eq_(c.premultiply(), False)
+-    eq_(c.r, 8)
+-    eq_(c.g, 17)
+-    eq_(c.b, 128)
+-    eq_(c.a, 128)
++    assert not c.premultiply()
++    assert c.r == 8
++    assert c.g == 17
++    assert c.b == 128
++    assert c.a == 128
+     c.demultiply()
+     c.demultiply()
+     # This will not return the same values as before but we expect that
+-    eq_(c.r, 15)
+-    eq_(c.g, 33)
+-    eq_(c.b, 255)
+-    eq_(c.a, 128)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert c.r == 15
++    assert c.g == 33
++    assert c.b == 255
++    assert c.a == 128
+--- a/test/python_tests/compare_test.py
++++ b/test/python_tests/compare_test.py
+@@ -1,48 +1,32 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+ def test_another_compare():
+     im = mapnik.Image(5, 5)
+     im2 = mapnik.Image(5, 5)
+     im2.fill(mapnik.Color('rgba(255,255,255,0)'))
+-    eq_(im.compare(im2, 16), im.width() * im.height())
+-
++    assert im.compare(im2, 16) == im.width() * im.height()
+ 
+ def test_compare_rgba8():
+     im = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+     im.fill(mapnik.Color(0, 0, 0, 0))
+-    eq_(im.compare(im), 0)
++    assert im.compare(im) == 0
+     im2 = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+     im2.fill(mapnik.Color(0, 0, 0, 0))
+-    eq_(im.compare(im2), 0)
+-    eq_(im2.compare(im), 0)
++    assert im.compare(im2) == 0
++    assert im2.compare(im) == 0
+     im2.fill(mapnik.Color(0, 0, 0, 12))
+-    eq_(im.compare(im2), 25)
+-    eq_(im.compare(im2, 0, False), 0)
++    assert im.compare(im2) == 25
++    assert im.compare(im2, 0, False) == 0
+     im3 = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+     im3.set_pixel(0, 0, mapnik.Color(0, 0, 0, 0))
+     im3.set_pixel(0, 1, mapnik.Color(1, 1, 1, 1))
+     im3.set_pixel(1, 0, mapnik.Color(2, 2, 2, 2))
+     im3.set_pixel(1, 1, mapnik.Color(3, 3, 3, 3))
+-    eq_(im.compare(im3), 3)
+-    eq_(im.compare(im3, 1), 2)
+-    eq_(im.compare(im3, 2), 1)
+-    eq_(im.compare(im3, 3), 0)
++    assert im.compare(im3) == 3
++    assert im.compare(im3, 1) == 2
++    assert im.compare(im3, 2) == 1
++    assert im.compare(im3, 3) == 0
+ 
+ 
+ def test_compare_2_image():
+@@ -50,75 +34,71 @@ def test_compare_2_image():
+     im.set_pixel(0, 0, mapnik.Color(254, 254, 254, 254))
+     im.set_pixel(4, 4, mapnik.Color('white'))
+     im2 = mapnik.Image(5, 5)
+-    eq_(im2.compare(im, 16), 2)
++    assert im2.compare(im, 16) == 2
+ 
+ 
+ def test_compare_dimensions():
+     im = mapnik.Image(2, 2)
+     im2 = mapnik.Image(3, 3)
+-    eq_(im.compare(im2), 4)
+-    eq_(im2.compare(im), 9)
++    assert im.compare(im2) == 4
++    assert im2.compare(im) == 9
+ 
+ 
+ def test_compare_gray8():
+     im = mapnik.Image(2, 2, mapnik.ImageType.gray8)
+     im.fill(0)
+-    eq_(im.compare(im), 0)
++    assert im.compare(im) == 0
+     im2 = mapnik.Image(2, 2, mapnik.ImageType.gray8)
+     im2.fill(0)
+-    eq_(im.compare(im2), 0)
+-    eq_(im2.compare(im), 0)
+-    eq_(im.compare(im2, 0, False), 0)
++    assert im.compare(im2) == 0
++    assert im2.compare(im) == 0
++    assert im.compare(im2, 0, False) == 0
+     im3 = mapnik.Image(2, 2, mapnik.ImageType.gray8)
+     im3.set_pixel(0, 0, 0)
+     im3.set_pixel(0, 1, 1)
+     im3.set_pixel(1, 0, 2)
+     im3.set_pixel(1, 1, 3)
+-    eq_(im.compare(im3), 3)
+-    eq_(im.compare(im3, 1), 2)
+-    eq_(im.compare(im3, 2), 1)
+-    eq_(im.compare(im3, 3), 0)
++    assert im.compare(im3) == 3
++    assert im.compare(im3, 1) == 2
++    assert im.compare(im3, 2) == 1
++    assert im.compare(im3, 3) == 0
+ 
+ 
+ def test_compare_gray16():
+     im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+     im.fill(0)
+-    eq_(im.compare(im), 0)
++    assert im.compare(im) == 0
+     im2 = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+     im2.fill(0)
+-    eq_(im.compare(im2), 0)
+-    eq_(im2.compare(im), 0)
+-    eq_(im.compare(im2, 0, False), 0)
++    assert im.compare(im2) == 0
++    assert im2.compare(im) == 0
++    assert im.compare(im2, 0, False) == 0
+     im3 = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+     im3.set_pixel(0, 0, 0)
+     im3.set_pixel(0, 1, 1)
+     im3.set_pixel(1, 0, 2)
+     im3.set_pixel(1, 1, 3)
+-    eq_(im.compare(im3), 3)
+-    eq_(im.compare(im3, 1), 2)
+-    eq_(im.compare(im3, 2), 1)
+-    eq_(im.compare(im3, 3), 0)
++    assert im.compare(im3) == 3
++    assert im.compare(im3, 1) == 2
++    assert im.compare(im3, 2) == 1
++    assert im.compare(im3, 3) == 0
+ 
+ 
+ def test_compare_gray32f():
+     im = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
+     im.fill(0.5)
+-    eq_(im.compare(im), 0)
++    assert im.compare(im) == 0
+     im2 = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
+     im2.fill(0.5)
+-    eq_(im.compare(im2), 0)
+-    eq_(im2.compare(im), 0)
+-    eq_(im.compare(im2, 0, False), 0)
++    assert im.compare(im2) == 0
++    assert im2.compare(im) == 0
++    assert im.compare(im2, 0, False) == 0
+     im3 = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
+     im3.set_pixel(0, 0, 0.5)
+     im3.set_pixel(0, 1, 1.5)
+     im3.set_pixel(1, 0, 2.5)
+     im3.set_pixel(1, 1, 3.5)
+-    eq_(im.compare(im3), 3)
+-    eq_(im.compare(im3, 1.0), 2)
+-    eq_(im.compare(im3, 2.0), 1)
+-    eq_(im.compare(im3, 3.0), 0)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert im.compare(im3) == 3
++    assert im.compare(im3, 1.0) == 2
++    assert im.compare(im3, 2.0) == 1
++    assert im.compare(im3, 3.0) == 0
+--- a/test/python_tests/compositing_test.py
++++ b/test/python_tests/compositing_test.py
+@@ -1,22 +1,14 @@
+-# encoding: utf8
+-
+-from __future__ import print_function
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import (get_unique_colors, pixel2channels, side_by_side_image, execution_path)
+ 
+-from .utilities import (execution_path, get_unique_colors, pixel2channels,
+-                        run_all, side_by_side_image)
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ def is_pre(color, alpha):
+     return (color * 255.0 / alpha) <= 255
+@@ -93,14 +85,14 @@ def validate_pixels_are_premultiplied(im
+     return (num_bad == 0, bad_pixels)
+ 
+ 
+-def test_compare_images():
+-    b = mapnik.Image.open('./images/support/b.png')
++def test_compare_images(setup):
++    b = mapnik.Image.open('images/support/b.png')
+     b.premultiply()
+     num_ops = len(mapnik.CompositeOp.names)
+     successes = []
+     fails = []
+     for name in mapnik.CompositeOp.names:
+-        a = mapnik.Image.open('./images/support/a.png')
++        a = mapnik.Image.open('images/support/a.png')
+         a.premultiply()
+         a.composite(b, getattr(mapnik.CompositeOp, name))
+         actual = '/tmp/mapnik-comp-op-test-' + name + '.png'
+@@ -132,53 +124,53 @@ def test_compare_images():
+                 name +
+                 '.fail.png',
+                 'png32')
+-    eq_(len(successes), num_ops, '\n' + '\n'.join(fails))
++    assert len(successes) == num_ops, '\n' + '\n'.join(fails)
+     b.demultiply()
+     # b will be slightly modified by pre and then de multiplication rounding errors
+     # TODO - write test to ensure the image is 99% the same.
+     #expected_b = mapnik.Image.open('./images/support/b.png')
+     # b.save('/tmp/mapnik-comp-op-test-original-mask.png')
+-    #eq_(b.tostring('png32'),expected_b.tostring('png32'), '/tmp/mapnik-comp-op-test-original-mask.png is no longer equivalent to original mask: ./images/support/b.png')
++    #assert b.tostring('png32') == expected_b.tostring('png32'), '/tmp/mapnik-comp-op-test-original-mask.png is no longer equivalent to original mask: ./images/support/b.png'
+ 
+ 
+ def test_pre_multiply_status():
+-    b = mapnik.Image.open('./images/support/b.png')
++    b = mapnik.Image.open('images/support/b.png')
+     # not premultiplied yet, should appear that way
+     result = validate_pixels_are_not_premultiplied(b)
+-    eq_(result, True)
++    assert result
+     # not yet premultiplied therefore should return false
+     result = validate_pixels_are_premultiplied(b)
+-    eq_(result[0], False)
++    assert not result[0]
+     # now actually premultiply the pixels
+     b.premultiply()
+     # now checking if premultiplied should succeed
+     result = validate_pixels_are_premultiplied(b)
+-    eq_(result[0], True)
++    assert result[0]
+     # should now not appear to look not premultiplied
+     result = validate_pixels_are_not_premultiplied(b)
+-    eq_(result, False)
++    assert not result
+     # now actually demultiply the pixels
+     b.demultiply()
+     # should now appear demultiplied
+     result = validate_pixels_are_not_premultiplied(b)
+-    eq_(result, True)
++    assert result
+ 
+ 
+ def test_pre_multiply_status_of_map1():
+     m = mapnik.Map(256, 256)
+     im = mapnik.Image(m.width, m.height)
+-    eq_(validate_pixels_are_not_premultiplied(im), True)
++    assert validate_pixels_are_not_premultiplied(im)
+     mapnik.render(m, im)
+-    eq_(validate_pixels_are_not_premultiplied(im), True)
++    assert validate_pixels_are_not_premultiplied(im)
+ 
+ 
+ def test_pre_multiply_status_of_map2():
+     m = mapnik.Map(256, 256)
+     m.background = mapnik.Color(1, 1, 1, 255)
+     im = mapnik.Image(m.width, m.height)
+-    eq_(validate_pixels_are_not_premultiplied(im), True)
++    assert validate_pixels_are_not_premultiplied(im)
+     mapnik.render(m, im)
+-    eq_(validate_pixels_are_not_premultiplied(im), True)
++    assert validate_pixels_are_not_premultiplied(im)
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+     def test_style_level_comp_op():
+@@ -215,7 +207,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+                     name +
+                     '.fail.png',
+                     'png32')
+-        eq_(len(fails), 0, '\n' + '\n'.join(fails))
++        assert len(fails) == 0, '\n' + '\n'.join(fails)
+ 
+     def test_style_level_opacity():
+         m = mapnik.Map(512, 512)
+@@ -228,10 +220,8 @@ if 'shape' in mapnik.DatasourceCache.plu
+         expected = 'images/support/mapnik-style-level-opacity.png'
+         im.save(actual, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'tests/python_tests/' + expected))
++        assert im.tostring('png32') == expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,
++                                                                                                                          'tests/python_tests/' + expected)
+ 
+ 
+ def test_rounding_and_color_expectations():
+@@ -239,24 +229,24 @@ def test_rounding_and_color_expectations
+     m.background = mapnik.Color('rgba(255,255,255,.4999999)')
+     im = mapnik.Image(m.width, m.height)
+     mapnik.render(m, im)
+-    eq_(get_unique_colors(im), ['rgba(255,255,255,127)'])
++    assert get_unique_colors(im) == ['rgba(255,255,255,127)']
+     m = mapnik.Map(1, 1)
+     m.background = mapnik.Color('rgba(255,255,255,.5)')
+     im = mapnik.Image(m.width, m.height)
+     mapnik.render(m, im)
+-    eq_(get_unique_colors(im), ['rgba(255,255,255,128)'])
++    assert get_unique_colors(im) == ['rgba(255,255,255,128)']
+     im_file = mapnik.Image.open('../data/images/stripes_pattern.png')
+-    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
++    assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,255)']
+     # should have no effect
+     im_file.premultiply()
+-    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
++    assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,255)']
+     im_file.apply_opacity(.5)
+     # should have effect now that image has transparency
+     im_file.premultiply()
+-    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(37,37,37,127)'])
++    assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(37,37,37,127)']
+     # should restore to original nonpremultiplied colors
+     im_file.demultiply()
+-    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,127)'])
++    assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,127)']
+ 
+ 
+ def test_background_image_and_background_color():
+@@ -265,7 +255,7 @@ def test_background_image_and_background
+     m.background_image = '../data/images/stripes_pattern.png'
+     im = mapnik.Image(m.width, m.height)
+     mapnik.render(m, im)
+-    eq_(get_unique_colors(im), ['rgba(255,255,255,128)', 'rgba(74,74,74,255)'])
++    assert get_unique_colors(im) == ['rgba(255,255,255,128)', 'rgba(74,74,74,255)']
+ 
+ 
+ def test_background_image_with_alpha_and_background_color():
+@@ -274,7 +264,7 @@ def test_background_image_with_alpha_and
+     m.background_image = '../data/images/yellow_half_trans.png'
+     im = mapnik.Image(m.width, m.height)
+     mapnik.render(m, im)
+-    eq_(get_unique_colors(im), ['rgba(255,255,85,191)'])
++    assert get_unique_colors(im) == ['rgba(255,255,85,191)']
+ 
+ 
+ def test_background_image_with_alpha_and_background_color_against_composited_control():
+@@ -295,8 +285,4 @@ def test_background_image_with_alpha_and
+     # compare image rendered (compositing in `agg_renderer<T>::setup`)
+     # vs image composited via python bindings
+     #raise Todo("looks like we need to investigate PNG color rounding when saving")
+-    # eq_(get_unique_colors(im),get_unique_colors(im1))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    #assert get_unique_colors(im) == get_unique_colors(im1)
+--- a/test/python_tests/copy_test.py
++++ b/test/python_tests/copy_test.py
+@@ -1,21 +1,5 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+ def test_image_16_8_simple():
+     im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+     im.set_pixel(0, 0, 256)
+@@ -23,16 +7,16 @@ def test_image_16_8_simple():
+     im.set_pixel(1, 0, 5)
+     im.set_pixel(1, 1, 2)
+     im2 = im.copy(mapnik.ImageType.gray8)
+-    eq_(im2.get_pixel(0, 0), 255)
+-    eq_(im2.get_pixel(0, 1), 255)
+-    eq_(im2.get_pixel(1, 0), 5)
+-    eq_(im2.get_pixel(1, 1), 2)
++    assert im2.get_pixel(0, 0) ==  255
++    assert im2.get_pixel(0, 1) ==  255
++    assert im2.get_pixel(1, 0) ==  5
++    assert im2.get_pixel(1, 1) ==  2
+     # Cast back!
+     im = im2.copy(mapnik.ImageType.gray16)
+-    eq_(im.get_pixel(0, 0), 255)
+-    eq_(im.get_pixel(0, 1), 255)
+-    eq_(im.get_pixel(1, 0), 5)
+-    eq_(im.get_pixel(1, 1), 2)
++    assert im.get_pixel(0, 0) ==  255
++    assert im.get_pixel(0, 1) ==  255
++    assert im.get_pixel(1, 0) ==  5
++    assert im.get_pixel(1, 1) ==  2
+ 
+ 
+ def test_image_32f_8_simple():
+@@ -42,20 +26,20 @@ def test_image_32f_8_simple():
+     im.set_pixel(1, 0, 120.6)
+     im.set_pixel(1, 1, 360.2)
+     im2 = im.copy(mapnik.ImageType.gray8)
+-    eq_(im2.get_pixel(0, 0), 120)
+-    eq_(im2.get_pixel(0, 1), 0)
+-    eq_(im2.get_pixel(1, 0), 120)  # Notice this is truncated!
+-    eq_(im2.get_pixel(1, 1), 255)
++    assert im2.get_pixel(0, 0) ==  120
++    assert im2.get_pixel(0, 1) ==  0
++    assert im2.get_pixel(1, 0) ==  120  # Notice this is truncated!
++    assert im2.get_pixel(1, 1) ==  255
+ 
+ 
+ def test_image_offset_and_scale():
+     im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+-    eq_(im.offset, 0.0)
+-    eq_(im.scaling, 1.0)
++    assert im.offset ==  0.0
++    assert im.scaling ==  1.0
+     im.offset = 1.0
+     im.scaling = 2.0
+-    eq_(im.offset, 1.0)
+-    eq_(im.scaling, 2.0)
++    assert im.offset ==  1.0
++    assert im.scaling ==  2.0
+ 
+ 
+ def test_image_16_8_scale_and_offset():
+@@ -67,17 +51,17 @@ def test_image_16_8_scale_and_offset():
+     offset = 255
+     scaling = 3
+     im2 = im.copy(mapnik.ImageType.gray8, offset, scaling)
+-    eq_(im2.get_pixel(0, 0), 0)
+-    eq_(im2.get_pixel(0, 1), 1)
+-    eq_(im2.get_pixel(1, 0), 255)
+-    eq_(im2.get_pixel(1, 1), 120)
++    assert im2.get_pixel(0, 0) ==  0
++    assert im2.get_pixel(0, 1) ==  1
++    assert im2.get_pixel(1, 0) ==  255
++    assert im2.get_pixel(1, 1) ==  120
+     # pixels will be a little off due to offsets in reverting!
+     im3 = im2.copy(mapnik.ImageType.gray16)
+-    eq_(im3.get_pixel(0, 0), 255)  # Rounding error with ints
+-    eq_(im3.get_pixel(0, 1), 258)  # same
++    assert im3.get_pixel(0, 0) ==  255  # Rounding error with ints
++    assert im3.get_pixel(0, 1) ==  258  # same
+     # The other one was way out of range for our scale/offset
+-    eq_(im3.get_pixel(1, 0), 1020)
+-    eq_(im3.get_pixel(1, 1), 615)  # same
++    assert im3.get_pixel(1, 0) ==  1020
++    assert im3.get_pixel(1, 1) ==  615  # same
+ 
+ 
+ def test_image_16_32f_scale_and_offset():
+@@ -89,16 +73,12 @@ def test_image_16_32f_scale_and_offset()
+     offset = 255
+     scaling = 3.2
+     im2 = im.copy(mapnik.ImageType.gray32f, offset, scaling)
+-    eq_(im2.get_pixel(0, 0), 0.3125)
+-    eq_(im2.get_pixel(0, 1), 0.9375)
+-    eq_(im2.get_pixel(1, 0), -79.6875)
+-    eq_(im2.get_pixel(1, 1), 112.5)
++    assert im2.get_pixel(0, 0) ==  0.3125
++    assert im2.get_pixel(0, 1) ==  0.9375
++    assert im2.get_pixel(1, 0) ==  -79.6875
++    assert im2.get_pixel(1, 1) ==  112.5
+     im3 = im2.copy(mapnik.ImageType.gray16)
+-    eq_(im3.get_pixel(0, 0), 256)
+-    eq_(im3.get_pixel(0, 1), 258)
+-    eq_(im3.get_pixel(1, 0), 0)
+-    eq_(im3.get_pixel(1, 1), 615)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert im3.get_pixel(0, 0) ==  256
++    assert im3.get_pixel(0, 1) ==  258
++    assert im3.get_pixel(1, 0) ==  0
++    assert im3.get_pixel(1, 1) ==  615
+--- a/test/python_tests/csv_test.py
++++ b/test/python_tests/csv_test.py
+@@ -1,32 +1,15 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-from __future__ import print_function
+-
+ import glob
+ import os
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
+-
++import pytest
+ from .utilities import execution_path
+ 
+-
+-default_logging_severity = mapnik.logger.get_severity()
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+-    # make the tests silent since we intentially test error conditions that
+-    # are noisy
+-    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
+-
+-def teardown():
+-    mapnik.logger.set_severity(default_logging_severity)
++    yield
+ 
+ if 'csv' in mapnik.DatasourceCache.plugin_names():
+ 
+@@ -34,7 +17,7 @@ if 'csv' in mapnik.DatasourceCache.plugi
+         return mapnik.Datasource(
+             type='csv', file=os.path.join('../data/csv/', filename))
+ 
+-    def test_broken_files(visual=False):
++    def test_broken_files(setup, visual=False):
+         broken = glob.glob("../data/csv/fails/*.*")
+         broken.extend(glob.glob("../data/csv/warns/*.*"))
+ 
+@@ -49,10 +32,11 @@ if 'csv' in mapnik.DatasourceCache.plugi
+                 except Exception:
+                     print('\x1b[1;32m? \x1b[0m', csv)
+ 
+-    def test_good_files(visual=False):
++    def test_good_files(setup, visual=False):
+         good_files = glob.glob("../data/csv/*.*")
+         good_files.extend(glob.glob("../data/csv/warns/*.*"))
+         ignorable = os.path.join('..', 'data', 'csv', 'long_lat.vrt')
++        print("ignorable:", ignorable)
+         good_files.remove(ignorable)
+         for f in good_files:
+             if f.endswith('.index'):
+@@ -70,46 +54,45 @@ if 'csv' in mapnik.DatasourceCache.plugi
+ 
+     def test_lon_lat_detection(**kwargs):
+         ds = get_csv_ds('lon_lat.csv')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['lon', 'lat'])
+-        eq_(ds.field_types(), ['int', 'int'])
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['lon', 'lat']
++        assert ds.field_types() == ['int', 'int']
+         query = mapnik.Query(ds.envelope())
+         for fld in ds.fields():
+             query.add_property_name(fld)
+         fs = ds.features(query)
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+         feat = fs.next()
+         attr = {'lon': 0, 'lat': 0}
+-        eq_(feat.attributes, attr)
++        assert feat.attributes ==  attr
+ 
+     def test_lng_lat_detection(**kwargs):
+         ds = get_csv_ds('lng_lat.csv')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['lng', 'lat'])
+-        eq_(ds.field_types(), ['int', 'int'])
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['lng', 'lat']
++        assert ds.field_types() == ['int', 'int']
+         query = mapnik.Query(ds.envelope())
+         for fld in ds.fields():
+             query.add_property_name(fld)
+         fs = ds.features(query)
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+         feat = fs.next()
+         attr = {'lng': 0, 'lat': 0}
+-        eq_(feat.attributes, attr)
++        assert feat.attributes ==  attr
+ 
+     def test_type_detection(**kwargs):
+         ds = get_csv_ds('nypd.csv')
+-        eq_(ds.fields(),
+-            ['Precinct',
+-             'Phone',
+-             'Address',
+-             'City',
+-             'geo_longitude',
+-             'geo_latitude',
+-             'geo_accuracy'])
+-        eq_(ds.field_types(), ['str', 'str',
+-                               'str', 'str', 'float', 'float', 'str'])
++        assert ds.fields() == ['Precinct',
++                               'Phone',
++                               'Address',
++                               'City',
++                               'geo_longitude',
++                               'geo_latitude',
++                               'geo_accuracy']
++        assert ds.field_types() == ['str', 'str',
++                                    'str', 'str', 'float', 'float', 'str']
+         feat = ds.featureset().next()
+         attr = {
+             'City': u'New York, NY',
+@@ -119,33 +102,33 @@ if 'csv' in mapnik.DatasourceCache.plugi
+             'Precinct': u'5th Precinct',
+             'geo_longitude': -70,
+             'geo_latitude': 40}
+-        eq_(feat.attributes, attr)
+-        eq_(len(list(ds.all_features())), 2)
++        assert feat.attributes ==  attr
++        assert len(list(ds.all_features())) ==  2
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_skipping_blank_rows(**kwargs):
+         ds = get_csv_ds('blank_rows.csv')
+-        eq_(ds.fields(), ['x', 'y', 'name'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
+-        eq_(len(list(ds.all_features())), 2)
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert ds.fields() == ['x', 'y', 'name']
++        assert ds.field_types() == ['int', 'int', 'str']
++        assert len(list(ds.all_features())) ==  2
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_empty_rows(**kwargs):
+         ds = get_csv_ds('empty_rows.csv')
+-        eq_(len(ds.fields()), 10)
+-        eq_(len(ds.field_types()), 10)
+-        eq_(ds.fields(), ['x', 'y', 'text', 'date', 'integer',
+-                          'boolean', 'float', 'time', 'datetime', 'empty_column'])
+-        eq_(ds.field_types(), ['int', 'int', 'str', 'str',
+-                               'int', 'bool', 'float', 'str', 'str', 'str'])
++        assert len(ds.fields()) ==  10
++        assert len(ds.field_types()) ==  10
++        assert ds.fields() == ['x', 'y', 'text', 'date', 'integer',
++                               'boolean', 'float', 'time', 'datetime', 'empty_column']
++        assert ds.field_types() == ['int', 'int', 'str', 'str',
++                                    'int', 'bool', 'float', 'str', 'str', 'str']
+         fs = ds.featureset()
+         attr = {
+             'x': 0,
+@@ -162,146 +145,138 @@ if 'csv' in mapnik.DatasourceCache.plugi
+         for feat in fs:
+             if first:
+                 first = False
+-                eq_(feat.attributes, attr)
+-            eq_(len(feat), 10)
+-            eq_(feat['empty_column'], u'')
++                assert feat.attributes ==  attr
++            assert len(feat) ==  10
++            assert feat['empty_column'] ==  u''
+ 
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_slashes(**kwargs):
+         ds = get_csv_ds('has_attributes_with_slashes.csv')
+-        eq_(len(ds.fields()), 3)
++        assert len(ds.fields()) ==  3
+         fs = list(ds.all_features())
+-        eq_(fs[0].attributes, {'x': 0, 'y': 0, 'name': u'a/a'})
+-        eq_(fs[1].attributes, {'x': 1, 'y': 4, 'name': u'b/b'})
+-        eq_(fs[2].attributes, {'x': 10, 'y': 2.5, 'name': u'c/c'})
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert fs[0].attributes == {'x': 0, 'y': 0, 'name': u'a/a'}
++        assert fs[1].attributes == {'x': 1, 'y': 4, 'name': u'b/b'}
++        assert fs[2].attributes == {'x': 10, 'y': 2.5,  'name': u'c/c'}
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_wkt_field(**kwargs):
+         ds = get_csv_ds('wkt.csv')
+-        eq_(len(ds.fields()), 1)
+-        eq_(ds.fields(), ['type'])
+-        eq_(ds.field_types(), ['str'])
++        assert len(ds.fields()) ==  1
++        assert ds.fields() ==  ['type']
++        assert ds.field_types() ==  ['str']
+         fs = list(ds.all_features())
+-        # eq_(len(fs[0].geometries()),1)
+-        eq_(fs[0].geometry.type(), mapnik.GeometryType.Point)
+-        # eq_(len(fs[1].geometries()),1)
+-        eq_(fs[1].geometry.type(), mapnik.GeometryType.LineString)
+-        # eq_(len(fs[2].geometries()),1)
+-        eq_(fs[2].geometry.type(), mapnik.GeometryType.Polygon)
+-        # eq_(len(fs[3].geometries()),1) # one geometry, two parts
+-        eq_(fs[3].geometry.type(), mapnik.GeometryType.Polygon)
+-        # eq_(len(fs[4].geometries()),4)
+-        eq_(fs[4].geometry.type(), mapnik.GeometryType.MultiPoint)
+-        # eq_(len(fs[5].geometries()),2)
+-        eq_(fs[5].geometry.type(), mapnik.GeometryType.MultiLineString)
+-        # eq_(len(fs[6].geometries()),2)
+-        eq_(fs[6].geometry.type(), mapnik.GeometryType.MultiPolygon)
+-        # eq_(len(fs[7].geometries()),2)
+-        eq_(fs[7].geometry.type(), mapnik.GeometryType.MultiPolygon)
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Collection)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert fs[0].geometry.type() ==  mapnik.GeometryType.Point
++        assert fs[1].geometry.type() ==  mapnik.GeometryType.LineString
++        assert fs[2].geometry.type() ==  mapnik.GeometryType.Polygon
++        assert fs[3].geometry.type() ==  mapnik.GeometryType.Polygon
++        assert fs[4].geometry.type() ==  mapnik.GeometryType.MultiPoint
++        assert fs[5].geometry.type() ==  mapnik.GeometryType.MultiLineString
++        assert fs[6].geometry.type() ==  mapnik.GeometryType.MultiPolygon
++        assert fs[7].geometry.type() ==  mapnik.GeometryType.MultiPolygon
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Collection
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_handling_of_missing_header(**kwargs):
+         ds = get_csv_ds('missing_header.csv')
+-        eq_(len(ds.fields()), 6)
+-        eq_(ds.fields(), ['one', 'two', 'x', 'y', '_4', 'aftermissing'])
++        assert len(ds.fields()) ==  6
++        assert ds.fields() == ['one', 'two', 'x', 'y', '_4', 'aftermissing']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['_4'], 'missing')
++        assert feat['_4'] ==  'missing'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_handling_of_headers_that_are_numbers(**kwargs):
+         ds = get_csv_ds('numbers_for_headers.csv')
+-        eq_(len(ds.fields()), 5)
+-        eq_(ds.fields(), ['x', 'y', '1990', '1991', '1992'])
++        assert len(ds.fields()) ==  5
++        assert ds.fields() == ['x', 'y', '1990', '1991', '1992']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['1990'], 1)
+-        eq_(feat['1991'], 2)
+-        eq_(feat['1992'], 3)
+-        eq_(mapnik.Expression("[1991]=2").evaluate(feat), True)
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['1990'] ==  1
++        assert feat['1991'] ==  2
++        assert feat['1992'] ==  3
++        assert mapnik.Expression("[1991]=2").evaluate(feat)
+ 
+     def test_quoted_numbers(**kwargs):
+         ds = get_csv_ds('points.csv')
+-        eq_(len(ds.fields()), 6)
+-        eq_(ds.fields(), ['lat', 'long', 'name', 'nr', 'color', 'placements'])
++        assert len(ds.fields()) ==  6
++        assert ds.fields() == ['lat', 'long', 'name', 'nr', 'color', 'placements']
+         fs = list(ds.all_features())
+-        eq_(fs[0]['placements'], "N,S,E,W,SW,10,5")
+-        eq_(fs[1]['placements'], "N,S,E,W,SW,10,5")
+-        eq_(fs[2]['placements'], "N,S,E,W,SW,10,5")
+-        eq_(fs[3]['placements'], "N,S,E,W,SW,10,5")
+-        eq_(fs[4]['placements'], "N,S,E,W,SW,10,5")
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert fs[0]['placements'] == "N,S,E,W,SW,10,5"
++        assert fs[1]['placements'] == "N,S,E,W,SW,10,5"
++        assert fs[2]['placements'] == "N,S,E,W,SW,10,5"
++        assert fs[3]['placements'] == "N,S,E,W,SW,10,5"
++        assert fs[4]['placements'] == "N,S,E,W,SW,10,5"
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_reading_windows_newlines(**kwargs):
+         ds = get_csv_ds('windows_newlines.csv')
+-        eq_(len(ds.fields()), 3)
++        assert len(ds.fields()) ==  3
+         feats = list(ds.all_features())
+-        eq_(len(feats), 1)
++        assert len(feats) ==  1
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 1)
+-        eq_(feat['y'], 10)
+-        eq_(feat['z'], 9999.9999)
++        assert feat['x'] ==  1
++        assert feat['y'] ==  10
++        assert feat['z'] ==  9999.9999
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_reading_mac_newlines(**kwargs):
+         ds = get_csv_ds('mac_newlines.csv')
+-        eq_(len(ds.fields()), 3)
++        assert len(ds.fields()) ==  3
+         feats = list(ds.all_features())
+-        eq_(len(feats), 1)
++        assert len(feats) ==  1
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 1)
+-        eq_(feat['y'], 10)
+-        eq_(feat['z'], 9999.9999)
++        assert feat['x'] ==  1
++        assert feat['y'] ==  10
++        assert feat['z'] ==  9999.9999
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def check_newlines(filename):
+         ds = get_csv_ds(filename)
+-        eq_(len(ds.fields()), 3)
++        assert len(ds.fields()) ==  3
+         feats = list(ds.all_features())
+-        eq_(len(feats), 1)
++        assert len(feats) ==  1
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['line'], 'many\n  lines\n  of text\n  with unix newlines')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['line'] ==  'many\n  lines\n  of text\n  with unix newlines'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_mixed_mac_unix_newlines(**kwargs):
+         check_newlines('mac_newlines_with_unix_inline.csv')
+@@ -325,111 +300,112 @@ if 'csv' in mapnik.DatasourceCache.plugi
+ 
+     def test_tabs(**kwargs):
+         ds = get_csv_ds('tabs_in_csv.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'z'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'z']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], -122)
+-        eq_(feat['y'], 48)
+-        eq_(feat['z'], 0)
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert feat['x'] ==  -122
++        assert feat['y'] ==  48
++        assert feat['z'] ==  0
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_separator_pipes(**kwargs):
+         ds = get_csv_ds('pipe_delimiters.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'z'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'z']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['z'], 'hello')
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['z'] ==  'hello'
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_separator_semicolon(**kwargs):
+         ds = get_csv_ds('semicolon_delimiters.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'z'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'z']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['z'], 'hello')
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['z'] ==  'hello'
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_that_null_and_bool_keywords_are_empty_strings(**kwargs):
+         ds = get_csv_ds('nulls_and_booleans_as_strings.csv')
+-        eq_(len(ds.fields()), 4)
+-        eq_(ds.fields(), ['x', 'y', 'null', 'boolean'])
+-        eq_(ds.field_types(), ['int', 'int', 'str', 'bool'])
+-        fs = ds.featureset()
+-        feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['null'], 'null')
+-        eq_(feat['boolean'], True)
+-        feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['null'], '')
+-        eq_(feat['boolean'], False)
++        assert len(ds.fields()) ==  4
++        assert ds.fields() == ['x', 'y', 'null', 'boolean']
++        assert ds.field_types() == ['int', 'int', 'str', 'bool']
++        fs = ds.featureset()
++        feat = fs.next()
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['null'] ==  'null'
++        assert feat['boolean'] ==  True
++        feat = fs.next()
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['null'] ==  ''
++        assert feat['boolean'] ==  False
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-    @raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
+-        ds = get_csv_ds('lon_lat.csv')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['lon', 'lat'])
+-        eq_(ds.field_types(), ['int', 'int'])
+-        query = mapnik.Query(ds.envelope())
+-        for fld in ds.fields():
+-            query.add_property_name(fld)
+-        # also add an invalid one, triggering throw
+-        query.add_property_name('bogus')
+-        ds.features(query)
++        with pytest.raises(RuntimeError):
++            ds = get_csv_ds('lon_lat.csv')
++            assert len(ds.fields()) ==  2
++            assert ds.fields() == ['lon', 'lat']
++            assert ds.field_types() == ['int', 'int']
++            query = mapnik.Query(ds.envelope())
++            for fld in ds.fields():
++                query.add_property_name(fld)
++            # also add an invalid one, triggering throw
++            query.add_property_name('bogus')
++            ds.features(query)
++
+ 
+     def test_that_leading_zeros_mean_strings(**kwargs):
+         ds = get_csv_ds('leading_zeros.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'fips'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'fips']
++        assert ds.field_types() == ['int', 'int', 'str']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['fips'], '001')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['fips'] ==  '001'
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['fips'], '003')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['fips'] ==  '003'
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['fips'], '005')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['fips'] ==  '005'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_advanced_geometry_detection(**kwargs):
+         ds = get_csv_ds('point_wkt.csv')
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.Point
+         ds = get_csv_ds('poly_wkt.csv')
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Polygon)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.Polygon
+         ds = get_csv_ds('multi_poly_wkt.csv')
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Polygon)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.Polygon
+         ds = get_csv_ds('line_wkt.csv')
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.LineString)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.LineString
+ 
+     def test_creation_of_csv_from_in_memory_string(**kwargs):
+         csv_string = '''
+@@ -437,10 +413,10 @@ if 'csv' in mapnik.DatasourceCache.plugi
+           "POINT (120.15 48.47)","Winthrop, WA"
+           '''  # csv plugin will test lines <= 10 chars for being fully blank
+         ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.Point
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['Name'], u"Winthrop, WA")
++        assert feat['Name'] == u"Winthrop, WA"
+ 
+     def test_creation_of_csv_from_in_memory_string_with_uft8(**kwargs):
+         csv_string = '''
+@@ -448,37 +424,29 @@ if 'csv' in mapnik.DatasourceCache.plugi
+          "POINT (120.15 48.47)","Québec"
+           '''  # csv plugin will test lines <= 10 chars for being fully blank
+         ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+-        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
++        assert ds.describe()['geometry_type'] ==  mapnik.DataGeometryType.Point
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['Name'], u"Québec")
++        assert feat['Name'] ==  u"Québec"
+ 
+     def validate_geojson_datasource(ds):
+-        eq_(len(ds.fields()), 1)
+-        eq_(ds.fields(), ['type'])
+-        eq_(ds.field_types(), ['str'])
++        assert len(ds.fields()) ==  1
++        assert ds.fields() ==  ['type']
++        assert ds.field_types() ==  ['str']
+         fs = list(ds.all_features())
+-        # eq_(len(fs[0].geometries()),1)
+-        eq_(fs[0].geometry.type(), mapnik.GeometryType.Point)
+-        # eq_(len(fs[1].geometries()),1)
+-        eq_(fs[1].geometry.type(), mapnik.GeometryType.LineString)
+-        # eq_(len(fs[2].geometries()),1)
+-        eq_(fs[2].geometry.type(), mapnik.GeometryType.Polygon)
+-        # eq_(len(fs[3].geometries()),1) # one geometry, two parts
+-        eq_(fs[3].geometry.type(), mapnik.GeometryType.Polygon)
+-        # eq_(len(fs[4].geometries()),4)
+-        eq_(fs[4].geometry.type(), mapnik.GeometryType.MultiPoint)
+-        # eq_(len(fs[5].geometries()),2)
+-        eq_(fs[5].geometry.type(), mapnik.GeometryType.MultiLineString)
+-        # eq_(len(fs[6].geometries()),2)
+-        eq_(fs[6].geometry.type(), mapnik.GeometryType.MultiPolygon)
+-        # eq_(len(fs[7].geometries()),2)
+-        eq_(fs[7].geometry.type(), mapnik.GeometryType.MultiPolygon)
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Collection)
+-        eq_(desc['name'], 'csv')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert fs[0].geometry.type() ==  mapnik.GeometryType.Point
++        assert fs[1].geometry.type() ==  mapnik.GeometryType.LineString
++        assert fs[2].geometry.type() ==  mapnik.GeometryType.Polygon
++        assert fs[3].geometry.type() ==  mapnik.GeometryType.Polygon
++        assert fs[4].geometry.type() ==  mapnik.GeometryType.MultiPoint
++        assert fs[5].geometry.type() ==  mapnik.GeometryType.MultiLineString
++        assert fs[6].geometry.type() ==  mapnik.GeometryType.MultiPolygon
++        assert fs[7].geometry.type() ==  mapnik.GeometryType.MultiPolygon
++        desc = ds.describe()
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Collection
++        assert desc['name'] ==  'csv'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+     def test_json_field1(**kwargs):
+         ds = get_csv_ds('geojson_double_quote_escape.csv')
+@@ -494,129 +462,129 @@ if 'csv' in mapnik.DatasourceCache.plugi
+ 
+     def test_that_blank_undelimited_rows_are_still_parsed(**kwargs):
+         ds = get_csv_ds('more_headers_than_column_values.csv')
+-        eq_(len(ds.fields()), 0)
+-        eq_(ds.fields(), [])
+-        eq_(ds.field_types(), [])
++        assert len(ds.fields()) ==  0
++        assert ds.fields() ==  []
++        assert ds.field_types() ==  []
+         fs = list(ds.featureset())
+-        eq_(len(fs), 0)
++        assert len(fs) ==  0
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], None)
++        assert desc['geometry_type'] ==  None
+ 
+-    @raises(RuntimeError)
+     def test_that_fewer_headers_than_rows_throws(**kwargs):
+-        # this has invalid header # so throw
+-        get_csv_ds('more_column_values_than_headers.csv')
++        with pytest.raises(RuntimeError):
++            # this has invalid header # so throw
++            get_csv_ds('more_column_values_than_headers.csv')
+ 
+     def test_that_feature_id_only_incremented_for_valid_rows(**kwargs):
+         ds = mapnik.Datasource(type='csv',
+                                file=os.path.join('../data/csv/warns', 'feature_id_counting.csv'))
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'id'])
+-        eq_(ds.field_types(), ['int', 'int', 'int'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'id']
++        assert ds.field_types() == ['int', 'int', 'int']
+         fs = ds.featureset()
+         # first
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['id'], 1)
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['id'] ==  1
+         # second, should have skipped bogus one
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['id'], 2)
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['id'] ==  2
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 2)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  2
+ 
+     def test_dynamically_defining_headers1(**kwargs):
+         ds = mapnik.Datasource(type='csv',
+                                file=os.path.join(
+                                    '../data/csv/fails', 'needs_headers_two_lines.csv'),
+                                headers='x,y,name')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'name'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'name']
++        assert ds.field_types() == ['int', 'int', 'str']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['name'], 'data_name')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['name'] ==  'data_name'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 2)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  2
+ 
+     def test_dynamically_defining_headers2(**kwargs):
+         ds = mapnik.Datasource(type='csv',
+                                file=os.path.join(
+                                    '../data/csv/fails', 'needs_headers_one_line.csv'),
+                                headers='x,y,name')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'name'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'name']
++        assert ds.field_types() == ['int', 'int', 'str']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['name'], 'data_name')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['name'] ==  'data_name'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 1)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  1
+ 
+     def test_dynamically_defining_headers3(**kwargs):
+         ds = mapnik.Datasource(type='csv',
+                                file=os.path.join(
+                                    '../data/csv/fails', 'needs_headers_one_line_no_newline.csv'),
+                                headers='x,y,name')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'name'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'name']
++        assert ds.field_types() == ['int', 'int', 'str']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['x'], 0)
+-        eq_(feat['y'], 0)
+-        eq_(feat['name'], 'data_name')
++        assert feat['x'] ==  0
++        assert feat['y'] ==  0
++        assert feat['name'] ==  'data_name'
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 1)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  1
+ 
+     def test_that_64bit_int_fields_work(**kwargs):
+         ds = get_csv_ds('64bit_int.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'bigint'])
+-        eq_(ds.field_types(), ['int', 'int', 'int'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'bigint']
++        assert ds.field_types() == ['int', 'int', 'int']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['bigint'], 2147483648)
++        assert feat['bigint'] ==  2147483648
+         feat = fs.next()
+-        eq_(feat['bigint'], 9223372036854775807)
+-        eq_(feat['bigint'], 0x7FFFFFFFFFFFFFFF)
++        assert feat['bigint'] ==  9223372036854775807
++        assert feat['bigint'] ==  0x7FFFFFFFFFFFFFFF
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 2)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  2
+ 
+     def test_various_number_types(**kwargs):
+         ds = get_csv_ds('number_types.csv')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['x', 'y', 'floats'])
+-        eq_(ds.field_types(), ['int', 'int', 'float'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['x', 'y', 'floats']
++        assert ds.field_types() == ['int', 'int', 'float']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['floats'], .0)
++        assert feat['floats'] ==  .0
+         feat = fs.next()
+-        eq_(feat['floats'], +.0)
++        assert feat['floats'] ==  +.0
+         feat = fs.next()
+-        eq_(feat['floats'], 1e-06)
++        assert feat['floats'] ==  1e-06
+         feat = fs.next()
+-        eq_(feat['floats'], -1e-06)
++        assert feat['floats'] ==  -1e-06
+         feat = fs.next()
+-        eq_(feat['floats'], 0.000001)
++        assert feat['floats'] ==  0.000001
+         feat = fs.next()
+-        eq_(feat['floats'], 1.234e+16)
++        assert feat['floats'] ==  1.234e+16
+         feat = fs.next()
+-        eq_(feat['floats'], 1.234e+16)
++        assert feat['floats'] ==  1.234e+16
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(len(list(ds.all_features())), 8)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert len(list(ds.all_features())) ==  8
+ 
+     def test_manually_supplied_extent(**kwargs):
+         csv_string = '''
+@@ -625,21 +593,17 @@ if 'csv' in mapnik.DatasourceCache.plugi
+         ds = mapnik.Datasource(
+             **{"type": "csv", "extent": "-180,-90,180,90", "inline": csv_string})
+         b = ds.envelope()
+-        eq_(b.minx, -180)
+-        eq_(b.miny, -90)
+-        eq_(b.maxx, 180)
+-        eq_(b.maxy, 90)
++        assert b.minx ==  -180
++        assert b.miny ==  -90
++        assert b.maxx ==  180
++        assert b.maxy ==  90
+ 
+     def test_inline_geojson(**kwargs):
+         csv_string = "geojson\n'{\"coordinates\":[-92.22568,38.59553],\"type\":\"Point\"}'"
+         ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+-        eq_(len(ds.fields()), 0)
+-        eq_(ds.fields(), [])
+-        # FIXME - re-enable after https://github.com/mapnik/mapnik/issues/2319 is fixed
+-        #fs = ds.featureset()
+-        #feat = fs.next()
+-        # eq_(feat.num_geometries(),1)
+-
+-if __name__ == "__main__":
+-    setup()
+-    [eval(run)(visual=True) for run in dir() if 'test_' in run]
++        assert len(ds.fields()) ==  0
++        assert ds.fields() ==  []
++        fs = ds.featureset()
++        feat = fs.next()
++        assert feat.geometry.type() == mapnik.GeometryType.Point
++        assert feat.geometry.to_wkt() == "POINT(-92.22568 38.59553)"
+--- a/test/python_tests/datasource_test.py
++++ b/test/python_tests/datasource_test.py
+@@ -1,82 +1,71 @@
+-#!/usr/bin/env python
+ import os
+ import sys
+-from itertools import groupby
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
++from itertools import groupby
+ 
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-if PYTHON3:
+-    xrange = range
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ def test_that_datasources_exist():
+     if len(mapnik.DatasourceCache.plugin_names()) == 0:
+         print('***NOTICE*** - no datasource plugins have been loaded')
+ 
+ # adapted from raster_symboliser_test#test_dataraster_query_point
+-
+-
+-@raises(RuntimeError)
+-def test_vrt_referring_to_missing_files():
+-    srs = '+init=epsg:32630'
+-    if 'gdal' in mapnik.DatasourceCache.plugin_names():
+-        lyr = mapnik.Layer('dataraster')
+-        lyr.datasource = mapnik.Gdal(
+-            file='../data/raster/missing_raster.vrt',
+-            band=1,
+-        )
+-        lyr.srs = srs
+-        _map = mapnik.Map(256, 256, srs)
+-        _map.layers.append(lyr)
+-
+-        # center of extent of raster
+-        x, y = 556113.0, 4381428.0  # center of extent of raster
+-
+-        _map.zoom_all()
+-
+-        # Fancy stuff to supress output of error
+-        # open 2 fds
+-        null_fds = [os.open(os.devnull, os.O_RDWR) for x in xrange(2)]
+-        # save the current file descriptors to a tuple
+-        save = os.dup(1), os.dup(2)
+-        # put /dev/null fds on 1 and 2
+-        os.dup2(null_fds[0], 1)
+-        os.dup2(null_fds[1], 2)
+-
+-        # *** run the function ***
+-        try:
+-            # Should RuntimeError here
+-            list(_map.query_point(0, x, y))
+-        finally:
+-            # restore file descriptors so I can print the results
+-            os.dup2(save[0], 1)
+-            os.dup2(save[1], 2)
+-            # close the temporary fds
+-            os.close(null_fds[0])
+-            os.close(null_fds[1])
++def test_vrt_referring_to_missing_files(setup):
++    with pytest.raises(RuntimeError):
++        srs = 'epsg:32630'
++        if 'gdal' in mapnik.DatasourceCache.plugin_names():
++            lyr = mapnik.Layer('dataraster')
++            lyr.datasource = mapnik.Gdal(
++                file='../data/raster/missing_raster.vrt',
++                band=1,
++            )
++            lyr.srs = srs
++            _map = mapnik.Map(256, 256, srs)
++            _map.layers.append(lyr)
++
++            # center of extent of raster
++            x, y = 556113.0, 4381428.0  # center of extent of raster
++            _map.zoom_all()
++
++            # Fancy stuff to suppress output of error
++            # open 2 fds
++            null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)]
++            # save the current file descriptors to a tuple
++            save = os.dup(1), os.dup(2)
++            # put /dev/null fds on 1 and 2
++            os.dup2(null_fds[0], 1)
++            os.dup2(null_fds[1], 2)
++
++            # *** run the function ***
++            try:
++                # Should RuntimeError here
++                list(_map.query_point(0, x, y))
++            finally:
++                # restore file descriptors so I can print the results
++                os.dup2(save[0], 1)
++                os.dup2(save[1], 2)
++                # close the temporary fds
++                os.close(null_fds[0])
++                os.close(null_fds[1])
+ 
+ 
+ def test_field_listing():
+     if 'shape' in mapnik.DatasourceCache.plugin_names():
+         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
+         fields = ds.fields()
+-        eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
++        assert fields == ['AREA', 'EAS_ID', 'PRFEDEA']
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
+-        eq_(desc['name'], 'shape')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Polygon
++        assert desc['name'] ==  'shape'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+ 
+ 
+ def test_total_feature_count_shp():
+@@ -84,20 +73,19 @@ def test_total_feature_count_shp():
+         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
+         features = ds.all_features()
+         num_feats = len(list(features))
+-        eq_(num_feats, 10)
+-
++        assert num_feats ==  10
+ 
+ def test_total_feature_count_json():
+     if 'ogr' in mapnik.DatasourceCache.plugin_names():
+         ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0)
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(desc['name'], 'ogr')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert desc['name'] ==  'ogr'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+         features = ds.all_features()
+         num_feats = len(list(features))
+-        eq_(num_feats, 5)
++        assert num_feats ==  5
+ 
+ 
+ def test_sqlite_reading():
+@@ -106,13 +94,13 @@ def test_sqlite_reading():
+             file='../data/sqlite/world.sqlite',
+             table_by_index=0)
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
+-        eq_(desc['name'], 'sqlite')
+-        eq_(desc['type'], mapnik.DataType.Vector)
+-        eq_(desc['encoding'], 'utf-8')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Polygon
++        assert desc['name'] ==  'sqlite'
++        assert desc['type'] ==  mapnik.DataType.Vector
++        assert desc['encoding'] ==  'utf-8'
+         features = ds.all_features()
+         num_feats = len(list(features))
+-        eq_(num_feats, 245)
++        assert num_feats ==  245
+ 
+ 
+ def test_reading_json_from_string():
+@@ -122,7 +110,7 @@ def test_reading_json_from_string():
+         ds = mapnik.Ogr(file=json, layer_by_index=0)
+         features = ds.all_features()
+         num_feats = len(list(features))
+-        eq_(num_feats, 5)
++        assert num_feats ==  5
+ 
+ 
+ def test_feature_envelope():
+@@ -132,9 +120,9 @@ def test_feature_envelope():
+         for feat in features:
+             env = feat.envelope()
+             contains = ds.envelope().contains(env)
+-            eq_(contains, True)
++            assert contains ==  True
+             intersects = ds.envelope().contains(env)
+-            eq_(intersects, True)
++            assert intersects ==  True
+ 
+ 
+ def test_feature_attributes():
+@@ -143,9 +131,9 @@ def test_feature_attributes():
+         features = list(ds.all_features())
+         feat = features[0]
+         attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
+-        eq_(feat.attributes, attrs)
+-        eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
+-        eq_(ds.field_types(), ['float', 'int', 'str'])
++        assert feat.attributes ==  attrs
++        assert ds.fields() == ['AREA', 'EAS_ID', 'PRFEDEA']
++        assert ds.field_types() == ['float', 'int', 'str']
+ 
+ 
+ def test_ogr_layer_by_sql():
+@@ -154,7 +142,7 @@ def test_ogr_layer_by_sql():
+                         layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
+         features = ds.all_features()
+         num_feats = len(list(features))
+-        eq_(num_feats, 1)
++        assert num_feats ==  1
+ 
+ 
+ def test_hit_grid():
+@@ -170,8 +158,8 @@ def test_hit_grid():
+         m.zoom_all()
+         join_field = 'NAME'
+         fg = []  # feature grid
+-        for y in xrange(0, 256, 4):
+-            for x in xrange(0, 256, 4):
++        for y in range(0, 256, 4):
++            for x in range(0, 256, 4):
+                 featureset = m.query_map_point(0, x, y)
+                 added = False
+                 for feature in featureset:
+@@ -180,14 +168,9 @@ def test_hit_grid():
+                 if not added:
+                     fg.append('')
+         hit_list = '|'.join(rle_encode(fg))
+-        eq_(hit_list[:16], '730:|2:Greenland')
+-        eq_(hit_list[-12:], '1:Chile|812:')
++        assert hit_list[:16] ==  '730:|2:Greenland'
++        assert hit_list[-12:] ==  '1:Chile|812:'
+     except RuntimeError as e:
+         # only test datasources that we have installed
+         if not 'Could not create datasource' in str(e):
+             raise RuntimeError(str(e))
+-
+-
+-if __name__ == '__main__':
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/datasource_xml_template_test.py
++++ b/test/python_tests/datasource_xml_template_test.py
+@@ -1,27 +1,28 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_datasource_template_is_working():
++def test_datasource_template_is_working(setup):
+     m = mapnik.Map(256, 256)
+-    try:
+-        mapnik.load_map(m, '../data/good_maps/datasource.xml')
+-    except RuntimeError as e:
+-        if "Required parameter 'type'" in str(e):
+-            raise RuntimeError(e)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    mapnik.load_map(m, '../data/good_maps/datasource.xml')
++    for layer in m.layers:
++        layer_bbox = layer.envelope()
++        bbox = None
++        first = True
++        for feature in layer.datasource:
++            assert feature.envelope() == feature.geometry.envelope()
++            assert layer_bbox.contains(feature.envelope())
++            if first:
++                first = False
++                bbox = feature.envelope()
++            else:
++                bbox += feature.envelope()
++        assert layer_bbox == bbox
+--- a/test/python_tests/extra_map_props_test.py
++++ b/test/python_tests/extra_map_props_test.py
+@@ -1,48 +1,43 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ 
+-def test_arbitrary_parameters_attached_to_map():
++def test_arbitrary_parameters_attached_to_map(setup):
+     m = mapnik.Map(256, 256)
+     mapnik.load_map(m, '../data/good_maps/extra_arbitary_map_parameters.xml')
+-    eq_(len(m.parameters), 5)
+-    eq_(m.parameters['key'], 'value2')
+-    eq_(m.parameters['key3'], 'value3')
+-    eq_(m.parameters['unicode'], u'iv?n')
+-    eq_(m.parameters['integer'], 10)
+-    eq_(m.parameters['decimal'], .999)
++    assert len(m.parameters) ==  5
++    assert m.parameters['key'] ==  'value2'
++    assert m.parameters['key3'] ==  'value3'
++    assert m.parameters['unicode'] ==  u'iv?n'
++    assert m.parameters['integer'] ==  10
++    assert m.parameters['decimal'] ==  .999
+     m2 = mapnik.Map(256, 256)
+     for k, v in m.parameters:
+         m2.parameters.append(mapnik.Parameter(k, v))
+-    eq_(len(m2.parameters), 5)
+-    eq_(m2.parameters['key'], 'value2')
+-    eq_(m2.parameters['key3'], 'value3')
+-    eq_(m2.parameters['unicode'], u'iv?n')
+-    eq_(m2.parameters['integer'], 10)
+-    eq_(m2.parameters['decimal'], .999)
++    assert len(m2.parameters) ==  5
++    assert m2.parameters['key'] ==  'value2'
++    assert m2.parameters['key3'] ==  'value3'
++    assert m2.parameters['unicode'] ==  u'iv?n'
++    assert m2.parameters['integer'] ==  10
++    assert m2.parameters['decimal'] ==  .999
+     map_string = mapnik.save_map_to_string(m)
+     m3 = mapnik.Map(256, 256)
+     mapnik.load_map_from_string(m3, map_string)
+-    eq_(len(m3.parameters), 5)
+-    eq_(m3.parameters['key'], 'value2')
+-    eq_(m3.parameters['key3'], 'value3')
+-    eq_(m3.parameters['unicode'], u'iv?n')
+-    eq_(m3.parameters['integer'], 10)
+-    eq_(m3.parameters['decimal'], .999)
++    assert len(m3.parameters) ==  5
++    assert m3.parameters['key'] ==  'value2'
++    assert m3.parameters['key3'] ==  'value3'
++    assert m3.parameters['unicode'] ==  u'iv?n'
++    assert m3.parameters['integer'] ==  10
++    assert m3.parameters['decimal'] ==  .999
+ 
+ 
+ def test_serializing_arbitrary_parameters():
+@@ -52,9 +47,5 @@ def test_serializing_arbitrary_parameter
+ 
+     m2 = mapnik.Map(1, 1)
+     mapnik.load_map_from_string(m2, mapnik.save_map_to_string(m))
+-    eq_(m2.parameters['width'], m.width)
+-    eq_(m2.parameters['height'], m.height)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert m2.parameters['width'] ==  m.width
++    assert m2.parameters['height'] ==  m.height
+--- a/test/python_tests/feature_id_test.py
++++ b/test/python_tests/feature_id_test.py
+@@ -1,24 +1,19 @@
+-#!/usr/bin/env python
+-
+-import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
+-
++import os
++import pytest
+ try:
+     import itertools.izip as zip
+ except ImportError:
+     pass
+ 
++from .utilities import execution_path
+ 
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ def compare_shape_between_mapnik_and_ogr(shapefile, query=None):
+     plugins = mapnik.DatasourceCache.plugin_names()
+@@ -34,13 +29,11 @@ def compare_shape_between_mapnik_and_ogr
+         count = 0
+         for feat1, feat2 in zip(fs1, fs2):
+             count += 1
+-            eq_(feat1.id(), feat2.id(),
+-                '%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"'
+-                % (count, feat1.id(), str(feat1.attributes), feat2.id(), str(feat2.attributes)))
++            assert feat1.id() == feat2.id(), '%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"' % (count, feat1.id(), str(feat1.attributes), feat2.id(), str(feat2.attributes))
+     return True
+ 
+ 
+-def test_shapefile_line_featureset_id():
++def test_shapefile_line_featureset_id(setup):
+     compare_shape_between_mapnik_and_ogr('../data/shp/polylines.shp')
+ 
+ 
+@@ -60,21 +53,15 @@ def test_shapefile_polygon_feature_query
+ 
+ 
+ def test_feature_hit_count():
+-    pass
+-    #raise Todo("need to optimize multigeom bbox handling in shapeindex: https://github.com/mapnik/mapnik/issues/783")
+     # results in different results between shp and ogr!
+     #bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
+-    #bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
+-    #query = mapnik.Query(mapnik.Box2d(*bbox))
+-    # if 'ogr' in mapnik.DatasourceCache.plugin_names():
+-    #    ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
+-    #    for fld in ds1.fields():
+-    #        query.add_property_name(fld)
+-    #    ds2 = mapnik.Shapefile(file='../data/shp/world_merc.shp')
+-    #    count1 = len(ds1.features(query).features)
+-    #    count2 = len(ds2.features(query).features)
+-    #    eq_(count1,count2,"Feature count differs between OGR driver (%s features) and Shapefile Driver (%s features) when querying the same bbox" % (count1,count2))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
++    query = mapnik.Query(mapnik.Box2d(*bbox))
++    if 'ogr' in mapnik.DatasourceCache.plugin_names():
++        ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
++        for fld in ds1.fields():
++            query.add_property_name(fld)
++        ds2 = mapnik.Shapefile(file='../data/shp/world_merc.shp')
++        count1 = len(list(ds1.features(query)))
++        count2 = len(list(ds2.features(query)))
++        assert count1 < count2 # expected 17 and 20
+--- a/test/python_tests/feature_test.py
++++ b/test/python_tests/feature_test.py
+@@ -1,26 +1,17 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ from binascii import unhexlify
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
+-
+-from .utilities import run_all
+-
++import pytest
+ 
+ def test_default_constructor():
+     f = mapnik.Feature(mapnik.Context(), 1)
+-    eq_(f is not None, True)
++    assert f is not None
+ 
+ 
+ def test_feature_geo_interface():
+     ctx = mapnik.Context()
+     feat = mapnik.Feature(ctx, 1)
+     feat.geometry = mapnik.Geometry.from_wkt('Point (0 0)')
+-    eq_(feat.__geo_interface__['geometry'], {
+-        u'type': u'Point', u'coordinates': [0, 0]})
++    assert feat.__geo_interface__['geometry'] == {u'type': u'Point', u'coordinates': [0, 0]}
+ 
+ 
+ def test_python_extended_constructor():
+@@ -31,15 +22,15 @@ def test_python_extended_constructor():
+     wkt = 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'
+     f.geometry = mapnik.Geometry.from_wkt(wkt)
+     f['foo'] = 'bar'
+-    eq_(f['foo'], 'bar')
+-    eq_(f.envelope(), mapnik.Box2d(10.0, 10.0, 45.0, 45.0))
++    assert f['foo'] ==  'bar'
++    assert f.envelope() == mapnik.Box2d(10.0, 10.0, 45.0, 45.0)
+     # reset
+     f['foo'] = u"avi?n"
+-    eq_(f['foo'], u"avi?n")
++    assert f['foo'] ==  u"avi?n"
+     f['foo'] = 1.4
+-    eq_(f['foo'], 1.4)
++    assert f['foo'] ==  1.4
+     f['foo'] = True
+-    eq_(f['foo'], True)
++    assert f['foo'] ==  True
+ 
+ 
+ def test_add_geom_wkb():
+@@ -49,13 +40,13 @@ def test_add_geom_wkb():
+     if hasattr(geometry, 'is_valid'):
+         # Those are only available when python-mapnik has been built with
+         # boost >= 1.56.
+-        eq_(geometry.is_valid(), True)
+-        eq_(geometry.is_simple(), True)
+-    eq_(geometry.envelope(), mapnik.Box2d(10.0, 10.0, 40.0, 40.0))
++        assert geometry.is_valid() ==  True
++        assert geometry.is_simple() ==  True
++    assert geometry.envelope() == mapnik.Box2d(10.0, 10.0, 40.0, 40.0)
+     geometry.correct()
+     if hasattr(geometry, 'is_valid'):
+         # valid after calling correct
+-        eq_(geometry.is_valid(), True)
++        assert geometry.is_valid() ==  True
+ 
+ 
+ def test_feature_expression_evaluation():
+@@ -63,13 +54,13 @@ def test_feature_expression_evaluation()
+     context.push('name')
+     f = mapnik.Feature(context, 1)
+     f['name'] = 'a'
+-    eq_(f['name'], u'a')
++    assert f['name'] ==  u'a'
+     expr = mapnik.Expression("[name]='a'")
+     evaluated = expr.evaluate(f)
+-    eq_(evaluated, True)
++    assert evaluated ==  True
+     num_attributes = len(f)
+-    eq_(num_attributes, 1)
+-    eq_(f.id(), 1)
++    assert num_attributes ==  1
++    assert f.id() ==  1
+ 
+ # https://github.com/mapnik/mapnik/issues/933
+ 
+@@ -79,16 +70,16 @@ def test_feature_expression_evaluation_m
+     context.push('name')
+     f = mapnik.Feature(context, 1)
+     f['name'] = u'a'
+-    eq_(f['name'], u'a')
++    assert f['name'] ==  u'a'
+     expr = mapnik.Expression("[fielddoesnotexist]='a'")
+-    eq_('fielddoesnotexist' in f, False)
++    assert not 'fielddoesnotexist' in f
+     try:
+         expr.evaluate(f)
+     except Exception as e:
+-        eq_("Key does not exist" in str(e), True)
++        assert "Key does not exist" in str(e)
+     num_attributes = len(f)
+-    eq_(num_attributes, 1)
+-    eq_(f.id(), 1)
++    assert num_attributes ==  1
++    assert f.id() ==  1
+ 
+ # https://github.com/mapnik/mapnik/issues/934
+ 
+@@ -98,31 +89,27 @@ def test_feature_expression_evaluation_a
+     context.push('name with space')
+     f = mapnik.Feature(context, 1)
+     f['name with space'] = u'a'
+-    eq_(f['name with space'], u'a')
++    assert f['name with space'] ==  u'a'
+     expr = mapnik.Expression("[name with space]='a'")
+-    eq_(str(expr), "([name with space]='a')")
+-    eq_(expr.evaluate(f), True)
++    assert str(expr) ==  "([name with space]='a')"
++    assert expr.evaluate(f) ==  True
+ 
+ # https://github.com/mapnik/mapnik/issues/2390
+ 
+-
+- at raises(RuntimeError)
+ def test_feature_from_geojson():
+-    ctx = mapnik.Context()
+-    inline_string = """
+-    {
+-         "geometry" : {
+-            "coordinates" : [ 0,0 ]
+-            "type" : "Point"
+-         },
+-         "type" : "Feature",
+-         "properties" : {
+-            "this":"that"
+-            "known":"nope because missing comma"
+-         }
+-    }
+-    """
+-    mapnik.Feature.from_geojson(inline_string, ctx)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    with pytest.raises(RuntimeError):
++        ctx = mapnik.Context()
++        inline_string = """
++        {
++        "geometry" : {
++        "coordinates" : [ 0,0 ]
++        "type" : "Point"
++        },
++        "type" : "Feature",
++        "properties" : {
++        "this":"that"
++        "known":"nope because missing comma"
++        }
++        }
++        """
++        mapnik.Feature.from_geojson(inline_string, ctx)
+--- a/test/python_tests/filter_test.py
++++ b/test/python_tests/filter_test.py
+@@ -1,18 +1,5 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-import sys
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
+-
+-from .utilities import run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-if PYTHON3:
+-    long = int
+-    unicode = str
+-
++import pytest
+ 
+ if hasattr(mapnik, 'Expression'):
+     mapnik.Filter = mapnik.Expression
+@@ -96,11 +83,11 @@ def test_filter_init():
+ 
+     first = filters[0]
+     for f in filters:
+-        eq_(str(first), str(f))
++        assert str(first) ==  str(f)
+ 
+     s = m.find_style('s2')
+ 
+-    eq_(s.filter_mode, mapnik.filter_mode.FIRST)
++    assert s.filter_mode ==  mapnik.filter_mode.FIRST
+ 
+ 
+ def test_geometry_type_eval():
+@@ -110,45 +97,42 @@ def test_geometry_type_eval():
+     f = mapnik.Feature(context2, 0)
+     f["mapnik::geometry_type"] = 'sneaky'
+     expr = mapnik.Expression("[mapnik::geometry_type]")
+-    eq_(expr.evaluate(f), 0)
++    assert expr.evaluate(f) ==  0
+ 
+     expr = mapnik.Expression("[mapnik::geometry_type]")
+     context = mapnik.Context()
+ 
+     # no geometry
+     f = mapnik.Feature(context, 0)
+-    eq_(expr.evaluate(f), 0)
+-    eq_(mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f), True)
++    assert expr.evaluate(f) ==  0
++    assert mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f)
+ 
+     # POINT = 1
+     f = mapnik.Feature(context, 0)
+     f.geometry = mapnik.Geometry.from_wkt('POINT(10 40)')
+-    eq_(expr.evaluate(f), 1)
+-    eq_(mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f), True)
++    assert expr.evaluate(f) ==  1
++    assert mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f)
+ 
+     # LINESTRING = 2
+     f = mapnik.Feature(context, 0)
+     f.geometry = mapnik.Geometry.from_wkt('LINESTRING (30 10, 10 30, 40 40)')
+-    eq_(expr.evaluate(f), 2)
+-    eq_(mapnik.Expression(
+-        "[mapnik::geometry_type] = linestring").evaluate(f), True)
++    assert expr.evaluate(f) ==  2
++    assert mapnik.Expression("[mapnik::geometry_type] = linestring").evaluate(f)
+ 
+     # POLYGON = 3
+     f = mapnik.Feature(context, 0)
+     f.geometry = mapnik.Geometry.from_wkt(
+         'POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))')
+-    eq_(expr.evaluate(f), 3)
+-    eq_(mapnik.Expression(
+-        "[mapnik::geometry_type] = polygon").evaluate(f), True)
++    assert expr.evaluate(f) ==  3
++    assert mapnik.Expression("[mapnik::geometry_type] = polygon").evaluate(f)
+ 
+     # COLLECTION = 4
+     f = mapnik.Feature(context, 0)
+     geom = mapnik.Geometry.from_wkt(
+         'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))')
+     f.geometry = geom
+-    eq_(expr.evaluate(f), 4)
+-    eq_(mapnik.Expression(
+-        "[mapnik::geometry_type] = collection").evaluate(f), True)
++    assert expr.evaluate(f) ==  4
++    assert mapnik.Expression("[mapnik::geometry_type] = collection").evaluate(f)
+ 
+ 
+ def test_regex_match():
+@@ -157,7 +141,7 @@ def test_regex_match():
+     f = mapnik.Feature(context, 0)
+     f["name"] = 'test'
+     expr = mapnik.Expression("[name].match('test')")
+-    eq_(expr.evaluate(f), True)  # 1 == True
++    assert expr.evaluate(f)  # 1 == True
+ 
+ 
+ def test_unicode_regex_match():
+@@ -166,7 +150,7 @@ def test_unicode_regex_match():
+     f = mapnik.Feature(context, 0)
+     f["name"] = 'Qu?bec'
+     expr = mapnik.Expression("[name].match('Qu?bec')")
+-    eq_(expr.evaluate(f), True)  # 1 == True
++    assert expr.evaluate(f) # 1 == True
+ 
+ 
+ def test_regex_replace():
+@@ -175,12 +159,12 @@ def test_regex_replace():
+     f = mapnik.Feature(context, 0)
+     f["name"] = 'test'
+     expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')")
+-    eq_(expr.evaluate(f), 't e s t')
++    assert expr.evaluate(f) ==  't e s t'
+ 
+ 
+ def test_unicode_regex_replace_to_str():
+     expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')")
+-    eq_(str(expr), "[name].replace('(\\B)|( )','$1 ')")
++    assert str(expr) == "[name].replace('(\\B)|( )','$1 ')"
+ 
+ 
+ def test_unicode_regex_replace():
+@@ -190,7 +174,7 @@ def test_unicode_regex_replace():
+     f["name"] = 'Qu?bec'
+     expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')")
+     # will fail if -DBOOST_REGEX_HAS_ICU is not defined
+-    eq_(expr.evaluate(f), u'Q u ? b e c')
++    assert expr.evaluate(f) ==  u'Q u ? b e c'
+ 
+ 
+ def test_float_precision():
+@@ -199,16 +183,16 @@ def test_float_precision():
+     f = mapnik.Feature(context, 0)
+     f["num1"] = 1.0000
+     f["num2"] = 1.0001
+-    eq_(f["num1"], 1.0000)
+-    eq_(f["num2"], 1.0001)
++    assert f["num1"] ==  1.0000
++    assert f["num2"] ==  1.0001
+     expr = mapnik.Expression("[num1] = 1.0000")
+-    eq_(expr.evaluate(f), True)
++    assert expr.evaluate(f)
+     expr = mapnik.Expression("[num1].match('1')")
+-    eq_(expr.evaluate(f), True)
++    assert expr.evaluate(f)
+     expr = mapnik.Expression("[num2] = 1.0001")
+-    eq_(expr.evaluate(f), True)
++    assert expr.evaluate(f)
+     expr = mapnik.Expression("[num2].match('1.0001')")
+-    eq_(expr.evaluate(f), True)
++    assert expr.evaluate(f)
+ 
+ 
+ def test_string_matching_on_precision():
+@@ -216,9 +200,9 @@ def test_string_matching_on_precision():
+     context.push('num')
+     f = mapnik.Feature(context, 0)
+     f["num"] = "1.0000"
+-    eq_(f["num"], "1.0000")
++    assert f["num"] ==  "1.0000"
+     expr = mapnik.Expression("[num].match('.*(^0|00)$')")
+-    eq_(expr.evaluate(f), True)
++    assert expr.evaluate(f)
+ 
+ 
+ def test_creation_of_null_value():
+@@ -226,12 +210,12 @@ def test_creation_of_null_value():
+     context.push('nv')
+     f = mapnik.Feature(context, 0)
+     f["nv"] = None
+-    eq_(f["nv"], None)
+-    eq_(f["nv"] is None, True)
++    assert f["nv"] ==  None
++    assert f["nv"] is None
+     # test boolean
+     f["nv"] = 0
+-    eq_(f["nv"], 0)
+-    eq_(f["nv"] is not None, True)
++    assert f["nv"] ==  0
++    assert f["nv"] is not None
+ 
+ 
+ def test_creation_of_bool():
+@@ -239,39 +223,39 @@ def test_creation_of_bool():
+     context.push('bool')
+     f = mapnik.Feature(context, 0)
+     f["bool"] = True
+-    eq_(f["bool"], True)
++    assert f["bool"]
+     # TODO - will become int of 1 do to built in boost python conversion
+     # https://github.com/mapnik/mapnik/issues/1873
+-    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), True)
++    assert isinstance(f["bool"], bool) or isinstance(f["bool"], int)
+     f["bool"] = False
+-    eq_(f["bool"], False)
+-    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), True)
++    assert f["bool"] ==  False
++    assert isinstance(f["bool"], bool) or isinstance(f["bool"], int)
+     # test NoneType
+     f["bool"] = None
+-    eq_(f["bool"], None)
+-    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), False)
++    assert f["bool"] ==  None
++    assert not (isinstance(f["bool"], bool) or isinstance(f["bool"], int))
+     # test integer
+     f["bool"] = 0
+-    eq_(f["bool"], 0)
++    assert f["bool"] ==  0
+     # https://github.com/mapnik/mapnik/issues/1873
+     # ugh, boost_python's built into converter does not work right
+-    # eq_(isinstance(f["bool"],bool),False)
++    # assert isinstance(f["bool"],bool) == False
+ 
+ null_equality = [
+-    ['hello', False, unicode],
+-    [u'', False, unicode],
+-    [0, False, long],
+-    [123, False, long],
++    ['hello', False, str],
++    [u'', False, str],
++    [0, False, int],
++    [123, False, int],
+     [0.0, False, float],
+     [123.123, False, float],
+     [.1, False, float],
+     # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+-    [False, False, long],
++    [False, False, int],
+     # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+-    [True, False, long],
++    [True, False, int],
+     [None, True, None],
+-    [2147483648, False, long],
+-    [922337203685477580, False, long]
++    [2147483648, False, int],
++    [922337203685477580, False, int]
+ ]
+ 
+ 
+@@ -280,16 +264,15 @@ def test_expressions_with_null_equality(
+         context = mapnik.Context()
+         f = mapnik.Feature(context, 0)
+         f["prop"] = eq[0]
+-        eq_(f["prop"], eq[0])
++        assert f["prop"] ==  eq[0]
+         if eq[0] is None:
+-            eq_(f["prop"] is None, True)
++            assert f["prop"] is None
+         else:
+-            eq_(isinstance(f['prop'], eq[2]), True,
+-                '%s is not an instance of %s' % (f['prop'], eq[2]))
++            assert isinstance(f['prop'], eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2])
+         expr = mapnik.Expression("[prop] = null")
+-        eq_(expr.evaluate(f), eq[1])
++        assert expr.evaluate(f) ==  eq[1]
+         expr = mapnik.Expression("[prop] is null")
+-        eq_(expr.evaluate(f), eq[1])
++        assert expr.evaluate(f) ==  eq[1]
+ 
+ 
+ def test_expressions_with_null_equality2():
+@@ -297,35 +280,34 @@ def test_expressions_with_null_equality2
+         context = mapnik.Context()
+         f = mapnik.Feature(context, 0)
+         f["prop"] = eq[0]
+-        eq_(f["prop"], eq[0])
++        assert f["prop"] ==  eq[0]
+         if eq[0] is None:
+-            eq_(f["prop"] is None, True)
++            assert f["prop"] is None
+         else:
+-            eq_(isinstance(f['prop'], eq[2]), True,
+-                '%s is not an instance of %s' % (f['prop'], eq[2]))
++            assert isinstance(f['prop'],  eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2])
+         # TODO - support `is not` syntax:
+         # https://github.com/mapnik/mapnik/issues/796
+         expr = mapnik.Expression("not [prop] is null")
+-        eq_(expr.evaluate(f), not eq[1])
++        assert expr.evaluate(f) == (not eq[1])
+         # https://github.com/mapnik/mapnik/issues/1642
+         expr = mapnik.Expression("[prop] != null")
+-        eq_(expr.evaluate(f), not eq[1])
++        assert expr.evaluate(f) == (not eq[1])
+ 
+ truthyness = [
+-    [u'hello', True, unicode],
+-    [u'', False, unicode],
+-    [0, False, long],
+-    [123, True, long],
++    [u'hello', True, str],
++    [u'', False, str],
++    [0, False, int],
++    [123, True, int],
+     [0.0, False, float],
+     [123.123, True, float],
+     [.1, True, float],
+     # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+-    [False, False, long],
++    [False, False, int],
+     # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+-    [True, True, long],
++    [True, True, int],
+     [None, False, None],
+-    [2147483648, True, long],
+-    [922337203685477580, True, long]
++    [2147483648, True, int],
++    [922337203685477580, True, int]
+ ]
+ 
+ 
+@@ -334,26 +316,25 @@ def test_expressions_for_thruthyness():
+     for eq in truthyness:
+         f = mapnik.Feature(context, 0)
+         f["prop"] = eq[0]
+-        eq_(f["prop"], eq[0])
++        assert f["prop"] ==  eq[0]
+         if eq[0] is None:
+-            eq_(f["prop"] is None, True)
++            assert f["prop"] is None
+         else:
+-            eq_(isinstance(f['prop'], eq[2]), True,
+-                '%s is not an instance of %s' % (f['prop'], eq[2]))
++            assert isinstance(f['prop'], eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2])
+         expr = mapnik.Expression("[prop]")
+-        eq_(expr.to_bool(f), eq[1])
++        assert expr.to_bool(f) ==  eq[1]
+         expr = mapnik.Expression("not [prop]")
+-        eq_(expr.to_bool(f), not eq[1])
++        assert expr.to_bool(f) == (not eq[1])
+         expr = mapnik.Expression("! [prop]")
+-        eq_(expr.to_bool(f), not eq[1])
++        assert expr.to_bool(f) == (not eq[1])
+     # also test if feature does not have property at all
+     f2 = mapnik.Feature(context, 1)
+     # no property existing will return value_null since
+     # https://github.com/mapnik/mapnik/commit/562fada9d0f680f59b2d9f396c95320a0d753479#include/mapnik/feature.hpp
+-    eq_(f2["prop"] is None, True)
++    assert f2["prop"] is None
+     expr = mapnik.Expression("[prop]")
+-    eq_(expr.evaluate(f2), None)
+-    eq_(expr.to_bool(f2), False)
++    assert expr.evaluate(f2) ==  None
++    assert expr.to_bool(f2) ==  False
+ 
+ # https://github.com/mapnik/mapnik/issues/1859
+ 
+@@ -364,93 +345,86 @@ def test_if_null_and_empty_string_are_eq
+     f["empty"] = u""
+     f["null"] = None
+     # ensure base assumptions are good
+-    eq_(mapnik.Expression("[empty] = ''").to_bool(f), True)
+-    eq_(mapnik.Expression("[null] = null").to_bool(f), True)
+-    eq_(mapnik.Expression("[empty] != ''").to_bool(f), False)
+-    eq_(mapnik.Expression("[null] != null").to_bool(f), False)
++    assert mapnik.Expression("[empty] = ''").to_bool(f)
++    assert mapnik.Expression("[null] = null").to_bool(f)
++    assert not mapnik.Expression("[empty] != ''").to_bool(f)
++    assert not mapnik.Expression("[null] != null").to_bool(f)
+     # now test expected behavior
+-    eq_(mapnik.Expression("[null] = ''").to_bool(f), False)
+-    eq_(mapnik.Expression("[empty] = null").to_bool(f), False)
+-    eq_(mapnik.Expression("[empty] != null").to_bool(f), True)
++    assert not mapnik.Expression("[null] = ''").to_bool(f)
++    assert not mapnik.Expression("[empty] = null").to_bool(f)
++    assert mapnik.Expression("[empty] != null").to_bool(f)
+     # this one is the back compatibility shim
+-    eq_(mapnik.Expression("[null] != ''").to_bool(f), False)
++    assert not mapnik.Expression("[null] != ''").to_bool(f)
+ 
+ 
+ def test_filtering_nulls_and_empty_strings():
+     context = mapnik.Context()
+     f = mapnik.Feature(context, 0)
+     f["prop"] = u"hello"
+-    eq_(f["prop"], u"hello")
+-    eq_(mapnik.Expression("[prop]").to_bool(f), True)
+-    eq_(mapnik.Expression("! [prop]").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop] != null").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] != ''").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f), True)
++    assert f["prop"] ==  u"hello"
++    assert mapnik.Expression("[prop]").to_bool(f)
++    assert not mapnik.Expression("! [prop]").to_bool(f)
++    assert mapnik.Expression("[prop] != null").to_bool(f)
++    assert mapnik.Expression("[prop] != ''").to_bool(f)
++    assert mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f)
++    assert mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f)
+     f["prop2"] = u""
+-    eq_(f["prop2"], u"")
+-    eq_(mapnik.Expression("[prop2]").to_bool(f), False)
+-    eq_(mapnik.Expression("! [prop2]").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop2] != null").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop2] != ''").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop2] = ''").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f), True)
+-    eq_(mapnik.Expression(
+-        "[prop2] != null and [prop2] != ''").to_bool(f), False)
++    assert f["prop2"] ==  u""
++    assert not mapnik.Expression("[prop2]").to_bool(f)
++    assert mapnik.Expression("! [prop2]").to_bool(f)
++    assert mapnik.Expression("[prop2] != null").to_bool(f)
++    assert not mapnik.Expression("[prop2] != ''").to_bool(f)
++    assert mapnik.Expression("[prop2] = ''").to_bool(f)
++    assert mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f)
++    assert not mapnik.Expression("[prop2] != null and [prop2] != ''").to_bool(f)
+     f["prop3"] = None
+-    eq_(f["prop3"], None)
+-    eq_(mapnik.Expression("[prop3]").to_bool(f), False)
+-    eq_(mapnik.Expression("! [prop3]").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop3] != null").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop3] = null").to_bool(f), True)
++    assert f["prop3"] ==  None
++    assert not mapnik.Expression("[prop3]").to_bool(f)
++    assert mapnik.Expression("! [prop3]").to_bool(f)
++    assert not mapnik.Expression("[prop3] != null").to_bool(f)
++    assert mapnik.Expression("[prop3] = null").to_bool(f)
+ 
+     # https://github.com/mapnik/mapnik/issues/1859
+-    #eq_(mapnik.Expression("[prop3] != ''").to_bool(f),True)
+-    eq_(mapnik.Expression("[prop3] != ''").to_bool(f), False)
++    #assert mapnik.Expression("[prop3] != ''").to_bool(f) == True
++    assert not mapnik.Expression("[prop3] != ''").to_bool(f)
+ 
+-    eq_(mapnik.Expression("[prop3] = ''").to_bool(f), False)
++    assert not mapnik.Expression("[prop3] = ''").to_bool(f)
+ 
+     # https://github.com/mapnik/mapnik/issues/1859
+-    #eq_(mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f),True)
+-    eq_(mapnik.Expression(
+-        "[prop3] != null or [prop3] != ''").to_bool(f), False)
++    #assert mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f) == True
++    assert not mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f)
+ 
+-    eq_(mapnik.Expression(
+-        "[prop3] != null and [prop3] != ''").to_bool(f), False)
++    assert not mapnik.Expression("[prop3] != null and [prop3] != ''").to_bool(f)
+     # attr not existing should behave the same as prop3
+-    eq_(mapnik.Expression("[prop4]").to_bool(f), False)
+-    eq_(mapnik.Expression("! [prop4]").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop4] != null").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop4] = null").to_bool(f), True)
++    assert not mapnik.Expression("[prop4]").to_bool(f)
++    assert mapnik.Expression("! [prop4]").to_bool(f)
++    assert not mapnik.Expression("[prop4] != null").to_bool(f)
++    assert mapnik.Expression("[prop4] = null").to_bool(f)
+ 
+     # https://github.com/mapnik/mapnik/issues/1859
+-    ##eq_(mapnik.Expression("[prop4] != ''").to_bool(f),True)
+-    eq_(mapnik.Expression("[prop4] != ''").to_bool(f), False)
++    ##assert mapnik.Expression("[prop4] != ''").to_bool(f) == True
++    assert not mapnik.Expression("[prop4] != ''").to_bool(f)
+ 
+-    eq_(mapnik.Expression("[prop4] = ''").to_bool(f), False)
++    assert not mapnik.Expression("[prop4] = ''").to_bool(f)
+ 
+     # https://github.com/mapnik/mapnik/issues/1859
+-    ##eq_(mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f),True)
+-    eq_(mapnik.Expression(
+-        "[prop4] != null or [prop4] != ''").to_bool(f), False)
++    ##assert mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f) == True
++    assert not mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f)
+ 
+-    eq_(mapnik.Expression(
+-        "[prop4] != null and [prop4] != ''").to_bool(f), False)
++    assert not mapnik.Expression("[prop4] != null and [prop4] != ''").to_bool(f)
+     f["prop5"] = False
+-    eq_(f["prop5"], False)
+-    eq_(mapnik.Expression("[prop5]").to_bool(f), False)
+-    eq_(mapnik.Expression("! [prop5]").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop5] != null").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop5] = null").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop5] != ''").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop5] = ''").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f), True)
+-    eq_(mapnik.Expression(
+-        "[prop5] != null and [prop5] != ''").to_bool(f), True)
++    assert f["prop5"] ==  False
++    assert not mapnik.Expression("[prop5]").to_bool(f)
++    assert mapnik.Expression("! [prop5]").to_bool(f)
++    assert mapnik.Expression("[prop5] != null").to_bool(f)
++    assert not mapnik.Expression("[prop5] = null").to_bool(f)
++    assert mapnik.Expression("[prop5] != ''").to_bool(f)
++    assert not mapnik.Expression("[prop5] = ''").to_bool(f)
++    assert mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f)
++    assert mapnik.Expression("[prop5] != null and [prop5] != ''").to_bool(f)
+     # note, we need to do [prop5] != 0 here instead of false due to this bug:
+     # https://github.com/mapnik/mapnik/issues/1873
+-    eq_(mapnik.Expression(
+-        "[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f), False)
++    assert not mapnik.Expression("[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f)
+ 
+ # https://github.com/mapnik/mapnik/issues/1872
+ 
+@@ -459,12 +433,12 @@ def test_falseyness_comparision():
+     context = mapnik.Context()
+     f = mapnik.Feature(context, 0)
+     f["prop"] = 0
+-    eq_(mapnik.Expression("[prop]").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop] = false").to_bool(f), True)
+-    eq_(mapnik.Expression("not [prop] != false").to_bool(f), True)
+-    eq_(mapnik.Expression("not [prop] = true").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] = true").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop] != true").to_bool(f), True)
++    assert not mapnik.Expression("[prop]").to_bool(f)
++    assert mapnik.Expression("[prop] = false").to_bool(f)
++    assert mapnik.Expression("not [prop] != false").to_bool(f)
++    assert mapnik.Expression("not [prop] = true").to_bool(f)
++    assert not mapnik.Expression("[prop] = true").to_bool(f)
++    assert mapnik.Expression("[prop] != true").to_bool(f)
+ 
+ # https://github.com/mapnik/mapnik/issues/1806, fixed by
+ # https://github.com/mapnik/mapnik/issues/1872
+@@ -474,12 +448,12 @@ def test_truthyness_comparision():
+     context = mapnik.Context()
+     f = mapnik.Feature(context, 0)
+     f["prop"] = 1
+-    eq_(mapnik.Expression("[prop]").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] = false").to_bool(f), False)
+-    eq_(mapnik.Expression("not [prop] != false").to_bool(f), False)
+-    eq_(mapnik.Expression("not [prop] = true").to_bool(f), False)
+-    eq_(mapnik.Expression("[prop] = true").to_bool(f), True)
+-    eq_(mapnik.Expression("[prop] != true").to_bool(f), False)
++    assert mapnik.Expression("[prop]").to_bool(f) ==  True
++    assert mapnik.Expression("[prop] = false").to_bool(f) ==  False
++    assert mapnik.Expression("not [prop] != false").to_bool(f) ==  False
++    assert mapnik.Expression("not [prop] = true").to_bool(f) ==  False
++    assert mapnik.Expression("[prop] = true").to_bool(f) ==  True
++    assert mapnik.Expression("[prop] != true").to_bool(f) ==  False
+ 
+ 
+ def test_division_by_zero():
+@@ -490,13 +464,9 @@ def test_division_by_zero():
+     f = mapnik.Feature(c, 0)
+     f['a'] = 1
+     f['b'] = 0
+-    eq_(expr.evaluate(f), None)
++    assert expr.evaluate(f) ==  None
+ 
+ 
+-@raises(RuntimeError)
+ def test_invalid_syntax1():
+-    mapnik.Expression('abs()')
+-
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    with pytest.raises(RuntimeError):
++        mapnik.Expression('abs()')
+--- a/test/python_tests/fontset_test.py
++++ b/test/python_tests/fontset_test.py
+@@ -1,47 +1,40 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ 
+-def test_loading_fontset_from_map():
++def test_loading_fontset_from_map(setup):
+     m = mapnik.Map(256, 256)
+     mapnik.load_map(m, '../data/good_maps/fontset.xml', True)
+     fs = m.find_fontset('book-fonts')
+-    eq_(len(fs.names), 2)
+-    eq_(list(fs.names), ['DejaVu Sans Book', 'DejaVu Sans Oblique'])
++    assert len(fs.names) ==  2
++    assert list(fs.names) == ['DejaVu Sans Book', 'DejaVu Sans Oblique']
+ 
+ # def test_loading_fontset_from_python():
+ #     m = mapnik.Map(256,256)
+ #     fset = mapnik.FontSet('foo')
+ #     fset.add_face_name('Comic Sans')
+ #     fset.add_face_name('Papyrus')
+-#     eq_(fset.name,'foo')
++#     assert fset.name == 'foo'
+ #     fset.name = 'my-set'
+-#     eq_(fset.name,'my-set')
++#     assert fset.name == 'my-set'
+ #     m.append_fontset('my-set', fset)
+ #     sty = mapnik.Style()
+ #     rule = mapnik.Rule()
+ #     tsym = mapnik.TextSymbolizer()
+-#     eq_(tsym.fontset,None)
++#     assert tsym.fontset == None
+ #     tsym.fontset = fset
+ #     rule.symbols.append(tsym)
+ #     sty.rules.append(rule)
+ #     m.append_style('Style',sty)
+ #     serialized_map = mapnik.save_map_to_string(m)
+-#     eq_('fontset-name="my-set"' in serialized_map,True)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++#     assert 'fontset-name="my-set"' in serialized_map
+--- a/test/python_tests/geojson_plugin_test.py
++++ b/test/python_tests/geojson_plugin_test.py
+@@ -1,67 +1,63 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'geojson' in mapnik.DatasourceCache.plugin_names():
+ 
+-    def test_geojson_init():
++    def test_geojson_init(setup):
+         ds = mapnik.Datasource(
+             type='geojson',
+             file='../data/json/escaped.geojson')
+         e = ds.envelope()
+-        assert_almost_equal(e.minx, -81.705583, places=7)
+-        assert_almost_equal(e.miny, 41.480573, places=6)
+-        assert_almost_equal(e.maxx, -81.705583, places=5)
+-        assert_almost_equal(e.maxy, 41.480573, places=3)
++        assert e.minx == pytest.approx(-81.705583, abs=1e-7)
++        assert e.miny == pytest.approx(41.480573, abs=1e-6)
++        assert e.maxx == pytest.approx(-81.705583, abs=1e-5)
++        assert e.maxy == pytest.approx(41.480573, abs=1e-3)
+ 
+     def test_geojson_properties():
+         ds = mapnik.Datasource(
+             type='geojson',
+             file='../data/json/escaped.geojson')
+         f = list(ds.features_at_point(ds.envelope().center()))[0]
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++        assert f['name'] ==  u'Test'
++        assert f['int'] ==  1
++        assert f['description'] ==  u'Test: \u005C'
++        assert f['spaces'] ==  u'this has spaces'
++        assert f['double'] ==  1.1
++        assert f['boolean'] ==  True
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        assert f['NOM_FR'] ==  u'Qu?bec'
+ 
+         ds = mapnik.Datasource(
+             type='geojson',
+             file='../data/json/escaped.geojson')
+         f = list(ds.all_features())[0]
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+ 
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++        assert f['name'] ==  u'Test'
++        assert f['int'] ==  1
++        assert f['description'] ==  u'Test: \u005C'
++        assert f['spaces'] ==  u'this has spaces'
++        assert f['double'] ==  1.1
++        assert f['boolean'] ==  True
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        assert f['NOM_FR'] ==  u'Qu?bec'
+ 
+     def test_large_geojson_properties():
+         ds = mapnik.Datasource(
+@@ -69,36 +65,36 @@ if 'geojson' in mapnik.DatasourceCache.p
+             file='../data/json/escaped.geojson',
+             cache_features=False)
+         f = list(ds.features_at_point(ds.envelope().center()))[0]
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++        assert f['name'] ==  u'Test'
++        assert f['int'] ==  1
++        assert f['description'] ==  u'Test: \u005C'
++        assert f['spaces'] ==  u'this has spaces'
++        assert f['double'] ==  1.1
++        assert f['boolean'] ==  True
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        assert f['NOM_FR'] ==  u'Qu?bec'
+ 
+         ds = mapnik.Datasource(
+             type='geojson',
+             file='../data/json/escaped.geojson')
+         f = list(ds.all_features())[0]
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+ 
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++        assert f['name'] ==  u'Test'
++        assert f['int'] ==  1
++        assert f['description'] ==  u'Test: \u005C'
++        assert f['spaces'] ==  u'this has spaces'
++        assert f['double'] ==  1.1
++        assert f['boolean'] ==  True
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        assert f['NOM_FR'] ==  u'Qu?bec'
+ 
+     def test_geojson_from_in_memory_string():
+         # will silently fail since it is a geometry and needs to be a featurecollection.
+@@ -107,21 +103,21 @@ if 'geojson' in mapnik.DatasourceCache.p
+         ds = mapnik.Datasource(
+             type='geojson',
+             inline='{ "type":"FeatureCollection", "features": [ { "type":"Feature", "properties":{"name":"test"}, "geometry": { "type":"LineString","coordinates":[[0,0],[10,10]] } } ]}')
+-        eq_(len(ds.fields()), 1)
++        assert len(ds.fields()) ==  1
+         f = list(ds.all_features())[0]
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.LineString)
+-        eq_(f['name'], u'test')
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.LineString
++        assert f['name'] ==  u'test'
+ 
+ #    @raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
+         ds = mapnik.Datasource(
+             type='geojson',
+             file='../data/json/escaped.geojson')
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+         # TODO - this sorting is messed up
+-        #eq_(ds.fields(),['name', 'int', 'double', 'description', 'boolean', 'NOM_FR'])
+-        #eq_(ds.field_types(),['str', 'int', 'float', 'str', 'bool', 'str'])
++        #assert ds.fields() == ['name', 'int', 'double', 'description', 'boolean', 'NOM_FR']
++        #assert ds.field_types() == ['str', 'int', 'float', 'str', 'bool', 'str']
+ # TODO - should geojson plugin throw like others?
+ #        query = mapnik.Query(ds.envelope())
+ #        for fld in ds.fields():
+@@ -137,9 +133,5 @@ if 'geojson' in mapnik.DatasourceCache.p
+         f = list(ds.all_features())[0]
+ 
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_(f['feat_name'], u'feat_value')
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert f['feat_name'] ==  u'feat_value'
+--- a/test/python_tests/geometry_io_test.py
++++ b/test/python_tests/geometry_io_test.py
+@@ -1,25 +1,12 @@
+-# encoding: utf8
+-
+-import os
+ from binascii import unhexlify
+-
+-from nose.tools import eq_, assert_raises
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
+-
++import pytest
+ try:
+     import json
+ except ImportError:
+     import simplejson as json
+ 
+ 
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+ wkts = [
+     [mapnik.GeometryType.Point,
+      "POINT(30 10)",
+@@ -183,20 +170,20 @@ unsupported_wkb = [
+ 
+ def test_path_geo_interface():
+     geom = mapnik.Geometry.from_wkt('POINT(0 0)')
+-    eq_(geom.__geo_interface__, {u'type': u'Point', u'coordinates': [0, 0]})
++    assert geom.__geo_interface__ == {u'type': u'Point', u'coordinates': [0, 0]}
+ 
+ 
+ def test_valid_wkb_parsing():
+     count = 0
+     for wkb in empty_wkbs:
+         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
+-        eq_(geom.is_empty(), True)
+-        eq_(geom.type(), wkb[0])
++        assert geom.is_empty() ==  True
++        assert geom.type() ==  wkb[0]
+ 
+     for wkb in wkts:
+         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
+-        eq_(geom.is_empty(), False)
+-        eq_(geom.type(), wkb[0])
++        assert geom.is_empty() ==  False
++        assert geom.type() ==  wkb[0]
+ 
+ 
+ def test_wkb_parsing_error():
+@@ -205,7 +192,7 @@ def test_wkb_parsing_error():
+         try:
+             geom = mapnik.Geometry.from_wkb(unhexlify(wkb))
+             # should not get here
+-            eq_(True, False)
++            assert True ==  False
+         except:
+             pass
+     assert True
+@@ -218,8 +205,8 @@ def test_empty_wkb_parsing():
+     count = 0
+     for wkb in partially_empty_wkb:
+         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
+-        eq_(geom.type(), wkb[0])
+-        eq_(geom.is_empty(), False)
++        assert geom.type() ==  wkb[0]
++        assert geom.is_empty() ==  False
+ 
+ 
+ def test_geojson_parsing():
+@@ -228,14 +215,14 @@ def test_geojson_parsing():
+     for j in geojson:
+         count += 1
+         geometries.append(mapnik.Geometry.from_geojson(j[1]))
+-    eq_(count, len(geometries))
++    assert count ==  len(geometries)
+ 
+ 
+ def test_geojson_parsing_reversed():
+     for idx, j in enumerate(geojson_reversed):
+         g1 = mapnik.Geometry.from_geojson(j)
+         g2 = mapnik.Geometry.from_geojson(geojson[idx][1])
+-        eq_(g1.to_geojson(), g2.to_geojson())
++        assert g1.to_geojson() ==  g2.to_geojson()
+ 
+ # http://geojson.org/geojson-spec.html#positions
+ 
+@@ -243,44 +230,44 @@ def test_geojson_parsing_reversed():
+ def test_geojson_point_positions():
+     input_json = '{"type":"Point","coordinates":[30,10]}'
+     geom = mapnik.Geometry.from_geojson(input_json)
+-    eq_(geom.to_geojson(), input_json)
++    assert geom.to_geojson() ==  input_json
+     # should ignore all but the first two
+     geom = mapnik.Geometry.from_geojson(
+         '{"type":"Point","coordinates":[30,10,50,50,50,50]}')
+-    eq_(geom.to_geojson(), input_json)
++    assert geom.to_geojson() ==  input_json
+ 
+ 
+ def test_geojson_point_positions2():
+     input_json = '{"type":"LineString","coordinates":[[30,10],[10,30],[40,40]]}'
+     geom = mapnik.Geometry.from_geojson(input_json)
+-    eq_(geom.to_geojson(), input_json)
++    assert geom.to_geojson() ==  input_json
+ 
+     # should ignore all but the first two
+     geom = mapnik.Geometry.from_geojson(
+         '{"type":"LineString","coordinates":[[30.0,10.0,0,0,0],[10.0,30.0,0,0,0],[40.0,40.0,0,0,0]]}')
+-    eq_(geom.to_geojson(), input_json)
++    assert geom.to_geojson() ==  input_json
+ 
+ 
+ def compare_wkb_from_wkt(wkt, type):
+     geom = mapnik.Geometry.from_wkt(wkt)
+-    eq_(geom.type(), type)
++    assert geom.type() ==  type
+ 
+ 
+ def compare_wkt_to_geojson(idx, wkt, num=None):
+     geom = mapnik.Geometry.from_wkt(wkt)
+     # ensure both have same result
+     gj = geom.to_geojson()
+-    eq_(len(gj) > 1, True)
++    assert len(gj) > 1
+     a = json.loads(gj)
+     e = json.loads(geojson[idx][1])
+-    eq_(a, e)
++    assert a ==  e
+ 
+ 
+ def test_wkt_simple():
+     for wkt in wkts:
+         try:
+             geom = mapnik.Geometry.from_wkt(wkt[1])
+-            eq_(geom.type(), wkt[0])
++            assert geom.type() ==  wkt[0]
+         except RuntimeError as e:
+             raise RuntimeError('%s %s' % (e, wkt))
+ 
+@@ -308,7 +295,7 @@ def test_wkt_rounding():
+     # if precision is set to 15 still fails due to very subtle rounding issues
+     wkt = "POLYGON((7.904185 54.180426,7.89918 54.178168,7.897715 54.182318,7.893565 54.183111,7.890391 54.187567,7.885874 54.19068,7.879893 54.193915,7.894541 54.194647,7.900645 54.19068,7.904185 54.180426))"
+     geom = mapnik.Geometry.from_wkt(wkt)
+-    eq_(geom.type(), mapnik.GeometryType.Polygon)
++    assert geom.type() ==  mapnik.GeometryType.Polygon
+ 
+ 
+ def test_wkt_collection_flattening():
+@@ -316,7 +303,7 @@ def test_wkt_collection_flattening():
+     # currently fails as the MULTIPOLYGON inside will be returned as multiple polygons - not a huge deal - should we worry?
+     #wkt = "GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20))),LINESTRING(2 3,3 4))"
+     geom = mapnik.Geometry.from_wkt(wkt)
+-    eq_(geom.type(), mapnik.GeometryType.GeometryCollection)
++    assert geom.type() ==  mapnik.GeometryType.GeometryCollection
+ 
+ 
+ def test_creating_feature_from_geojson():
+@@ -327,8 +314,8 @@ def test_creating_feature_from_geojson()
+     }
+     ctx = mapnik.Context()
+     feat = mapnik.Feature.from_geojson(json.dumps(json_feat), ctx)
+-    eq_(feat.id(), 1)
+-    eq_(feat['name'], u'value')
++    assert feat.id() ==  1
++    assert feat['name'] ==  u'value'
+ 
+ 
+ def test_handling_valid_geojson_empty_geometries():
+@@ -336,13 +323,10 @@ def test_handling_valid_geojson_empty_ge
+         geom = mapnik.Geometry.from_geojson(json)
+         out_json = geom.to_geojson()
+         # check round trip
+-        eq_(json.replace(" ",""), out_json)
++        assert json.replace(" ","") ==  out_json
+ 
+ 
+ def test_handling_invalid_geojson_empty_geometries():
+-    for json in invalid_empty_geometries:
+-        assert_raises(RuntimeError, mapnik.Geometry.from_geojson, json)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    for json in invalid_empty_geometries:
++        with pytest.raises(RuntimeError):
++            mapnik.Geometry.from_geojson(json)
+--- a/test/python_tests/grayscale_test.py
++++ b/test/python_tests/grayscale_test.py
+@@ -1,16 +1,8 @@
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+-
+ def test_grayscale_conversion():
+     im = mapnik.Image(2, 2)
+     im.fill(mapnik.Color('white'))
+     im.set_grayscale_to_alpha()
+     pixel = im.get_pixel(0, 0)
+-    eq_((pixel >> 24) & 0xff, 255)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert (pixel >> 24) & 0xff == 255
+--- a/test/python_tests/image_encoding_speed_test.py
++++ b/test/python_tests/image_encoding_speed_test.py
+@@ -1,19 +1,6 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+ from timeit import Timer, time
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+ combinations = ['png',
+                 'png8',
+                 'png8:m=o',
+@@ -121,9 +108,3 @@ def do_encoding():
+         print(
+             'min: %sms | avg: %sms | total: %sms | len: %s <-- %s' %
+             (min_, avg, elapsed, size, name))
+-
+-
+-if __name__ == "__main__":
+-    setup()
+-    do_encoding()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/image_filters_test.py
++++ b/test/python_tests/image_filters_test.py
+@@ -1,36 +1,29 @@
+-#!/usr/bin/env python
+-
+-import os
+-import re
+-
+-from nose.tools import eq_
+-
++import re, os
+ import mapnik
++import pytest
++from .utilities import side_by_side_image, execution_path
+ 
+-from .utilities import execution_path, run_all, side_by_side_image
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ def replace_style(m, name, style):
+     m.remove_style(name)
+     m.append_style(name, style)
+ 
+-
+ def test_append():
+     s = mapnik.Style()
+-    eq_(s.image_filters, '')
++    assert s.image_filters ==  ''
+     s.image_filters = 'gray'
+-    eq_(s.image_filters, 'gray')
++    assert s.image_filters ==  'gray'
+     s.image_filters = 'sharpen'
+-    eq_(s.image_filters, 'sharpen')
++    assert s.image_filters ==  'sharpen'
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+-    def test_style_level_image_filter():
++    def test_style_level_image_filter(setup):
+         m = mapnik.Map(256, 256)
+         mapnik.load_map(m, '../data/good_maps/style_level_image_filter.xml')
+         m.zoom_all()
+@@ -66,15 +59,11 @@ if 'shape' in mapnik.DatasourceCache.plu
+             else:
+                 fails.append(
+                     'failed comparing actual (%s) and expected(%s)' %
+-                    (actual, 'tests/python_tests/' + expected))
++                    (actual, expected))
+                 fail_im = side_by_side_image(expected_im, im)
+                 fail_im.save(
+                     '/tmp/mapnik-style-image-filter-' +
+                     filename +
+                     '.fail.png',
+                     'png32')
+-        eq_(len(fails), 0, '\n' + '\n'.join(fails))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert len(fails) ==  0, '\n' + '\n'.join(fails)
+--- a/test/python_tests/image_test.py
++++ b/test/python_tests/image_test.py
+@@ -1,48 +1,38 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-import sys
+-
+-from nose.tools import assert_almost_equal, eq_, raises
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import READ_FLAGS, execution_path, get_unique_colors, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-if PYTHON3:
+-    buffer = memoryview
+-
++from .utilities import READ_FLAGS, get_unique_colors, execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_type():
++def test_type(setup):
+     im = mapnik.Image(256, 256)
+-    eq_(im.get_type(), mapnik.ImageType.rgba8)
++    assert im.get_type() ==  mapnik.ImageType.rgba8
+     im = mapnik.Image(256, 256, mapnik.ImageType.gray8)
+-    eq_(im.get_type(), mapnik.ImageType.gray8)
++    assert im.get_type() ==  mapnik.ImageType.gray8
+ 
+ 
+ def test_image_premultiply():
+     im = mapnik.Image(256, 256)
+-    eq_(im.premultiplied(), False)
++    assert im.premultiplied() ==  False
+     # Premultiply should return true that it worked
+-    eq_(im.premultiply(), True)
+-    eq_(im.premultiplied(), True)
++    assert im.premultiply() ==  True
++    assert im.premultiplied() ==  True
+     # Premultipling again should return false as nothing should happen
+-    eq_(im.premultiply(), False)
+-    eq_(im.premultiplied(), True)
++    assert im.premultiply() ==  False
++    assert im.premultiplied() ==  True
+     # Demultiply should return true that it worked
+-    eq_(im.demultiply(), True)
+-    eq_(im.premultiplied(), False)
++    assert im.demultiply() ==  True
++    assert im.premultiplied() ==  False
+     # Demultiply again should not work and return false as it did nothing
+-    eq_(im.demultiply(), False)
+-    eq_(im.premultiplied(), False)
++    assert im.demultiply() ==  False
++    assert im.premultiplied() ==  False
+ 
+ 
+ def test_image_premultiply_values():
+@@ -50,18 +40,18 @@ def test_image_premultiply_values():
+     im.fill(mapnik.Color(16, 33, 255, 128))
+     im.premultiply()
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.r, 8)
+-    eq_(c.g, 17)
+-    eq_(c.b, 128)
+-    eq_(c.a, 128)
++    assert c.r ==  8
++    assert c.g ==  17
++    assert c.b ==  128
++    assert c.a ==  128
+     im.demultiply()
+     # Do to the nature of this operation the result will not be exactly the
+     # same
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.r, 15)
+-    eq_(c.g, 33)
+-    eq_(c.b, 255)
+-    eq_(c.a, 128)
++    assert c.r ==  15
++    assert c.g ==  33
++    assert c.b ==  255
++    assert c.a ==  128
+ 
+ 
+ def test_apply_opacity():
+@@ -69,32 +59,32 @@ def test_apply_opacity():
+     im.fill(mapnik.Color(128, 128, 128, 128))
+     im.apply_opacity(0.75)
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.r, 128)
+-    eq_(c.g, 128)
+-    eq_(c.b, 128)
+-    eq_(c.a, 96)
++    assert c.r ==  128
++    assert c.g ==  128
++    assert c.b ==  128
++    assert c.a ==  96
+ 
+ 
+ def test_background():
+     im = mapnik.Image(256, 256)
+-    eq_(im.premultiplied(), False)
++    assert im.premultiplied() ==  False
+     im.fill(mapnik.Color(32, 64, 125, 128))
+-    eq_(im.premultiplied(), False)
++    assert im.premultiplied() ==  False
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.get_premultiplied(), False)
+-    eq_(c.r, 32)
+-    eq_(c.g, 64)
+-    eq_(c.b, 125)
+-    eq_(c.a, 128)
++    assert c.get_premultiplied() ==  False
++    assert c.r ==  32
++    assert c.g ==  64
++    assert c.b ==  125
++    assert c.a ==  128
+     # Now again with a premultiplied alpha
+     im.fill(mapnik.Color(32, 64, 125, 128, True))
+-    eq_(im.premultiplied(), True)
++    assert im.premultiplied() ==  True
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.get_premultiplied(), True)
+-    eq_(c.r, 32)
+-    eq_(c.g, 64)
+-    eq_(c.b, 125)
+-    eq_(c.a, 128)
++    assert c.get_premultiplied() ==  True
++    assert c.r ==  32
++    assert c.g ==  64
++    assert c.b ==  125
++    assert c.a ==  128
+ 
+ 
+ def test_set_and_get_pixel():
+@@ -106,27 +96,27 @@ def test_set_and_get_pixel():
+     im.set_pixel(1, 1, c0_pre)
+     # No differences for non premultiplied pixels
+     c1_int = mapnik.Color(im.get_pixel(0, 0))
+-    eq_(c0.r, c1_int.r)
+-    eq_(c0.g, c1_int.g)
+-    eq_(c0.b, c1_int.b)
+-    eq_(c0.a, c1_int.a)
++    assert c0.r ==  c1_int.r
++    assert c0.g ==  c1_int.g
++    assert c0.b ==  c1_int.b
++    assert c0.a ==  c1_int.a
+     c1 = im.get_pixel(0, 0, True)
+-    eq_(c0.r, c1.r)
+-    eq_(c0.g, c1.g)
+-    eq_(c0.b, c1.b)
+-    eq_(c0.a, c1.a)
++    assert c0.r ==  c1.r
++    assert c0.g ==  c1.g
++    assert c0.b ==  c1.b
++    assert c0.a ==  c1.a
+     # The premultiplied Color should be demultiplied before being applied.
+     c0_pre.demultiply()
+     c1_int = mapnik.Color(im.get_pixel(1, 1))
+-    eq_(c0_pre.r, c1_int.r)
+-    eq_(c0_pre.g, c1_int.g)
+-    eq_(c0_pre.b, c1_int.b)
+-    eq_(c0_pre.a, c1_int.a)
++    assert c0_pre.r ==  c1_int.r
++    assert c0_pre.g ==  c1_int.g
++    assert c0_pre.b ==  c1_int.b
++    assert c0_pre.a ==  c1_int.a
+     c1 = im.get_pixel(1, 1, True)
+-    eq_(c0_pre.r, c1.r)
+-    eq_(c0_pre.g, c1.g)
+-    eq_(c0_pre.b, c1.b)
+-    eq_(c0_pre.a, c1.a)
++    assert c0_pre.r ==  c1.r
++    assert c0_pre.g ==  c1.g
++    assert c0_pre.b ==  c1.b
++    assert c0_pre.a ==  c1.a
+ 
+     # Now create a new image that is premultiplied
+     im = mapnik.Image(256, 256, mapnik.ImageType.rgba8, True, True)
+@@ -138,26 +128,26 @@ def test_set_and_get_pixel():
+     # premultiply c0
+     c0.premultiply()
+     c1_int = mapnik.Color(im.get_pixel(0, 0))
+-    eq_(c0.r, c1_int.r)
+-    eq_(c0.g, c1_int.g)
+-    eq_(c0.b, c1_int.b)
+-    eq_(c0.a, c1_int.a)
++    assert c0.r ==  c1_int.r
++    assert c0.g ==  c1_int.g
++    assert c0.b ==  c1_int.b
++    assert c0.a ==  c1_int.a
+     c1 = im.get_pixel(0, 0, True)
+-    eq_(c0.r, c1.r)
+-    eq_(c0.g, c1.g)
+-    eq_(c0.b, c1.b)
+-    eq_(c0.a, c1.a)
++    assert c0.r ==  c1.r
++    assert c0.g ==  c1.g
++    assert c0.b ==  c1.b
++    assert c0.a ==  c1.a
+     # The premultiplied Color should be the same though
+     c1_int = mapnik.Color(im.get_pixel(1, 1))
+-    eq_(c0_pre.r, c1_int.r)
+-    eq_(c0_pre.g, c1_int.g)
+-    eq_(c0_pre.b, c1_int.b)
+-    eq_(c0_pre.a, c1_int.a)
++    assert c0_pre.r ==  c1_int.r
++    assert c0_pre.g ==  c1_int.g
++    assert c0_pre.b ==  c1_int.b
++    assert c0_pre.a ==  c1_int.a
+     c1 = im.get_pixel(1, 1, True)
+-    eq_(c0_pre.r, c1.r)
+-    eq_(c0_pre.g, c1.g)
+-    eq_(c0_pre.b, c1.b)
+-    eq_(c0_pre.a, c1.a)
++    assert c0_pre.r ==  c1.r
++    assert c0_pre.g ==  c1.g
++    assert c0_pre.b ==  c1.b
++    assert c0_pre.a ==  c1.a
+ 
+ 
+ def test_pixel_gray8():
+@@ -165,9 +155,9 @@ def test_pixel_gray8():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), 0)
++        assert im.get_pixel(0, 0) ==  0
+ 
+ 
+ def test_pixel_gray8s():
+@@ -175,9 +165,9 @@ def test_pixel_gray8s():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) ==  -v
+ 
+ 
+ def test_pixel_gray16():
+@@ -185,9 +175,9 @@ def test_pixel_gray16():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), 0)
++        assert im.get_pixel(0, 0) ==  0
+ 
+ 
+ def test_pixel_gray16s():
+@@ -195,9 +185,9 @@ def test_pixel_gray16s():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) ==  -v
+ 
+ 
+ def test_pixel_gray32():
+@@ -205,9 +195,9 @@ def test_pixel_gray32():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), 0)
++        assert im.get_pixel(0, 0) ==  0
+ 
+ 
+ def test_pixel_gray32s():
+@@ -215,9 +205,9 @@ def test_pixel_gray32s():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) ==  -v
+ 
+ 
+ def test_pixel_gray64():
+@@ -225,9 +215,9 @@ def test_pixel_gray64():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), 0)
++        assert im.get_pixel(0, 0) ==  0
+ 
+ 
+ def test_pixel_gray64s():
+@@ -235,9 +225,9 @@ def test_pixel_gray64s():
+     val_list = range(20)
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        eq_(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) ==  v
+         im.set_pixel(0, 0, -v)
+-        eq_(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) ==  -v
+ 
+ 
+ def test_pixel_floats():
+@@ -245,9 +235,9 @@ def test_pixel_floats():
+     val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1]
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        assert_almost_equal(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) == pytest.approx(v)
+         im.set_pixel(0, 0, -v)
+-        assert_almost_equal(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) == pytest.approx(-v)
+ 
+ 
+ def test_pixel_doubles():
+@@ -255,80 +245,80 @@ def test_pixel_doubles():
+     val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1]
+     for v in val_list:
+         im.set_pixel(0, 0, v)
+-        assert_almost_equal(im.get_pixel(0, 0), v)
++        assert im.get_pixel(0, 0) == pytest.approx(v)
+         im.set_pixel(0, 0, -v)
+-        assert_almost_equal(im.get_pixel(0, 0), -v)
++        assert im.get_pixel(0, 0) == pytest.approx(-v)
+ 
+ 
+ def test_pixel_overflow():
+     im = mapnik.Image(4, 4, mapnik.ImageType.gray8)
+     im.set_pixel(0, 0, 256)
+-    eq_(im.get_pixel(0, 0), 255)
++    assert im.get_pixel(0, 0) ==  255
+ 
+ 
+ def test_pixel_underflow():
+     im = mapnik.Image(4, 4, mapnik.ImageType.gray8)
+     im.set_pixel(0, 0, -1)
+-    eq_(im.get_pixel(0, 0), 0)
++    assert im.get_pixel(0, 0) ==  0
+     im = mapnik.Image(4, 4, mapnik.ImageType.gray16)
+     im.set_pixel(0, 0, -1)
+-    eq_(im.get_pixel(0, 0), 0)
++    assert im.get_pixel(0, 0) ==  0
+ 
+ 
+-@raises(IndexError)
+ def test_set_pixel_out_of_range_1():
+-    im = mapnik.Image(4, 4)
+-    c = mapnik.Color('blue')
+-    im.set_pixel(5, 5, c)
++    with pytest.raises(IndexError):
++        im = mapnik.Image(4, 4)
++        c = mapnik.Color('blue')
++        im.set_pixel(5, 5, c)
+ 
+ 
+-@raises(OverflowError)
+ def test_set_pixel_out_of_range_2():
+-    im = mapnik.Image(4, 4)
+-    c = mapnik.Color('blue')
+-    im.set_pixel(-1, 1, c)
++    with pytest.raises(OverflowError):
++        im = mapnik.Image(4, 4)
++        c = mapnik.Color('blue')
++        im.set_pixel(-1, 1, c)
+ 
+ 
+-@raises(IndexError)
+ def test_get_pixel_out_of_range_1():
+-    im = mapnik.Image(4, 4)
+-    c = im.get_pixel(5, 5)
++    with pytest.raises(IndexError):
++        im = mapnik.Image(4, 4)
++        c = im.get_pixel(5, 5)
+ 
+ 
+-@raises(OverflowError)
+ def test_get_pixel_out_of_range_2():
+-    im = mapnik.Image(4, 4)
+-    c = im.get_pixel(-1, 1)
++    with pytest.raises(OverflowError):
++        im = mapnik.Image(4, 4)
++        c = im.get_pixel(-1, 1)
+ 
+ 
+-@raises(IndexError)
+ def test_get_pixel_color_out_of_range_1():
+-    im = mapnik.Image(4, 4)
+-    c = im.get_pixel(5, 5, True)
++    with pytest.raises(IndexError):
++        im = mapnik.Image(4, 4)
++        c = im.get_pixel(5, 5, True)
+ 
+ 
+-@raises(OverflowError)
+ def test_get_pixel_color_out_of_range_2():
+-    im = mapnik.Image(4, 4)
+-    c = im.get_pixel(-1, 1, True)
++    with pytest.raises(OverflowError):
++        im = mapnik.Image(4, 4)
++        c = im.get_pixel(-1, 1, True)
+ 
+ 
+ def test_set_color_to_alpha():
+     im = mapnik.Image(256, 256)
+     im.fill(mapnik.Color('rgba(12,12,12,255)'))
+-    eq_(get_unique_colors(im), ['rgba(12,12,12,255)'])
++    assert get_unique_colors(im) == ['rgba(12,12,12,255)']
+     im.set_color_to_alpha(mapnik.Color('rgba(12,12,12,0)'))
+-    eq_(get_unique_colors(im), ['rgba(0,0,0,0)'])
++    assert get_unique_colors(im) == ['rgba(0,0,0,0)']
+ 
+ 
+-@raises(RuntimeError)
+ def test_negative_image_dimensions():
+-    # TODO - this may have regressed in
+-    # https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7
+-    im = mapnik.Image(-40, 40)
+-    # should not get here
+-    eq_(im.width(), 0)
+-    eq_(im.height(), 0)
++    with pytest.raises(RuntimeError):
++        # TODO - this may have regressed in
++        # https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7
++        im = mapnik.Image(-40, 40)
++        # should not get here
++        assert im.width() ==  0
++        assert im.height() ==  0
+ 
+ 
+ def test_jpeg_round_trip():
+@@ -339,14 +329,14 @@ def test_jpeg_round_trip():
+     im2 = mapnik.Image.open(filepath)
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
+-    eq_(len(im.tostring()), len(im2.tostring()))
+-    eq_(len(im.tostring('jpeg')), len(im2.tostring('jpeg')))
+-    eq_(len(im.tostring()), len(im3.tostring()))
+-    eq_(len(im.tostring('jpeg')), len(im3.tostring('jpeg')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
++    assert len(im.tostring()) ==  len(im2.tostring())
++    assert len(im.tostring('jpeg')) ==  len(im2.tostring('jpeg'))
++    assert len(im.tostring()) ==  len(im3.tostring())
++    assert len(im.tostring('jpeg')) ==  len(im3.tostring('jpeg'))
+ 
+ 
+ def test_png_round_trip():
+@@ -357,16 +347,16 @@ def test_png_round_trip():
+     im2 = mapnik.Image.open(filepath)
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
+-    eq_(len(im.tostring()), len(im2.tostring()))
+-    eq_(len(im.tostring('png')), len(im2.tostring('png')))
+-    eq_(len(im.tostring('png8')), len(im2.tostring('png8')))
+-    eq_(len(im.tostring()), len(im3.tostring()))
+-    eq_(len(im.tostring('png')), len(im3.tostring('png')))
+-    eq_(len(im.tostring('png8')), len(im3.tostring('png8')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
++    assert len(im.tostring()) ==  len(im2.tostring())
++    assert len(im.tostring('png')) ==  len(im2.tostring('png'))
++    assert len(im.tostring('png8')) ==  len(im2.tostring('png8'))
++    assert len(im.tostring()) ==  len(im3.tostring())
++    assert len(im.tostring('png')) ==  len(im3.tostring('png'))
++    assert len(im.tostring('png8')) ==  len(im3.tostring('png8'))
+ 
+ 
+ def test_image_open_from_string():
+@@ -374,18 +364,14 @@ def test_image_open_from_string():
+     im1 = mapnik.Image.open(filepath)
+     with open(filepath, READ_FLAGS) as f:
+         im2 = mapnik.Image.fromstring(f.read())
+-    eq_(im1.width(), im2.width())
++    assert im1.width() ==  im2.width()
+     length = len(im1.tostring())
+-    eq_(length, len(im2.tostring()))
+-    eq_(len(mapnik.Image.fromstring(im1.tostring('png')).tostring()), length)
+-    eq_(len(mapnik.Image.fromstring(im1.tostring('jpeg')).tostring()), length)
+-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('png'))).tostring()), length)
+-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('jpeg'))).tostring()), length)
++    assert length ==  len(im2.tostring())
++    assert len(mapnik.Image.fromstring(im1.tostring('png')).tostring()) ==  length
++    assert len(mapnik.Image.fromstring(im1.tostring('jpeg')).tostring()) ==  length
++    assert len(mapnik.Image.frombuffer(memoryview(im1.tostring('png'))).tostring()) ==  length
++    assert len(mapnik.Image.frombuffer(memoryview(im1.tostring('jpeg'))).tostring()) ==  length
+ 
+     # TODO - https://github.com/mapnik/mapnik/issues/1831
+-    eq_(len(mapnik.Image.fromstring(im1.tostring('tiff')).tostring()), length)
+-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('tiff'))).tostring()), length)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert len(mapnik.Image.fromstring(im1.tostring('tiff')).tostring()) ==  length
++    assert len(mapnik.Image.frombuffer(memoryview(im1.tostring('tiff'))).tostring()) ==  length
+--- a/test/python_tests/image_tiff_test.py
++++ b/test/python_tests/image_tiff_test.py
+@@ -1,27 +1,20 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import hashlib
+ import os
+-
+-from nose.tools import assert_not_equal, eq_
+-
++import hashlib
+ import mapnik
++import pytest
++from .utilities import READ_FLAGS, execution_path
+ 
+-from .utilities import READ_FLAGS, execution_path, run_all
+-
+-
+-def hashstr(var):
+-    return hashlib.md5(var).hexdigest()
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
++def hashstr(var):
++    return hashlib.md5(var).hexdigest()
+ 
+-def test_tiff_round_trip_scanline():
++def test_tiff_round_trip_scanline(setup):
+     filepath = '/tmp/mapnik-tiff-io-scanline.tiff'
+     im = mapnik.Image(255, 267)
+     im.fill(mapnik.Color('rgba(12,255,128,.5)'))
+@@ -30,26 +23,21 @@ def test_tiff_round_trip_scanline():
+     im2 = mapnik.Image.open(filepath)
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
+-    eq_(hashstr(im.tostring()), org_str)
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
++    assert hashstr(im.tostring()) ==  org_str
+     # This won't be the same the first time around because the im is not
+     # premultiplied and im2 is
+-    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    assert_not_equal(
+-        hashstr(
+-            im.tostring('tiff:method=scanline')), hashstr(
+-            im2.tostring('tiff:method=scanline')))
++    assert not hashstr(im.tostring()) == hashstr(im2.tostring())
++    assert not hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
+     # Now premultiply
+     im.premultiply()
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=scanline')),
+-        hashstr(im2.tostring('tiff:method=scanline')))
+-    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+-    eq_(hashstr(im2.tostring('tiff:method=scanline')),
+-        hashstr(im3.tostring('tiff:method=scanline')))
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
++    assert hashstr(im2.tostring()) ==  hashstr(im3.tostring())
++    assert hashstr(im2.tostring('tiff:method=scanline')) == hashstr(im3.tostring('tiff:method=scanline'))
+ 
+ 
+ def test_tiff_round_trip_stripped():
+@@ -62,27 +50,22 @@ def test_tiff_round_trip_stripped():
+     im2.save('/tmp/mapnik-tiff-io-stripped2.tiff', 'tiff:method=stripped')
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
+     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
+     # difference in tags.
+-    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    assert_not_equal(
+-        hashstr(
+-            im.tostring('tiff:method=stripped')), hashstr(
+-            im2.tostring('tiff:method=stripped')))
++    assert not hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert not hashstr(im.tostring('tiff:method=stripped')) ==  hashstr(im2.tostring('tiff:method=stripped'))
+     # Now if we premultiply they will be exactly the same
+     im.premultiply()
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=stripped')),
+-        hashstr(im2.tostring('tiff:method=stripped')))
+-    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=stripped')) == hashstr(im2.tostring('tiff:method=stripped'))
++    assert hashstr(im2.tostring()) ==  hashstr(im3.tostring())
+     # Both of these started out premultiplied, so this round trip should be
+     # exactly the same!
+-    eq_(hashstr(im2.tostring('tiff:method=stripped')),
+-        hashstr(im3.tostring('tiff:method=stripped')))
++    assert hashstr(im2.tostring('tiff:method=stripped')) == hashstr(im3.tostring('tiff:method=stripped'))
+ 
+ 
+ def test_tiff_round_trip_rows_stripped():
+@@ -91,42 +74,38 @@ def test_tiff_round_trip_rows_stripped()
+     im = mapnik.Image(255, 267)
+     im.fill(mapnik.Color('rgba(12,255,128,.5)'))
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.r, 12)
+-    eq_(c.g, 255)
+-    eq_(c.b, 128)
+-    eq_(c.a, 128)
+-    eq_(c.get_premultiplied(), False)
++    assert c.r ==  12
++    assert c.g ==  255
++    assert c.b ==  128
++    assert c.a ==  128
++    assert c.get_premultiplied() ==  False
+     im.save(filepath, 'tiff:method=stripped:rows_per_strip=8')
+     im2 = mapnik.Image.open(filepath)
+     c2 = im2.get_pixel(0, 0, True)
+-    eq_(c2.r, 6)
+-    eq_(c2.g, 128)
+-    eq_(c2.b, 64)
+-    eq_(c2.a, 128)
+-    eq_(c2.get_premultiplied(), True)
++    assert c2.r ==  6
++    assert c2.g ==  128
++    assert c2.b ==  64
++    assert c2.a ==  128
++    assert c2.get_premultiplied() ==  True
+     im2.save(filepath2, 'tiff:method=stripped:rows_per_strip=8')
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
+     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
+     # difference in tags.
+-    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    assert_not_equal(
+-        hashstr(
+-            im.tostring('tiff:method=stripped:rows_per_strip=8')), hashstr(
+-            im2.tostring('tiff:method=stripped:rows_per_strip=8')))
++    assert not hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert not hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')) ==  hashstr(
++        im2.tostring('tiff:method=stripped:rows_per_strip=8'))
+     # Now premultiply the first image and they will be the same!
+     im.premultiply()
+-    eq_(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),
+-        hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')))
+-    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
++    assert hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')) == hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8'))
++    assert hashstr(im2.tostring()) ==  hashstr(im3.tostring())
+     # Both of these started out premultiplied, so this round trip should be
+     # exactly the same!
+-    eq_(hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')),
+-        hashstr(im3.tostring('tiff:method=stripped:rows_per_strip=8')))
++    assert hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')) == hashstr(im3.tostring('tiff:method=stripped:rows_per_strip=8'))
+ 
+ 
+ def test_tiff_round_trip_buffered_tiled():
+@@ -136,44 +115,40 @@ def test_tiff_round_trip_buffered_tiled(
+     im = mapnik.Image(255, 267)
+     im.fill(mapnik.Color('rgba(33,255,128,.5)'))
+     c = im.get_pixel(0, 0, True)
+-    eq_(c.r, 33)
+-    eq_(c.g, 255)
+-    eq_(c.b, 128)
+-    eq_(c.a, 128)
+-    eq_(c.get_premultiplied(), False)
++    assert c.r ==  33
++    assert c.g ==  255
++    assert c.b ==  128
++    assert c.a ==  128
++    assert not c.get_premultiplied()
+     im.save(filepath, 'tiff:method=tiled:tile_width=32:tile_height=32')
+     im2 = mapnik.Image.open(filepath)
+     c2 = im2.get_pixel(0, 0, True)
+-    eq_(c2.r, 17)
+-    eq_(c2.g, 128)
+-    eq_(c2.b, 64)
+-    eq_(c2.a, 128)
+-    eq_(c2.get_premultiplied(), True)
++    assert c2.r ==  17
++    assert c2.g ==  128
++    assert c2.b ==  64
++    assert c2.a ==  128
++    assert c2.get_premultiplied()
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+     im2.save(filepath2, 'tiff:method=tiled:tile_width=32:tile_height=32')
+     im3.save(filepath3, 'tiff:method=tiled:tile_width=32:tile_height=32')
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
+     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
+     # difference in tags.
+-    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    assert_not_equal(
+-        hashstr(
+-            im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')), hashstr(
+-            im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
++    assert not hashstr(im.tostring()) == hashstr(im2.tostring())
++    assert not hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')) ==  hashstr(
++        im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))
+     # Now premultiply the first image and they should be the same
+     im.premultiply()
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),
+-        hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
+-    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')) == hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))
++    assert hashstr(im2.tostring()) ==  hashstr(im3.tostring())
+     # Both of these started out premultiplied, so this round trip should be
+     # exactly the same!
+-    eq_(hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),
+-        hashstr(im3.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
++    assert hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')) == hashstr(im3.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))
+ 
+ 
+ def test_tiff_round_trip_tiled():
+@@ -184,27 +159,22 @@ def test_tiff_round_trip_tiled():
+     im2 = mapnik.Image.open(filepath)
+     with open(filepath, READ_FLAGS) as f:
+         im3 = mapnik.Image.fromstring(f.read())
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(im.width(), im3.width())
+-    eq_(im.height(), im3.height())
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert im.width() ==  im3.width()
++    assert im.height() ==  im3.height()
+     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
+     # difference in tags.
+-    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    assert_not_equal(
+-        hashstr(
+-            im.tostring('tiff:method=tiled')), hashstr(
+-            im2.tostring('tiff:method=tiled')))
++    assert not hashstr(im.tostring()) == hashstr(im2.tostring())
++    assert not hashstr(im.tostring('tiff:method=tiled')) ==  hashstr(im2.tostring('tiff:method=tiled'))
+     # Now premultiply the first image and they will be exactly the same.
+     im.premultiply()
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled')),
+-        hashstr(im2.tostring('tiff:method=tiled')))
+-    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled')) == hashstr(im2.tostring('tiff:method=tiled'))
++    assert hashstr(im2.tostring()) ==  hashstr(im3.tostring())
+     # Both of these started out premultiplied, so this round trip should be
+     # exactly the same!
+-    eq_(hashstr(im2.tostring('tiff:method=tiled')),
+-        hashstr(im3.tostring('tiff:method=tiled')))
++    assert hashstr(im2.tostring('tiff:method=tiled')) == hashstr(im3.tostring('tiff:method=tiled'))
+ 
+ 
+ def test_tiff_rgb8_compare():
+@@ -213,13 +183,12 @@ def test_tiff_rgb8_compare():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff')), hashstr(im2.tostring('tiff')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff')) ==  hashstr(im2.tostring('tiff'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff"))
+ 
+ 
+ def test_tiff_rgba8_compare_scanline():
+@@ -228,14 +197,12 @@ def test_tiff_rgba8_compare_scanline():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=scanline')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=scanline')),
+-        hashstr(im2.tostring('tiff:method=scanline')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff"))
+ 
+ 
+ def test_tiff_rgba8_compare_stripped():
+@@ -244,14 +211,12 @@ def test_tiff_rgba8_compare_stripped():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=stripped')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=stripped')),
+-        hashstr(im2.tostring('tiff:method=stripped')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=stripped')) == hashstr(im2.tostring('tiff:method=stripped'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff"))
+ 
+ 
+ def test_tiff_rgba8_compare_tiled():
+@@ -260,14 +225,12 @@ def test_tiff_rgba8_compare_tiled():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=tiled')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled')),
+-        hashstr(im2.tostring('tiff:method=tiled')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled')) == hashstr(im2.tostring('tiff:method=tiled'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray8_compare_scanline():
+@@ -276,15 +239,12 @@ def test_tiff_gray8_compare_scanline():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=scanline')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=scanline')),
+-        hashstr(im2.tostring('tiff:method=scanline')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
+-
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff"))
+ 
+ def test_tiff_gray8_compare_stripped():
+     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
+@@ -292,14 +252,12 @@ def test_tiff_gray8_compare_stripped():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=stripped')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=stripped')),
+-        hashstr(im2.tostring('tiff:method=stripped')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=stripped')) == hashstr(im2.tostring('tiff:method=stripped'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray8_compare_tiled():
+@@ -308,14 +266,12 @@ def test_tiff_gray8_compare_tiled():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=tiled')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled')),
+-        hashstr(im2.tostring('tiff:method=tiled')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled')) == hashstr(im2.tostring('tiff:method=tiled'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray16_compare_scanline():
+@@ -324,15 +280,12 @@ def test_tiff_gray16_compare_scanline():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=scanline')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=scanline')),
+-        hashstr(im2.tostring('tiff:method=scanline')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
+-
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff"))
+ 
+ def test_tiff_gray16_compare_stripped():
+     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
+@@ -340,14 +293,12 @@ def test_tiff_gray16_compare_stripped():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=stripped')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=stripped')),
+-        hashstr(im2.tostring('tiff:method=stripped')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=stripped')) == hashstr(im2.tostring('tiff:method=stripped'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray16_compare_tiled():
+@@ -356,14 +307,12 @@ def test_tiff_gray16_compare_tiled():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=tiled')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled')),
+-        hashstr(im2.tostring('tiff:method=tiled')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled')) == hashstr(im2.tostring('tiff:method=tiled'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+-        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray32f_compare_scanline():
+@@ -372,14 +321,12 @@ def test_tiff_gray32f_compare_scanline()
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=scanline')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=scanline')),
+-        hashstr(im2.tostring('tiff:method=scanline')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=scanline')) == hashstr(im2.tostring('tiff:method=scanline'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+-                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray32f_compare_stripped():
+@@ -388,14 +335,12 @@ def test_tiff_gray32f_compare_stripped()
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=stripped')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=stripped')),
+-        hashstr(im2.tostring('tiff:method=stripped')))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=stripped')) == hashstr(im2.tostring('tiff:method=stripped'))
+     # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+-                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).tostring("tiff"))
+ 
+ 
+ def test_tiff_gray32f_compare_tiled():
+@@ -404,15 +349,9 @@ def test_tiff_gray32f_compare_tiled():
+     im = mapnik.Image.open(filepath1)
+     im.save(filepath2, 'tiff:method=tiled')
+     im2 = mapnik.Image.open(filepath2)
+-    eq_(im.width(), im2.width())
+-    eq_(im.height(), im2.height())
+-    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+-    eq_(hashstr(im.tostring('tiff:method=tiled')),
+-        hashstr(im2.tostring('tiff:method=tiled')))
+-    # should not be a blank image
+-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+-                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert im.width() ==  im2.width()
++    assert im.height() ==  im2.height()
++    assert hashstr(im.tostring()) ==  hashstr(im2.tostring())
++    assert hashstr(im.tostring('tiff:method=tiled')) == hashstr(im2.tostring('tiff:method=tiled'))
++    # should not be a blank image
++    assert hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).tostring("tiff"))
+--- a/test/python_tests/introspection_test.py
++++ b/test/python_tests/introspection_test.py
+@@ -1,36 +1,32 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_introspect_symbolizers():
++def test_introspect_symbolizers(setup):
+     # create a symbolizer
+     p = mapnik.PointSymbolizer()
+     p.file = "../data/images/dummy.png"
+     p.allow_overlap = True
+     p.opacity = 0.5
+ 
+-    eq_(p.allow_overlap, True)
+-    eq_(p.opacity, 0.5)
+-    eq_(p.filename, '../data/images/dummy.png')
++    assert p.allow_overlap ==  True
++    assert p.opacity ==  0.5
++    assert p.filename ==  '../data/images/dummy.png'
+ 
+     # make sure the defaults
+     # are what we think they are
+-    eq_(p.allow_overlap, True)
+-    eq_(p.opacity, 0.5)
+-    eq_(p.filename, '../data/images/dummy.png')
++    assert p.allow_overlap ==  True
++    assert p.opacity ==  0.5
++    assert p.filename ==  '../data/images/dummy.png'
+ 
+     # contruct objects to hold it
+     r = mapnik.Rule()
+@@ -46,20 +42,16 @@ def test_introspect_symbolizers():
+ 
+     s2 = m.find_style('s')
+     rules = s2.rules
+-    eq_(len(rules), 1)
++    assert len(rules) ==  1
+     r2 = rules[0]
+     syms = r2.symbols
+-    eq_(len(syms), 1)
++    assert len(syms) ==  1
+ 
+     # TODO here, we can do...
+     sym = syms[0]
+     p2 = sym.extract()
+     assert isinstance(p2, mapnik.PointSymbolizer)
+ 
+-    eq_(p2.allow_overlap, True)
+-    eq_(p2.opacity, 0.5)
+-    eq_(p2.filename, '../data/images/dummy.png')
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert p2.allow_overlap ==  True
++    assert p2.opacity ==  0.5
++    assert p2.filename ==  '../data/images/dummy.png'
+--- a/test/python_tests/json_feature_properties_test.py
++++ b/test/python_tests/json_feature_properties_test.py
+@@ -1,11 +1,4 @@
+-# encoding: utf8
+-
+-from nose.tools import eq_
+-
+ import mapnik
+-
+-from .utilities import run_all
+-
+ try:
+     import json
+ except ImportError:
+@@ -35,7 +28,7 @@ chars = [
+     {
+         "name": "reverse_solidus",  # backslash
+         "test": "string with \\ quote",
+-        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\\ quote"}}'
++        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\\ quote"}}'
+     },
+     {
+         "name": "solidus",  # forward slash
+@@ -83,30 +76,20 @@ chars = [
+ ctx = mapnik.Context()
+ ctx.push('name')
+ 
+-
+ def test_char_escaping():
+     for char in chars:
+         feat = mapnik.Feature(ctx, 1)
+         expected = char['test']
+         feat["name"] = expected
+-        eq_(feat["name"], expected)
++        assert feat["name"] ==  expected
+         # confirm the python json module
+         # is working as we would expect
+         pyjson2 = json.loads(char['json'])
+-        eq_(pyjson2['properties']['name'], expected)
++        assert pyjson2['properties']['name'] ==  expected
+         # confirm our behavior is the same as python json module
+         # for the original string
+         geojson_feat_string = feat.to_geojson()
+-        eq_(
+-            geojson_feat_string,
+-            char['json'],
+-            "Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" %
+-            (geojson_feat_string,
+-             char['json'],
+-                char['name']))
++        assert  geojson_feat_string ==  char['json'], "Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" % (geojson_feat_string, char['json'], char['name'])
+         # and the round tripped string
+         pyjson = json.loads(geojson_feat_string)
+-        eq_(pyjson['properties']['name'], expected)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert pyjson['properties']['name'] ==  expected
+--- a/test/python_tests/layer_buffer_size_test.py
++++ b/test/python_tests/layer_buffer_size_test.py
+@@ -1,29 +1,27 @@
+-# coding=utf8
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
+ 
+     # the negative buffer on the layer should
+     # override the postive map buffer leading
+     # only one point to be rendered in the map
+-    def test_layer_buffer_size_1():
++    def test_layer_buffer_size_1(setup):
+         m = mapnik.Map(512, 512)
+-        eq_(m.buffer_size, 0)
++        assert m.buffer_size ==  0
+         mapnik.load_map(m, '../data/good_maps/layer_buffer_size_reduction.xml')
+-        eq_(m.buffer_size, 256)
+-        eq_(m.layers[0].buffer_size, -150)
++        assert m.buffer_size ==  256
++        assert m.layers[0].buffer_size ==  -150
+         m.zoom_all()
+         im = mapnik.Image(m.width, m.height)
+         mapnik.render(m, im)
+@@ -31,12 +29,4 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         expected = 'images/support/mapnik-layer-buffer-size.png'
+         im.save(actual, "png32")
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'tests/python_tests/' + expected))
+-
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert im.tostring('png32') == expected_im.tostring('png32'),'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/' + expected)
+--- a/test/python_tests/layer_modification_test.py
++++ b/test/python_tests/layer_modification_test.py
+@@ -1,21 +1,17 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_adding_datasource_to_layer():
++def test_adding_datasource_to_layer(setup):
+     map_string = '''<?xml version="1.0" encoding="utf-8"?>
+ <Map>
+ 
+@@ -39,9 +35,9 @@ def test_adding_datasource_to_layer():
+         mapnik.load_map_from_string(m, map_string)
+ 
+         # validate it loaded fine
+-        eq_(m.layers[0].styles[0], 'world_borders_style')
+-        eq_(m.layers[0].styles[1], 'point_style')
+-        eq_(len(m.layers), 1)
++        assert m.layers[0].styles[0] ==  'world_borders_style'
++        assert m.layers[0].styles[1] ==  'point_style'
++        assert len(m.layers) ==  1
+ 
+         # also assign a variable reference to that layer
+         # below we will test that this variable references
+@@ -49,35 +45,29 @@ def test_adding_datasource_to_layer():
+         lyr = m.layers[0]
+ 
+         # ensure that there was no datasource for the layer...
+-        eq_(m.layers[0].datasource, None)
+-        eq_(lyr.datasource, None)
++        assert m.layers[0].datasource ==  None
++        assert lyr.datasource ==  None
+ 
+         # also note that since the srs was black it defaulted to wgs84
+-        eq_(m.layers[0].srs,
+-            '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+-        eq_(lyr.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
++        assert m.layers[0].srs == 'epsg:4326'
++        assert lyr.srs ==  'epsg:4326'
+ 
+         # now add a datasource one...
+         ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
+         m.layers[0].datasource = ds
+ 
+         # now ensure it is attached
+-        eq_(m.layers[0].datasource.describe()['name'], "shape")
+-        eq_(lyr.datasource.describe()['name'], "shape")
++        assert m.layers[0].datasource.describe()['name'] ==  "shape"
++        assert lyr.datasource.describe()['name'] ==  "shape"
+ 
+         # and since we have now added a shapefile in spherical mercator, adjust
+         # the projection
+         lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
+ 
+         # test that assignment
+-        eq_(m.layers[
+-            0].srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
+-        eq_(lyr.srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
++        assert m.layers[0].srs == '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
++        assert lyr.srs ==  '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
+     except RuntimeError as e:
+         # only test datasources that we have installed
+         if not 'Could not create datasource' in str(e):
+             raise RuntimeError(e)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/layer_test.py
++++ b/test/python_tests/layer_test.py
+@@ -1,33 +1,21 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+-
+ # Map initialization
+ 
+-
+ def test_layer_init():
+     l = mapnik.Layer('test')
+-    eq_(l.name, 'test')
+-    eq_(l.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+-    eq_(l.envelope(), mapnik.Box2d())
+-    eq_(l.clear_label_cache, False)
+-    eq_(l.cache_features, False)
+-    eq_(l.visible(1), True)
+-    eq_(l.active, True)
+-    eq_(l.datasource, None)
+-    eq_(l.queryable, False)
+-    eq_(l.minimum_scale_denominator, 0.0)
+-    eq_(l.maximum_scale_denominator > 1e+6, True)
+-    eq_(l.group_by, "")
+-    eq_(l.maximum_extent, None)
+-    eq_(l.buffer_size, None)
+-    eq_(len(l.styles), 0)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert l.name ==  'test'
++    assert l.srs ==  'epsg:4326'
++    assert l.envelope() ==  mapnik.Box2d()
++    assert not l.clear_label_cache
++    assert not l.cache_features
++    assert l.visible(1)
++    assert l.active
++    assert l.datasource ==  None
++    assert not l.queryable
++    assert l.minimum_scale_denominator ==  0.0
++    assert l.maximum_scale_denominator > 1e+6
++    assert l.group_by ==  ""
++    assert l.maximum_extent ==  None
++    assert l.buffer_size ==  None
++    assert len(l.styles) ==  0
+--- a/test/python_tests/load_map_test.py
++++ b/test/python_tests/load_map_test.py
+@@ -1,32 +1,23 @@
+-#!/usr/bin/env python
+-
+-import glob
+-import os
+-
+-from nose.tools import eq_
+-
++import glob,os
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
+ default_logging_severity = mapnik.logger.get_severity()
+ 
+-
+-def setup():
+-    # make the tests silent to suppress unsupported params from harfbuzz tests
+-    # TODO: remove this after harfbuzz branch merges
+-    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
++@pytest.fixture(scope="module")
++def setup_and_teardown():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
+-
+-def teardown():
++    # make the tests silent to suppress unsupported params from harfbuzz tests
++    # TODO: remove this after harfbuzz branch merges
++    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
++    yield
+     mapnik.logger.set_severity(default_logging_severity)
+ 
+-
+-def test_broken_files():
++def test_broken_files(setup_and_teardown):
+     default_logging_severity = mapnik.logger.get_severity()
+     mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
+     broken_files = glob.glob("../data/broken_maps/*.xml")
+@@ -44,7 +35,7 @@ def test_broken_files():
+                 filename)
+         except RuntimeError:
+             pass
+-    eq_(len(failures), 0, '\n' + '\n'.join(failures))
++    assert len(failures) ==  0, '\n' + '\n'.join(failures)
+     mapnik.logger.set_severity(default_logging_severity)
+ 
+ 
+@@ -59,6 +50,7 @@ def test_can_parse_xml_with_deprecated_p
+             m = mapnik.Map(512, 512)
+             strict = True
+             mapnik.load_map(m, filename, strict)
++            m = mapnik.Map(512, 512)
+             base_path = os.path.dirname(filename)
+             mapnik.load_map_from_string(
+                 m,
+@@ -74,7 +66,7 @@ def test_can_parse_xml_with_deprecated_p
+                 failures.append(
+                     'Failed to load valid map %s (%s)' %
+                     (filename, e))
+-    eq_(len(failures), 0, '\n' + '\n'.join(failures))
++    assert len(failures) == 0, '\n' + '\n'.join(failures)
+     mapnik.logger.set_severity(default_logging_severity)
+ 
+ 
+@@ -88,6 +80,7 @@ def test_good_files():
+             m = mapnik.Map(512, 512)
+             strict = True
+             mapnik.load_map(m, filename, strict)
++            m = mapnik.Map(512, 512)
+             base_path = os.path.dirname(filename)
+             with open(filename, 'rb') as f:
+                 mapnik.load_map_from_string(m, f.read(), strict, base_path)
+@@ -98,8 +91,4 @@ def test_good_files():
+                 failures.append(
+                     'Failed to load valid map %s (%s)' %
+                     (filename, e))
+-    eq_(len(failures), 0, '\n' + '\n'.join(failures))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert len(failures) == 0, '\n' + '\n'.join(failures)
+--- a/test/python_tests/map_query_test.py
++++ b/test/python_tests/map_query_test.py
+@@ -1,59 +1,49 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ # map has no layers
+-
+-
+-@raises(IndexError)
+-def test_map_query_throw1():
+-    m = mapnik.Map(256, 256)
+-    m.zoom_to_box(mapnik.Box2d(-1, -1, 0, 0))
+-    m.query_point(0, 0, 0)
++def test_map_query_throw1(setup):
++    with pytest.raises(IndexError):
++        m = mapnik.Map(256, 256)
++        m.zoom_to_box(mapnik.Box2d(-1, -1, 0, 0))
++        m.query_point(0, 0, 0)
+ 
+ # only positive indexes
+-
+-
+-@raises(IndexError)
+ def test_map_query_throw2():
+-    m = mapnik.Map(256, 256)
+-    m.query_point(-1, 0, 0)
++    with pytest.raises(IndexError):
++        m = mapnik.Map(256, 256)
++        m.query_point(-1, 0, 0)
+ 
+ # map has never been zoomed (nodata)
+-
+-
+-@raises(RuntimeError)
+ def test_map_query_throw3():
+-    m = mapnik.Map(256, 256)
+-    m.query_point(0, 0, 0)
++    with pytest.raises(RuntimeError):
++        m = mapnik.Map(256, 256)
++        m.query_point(0, 0, 0)
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+     # map has never been zoomed (even with data)
+-    @raises(RuntimeError)
+     def test_map_query_throw4():
+-        m = mapnik.Map(256, 256)
+-        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
+-        m.query_point(0, 0, 0)
++        with pytest.raises(RuntimeError):
++            m = mapnik.Map(256, 256)
++            mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
++            m.query_point(0, 0, 0)
+ 
+     # invalid coords in general (do not intersect)
+-    @raises(RuntimeError)
+     def test_map_query_throw5():
+-        m = mapnik.Map(256, 256)
+-        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
+-        m.zoom_all()
+-        m.query_point(0, 9999999999999999, 9999999999999999)
++        with pytest.raises(RuntimeError):
++            m = mapnik.Map(256, 256)
++            mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
++            m.zoom_all()
++            m.query_point(0, 9999999999999999, 9999999999999999)
+ 
+     def test_map_query_works1():
+         m = mapnik.Map(256, 256)
+@@ -65,7 +55,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+         # somewhere in kansas
+         fs = m.query_point(0, -11012435.5376, 4599674.6134)
+         feat = fs.next()
+-        eq_(feat.attributes['NAME_FORMA'], u'United States of America')
++        assert feat.attributes['NAME_FORMA'] ==  u'United States of America'
+ 
+     def test_map_query_works2():
+         m = mapnik.Map(256, 256)
+@@ -78,13 +68,13 @@ if 'shape' in mapnik.DatasourceCache.plu
+         # mapnik.render_to_file(m,'works2.png')
+         # validate that aspect_fix_mode modified the bbox reasonably
+         e = m.envelope()
+-        assert_almost_equal(e.minx, -179.999999975, places=7)
+-        assert_almost_equal(e.miny, -167.951396161, places=7)
+-        assert_almost_equal(e.maxx, 179.999999975, places=7)
+-        assert_almost_equal(e.maxy, 192.048603789, places=7)
++        assert e.minx == pytest.approx(-179.999999975, abs=1e-7)
++        assert e.miny == pytest.approx(-167.951396161, abs=1e-7)
++        assert e.maxx == pytest.approx(179.999999975, abs=1e-7)
++        assert e.maxy == pytest.approx(192.048603789, abs=1e-7)
+         fs = m.query_point(0, -98.9264, 38.1432)  # somewhere in kansas
+         feat = fs.next()
+-        eq_(feat.attributes['NAME'], u'United States')
++        assert feat.attributes['NAME'] ==  u'United States'
+ 
+     def test_map_query_in_pixels_works1():
+         m = mapnik.Map(256, 256)
+@@ -95,7 +85,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+         m.zoom_all()
+         fs = m.query_map_point(0, 55, 100)  # somewhere in middle of us
+         feat = fs.next()
+-        eq_(feat.attributes['NAME_FORMA'], u'United States of America')
++        assert feat.attributes['NAME_FORMA'] ==  u'United States of America'
+ 
+     def test_map_query_in_pixels_works2():
+         m = mapnik.Map(256, 256)
+@@ -107,14 +97,10 @@ if 'shape' in mapnik.DatasourceCache.plu
+         m.zoom_all()
+         # validate that aspect_fix_mode modified the bbox reasonably
+         e = m.envelope()
+-        assert_almost_equal(e.minx, -179.999999975, places=7)
+-        assert_almost_equal(e.miny, -167.951396161, places=7)
+-        assert_almost_equal(e.maxx, 179.999999975, places=7)
+-        assert_almost_equal(e.maxy, 192.048603789, places=7)
++        assert e.minx == pytest.approx(-179.999999975, abs=1e-7)
++        assert e.miny == pytest.approx(-167.951396161, abs=1e-7)
++        assert e.maxx == pytest.approx(179.999999975, abs=1e-7)
++        assert e.maxy == pytest.approx(192.048603789, abs=1e-7)
+         fs = m.query_map_point(0, 55, 100)  # somewhere in Canada
+         feat = fs.next()
+-        eq_(feat.attributes['NAME'], u'Canada')
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert feat.attributes['NAME'] ==  u'Canada'
+--- a/test/python_tests/mapnik_logger_test.py
++++ b/test/python_tests/mapnik_logger_test.py
+@@ -1,21 +1,12 @@
+-#!/usr/bin/env python
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+-
+ def test_logger_init():
+-    eq_(mapnik.severity_type.Debug, 0)
+-    eq_(mapnik.severity_type.Warn, 1)
+-    eq_(mapnik.severity_type.Error, 2)
+-    eq_(getattr(mapnik.severity_type, "None"), 3)
++    assert mapnik.severity_type.Debug ==  0
++    assert mapnik.severity_type.Warn ==  1
++    assert mapnik.severity_type.Error ==  2
++    assert getattr(mapnik.severity_type, "None") ==  3
+     default = mapnik.logger.get_severity()
+     mapnik.logger.set_severity(mapnik.severity_type.Debug)
+-    eq_(mapnik.logger.get_severity(), mapnik.severity_type.Debug)
++    assert mapnik.logger.get_severity() ==  mapnik.severity_type.Debug
+     mapnik.logger.set_severity(default)
+-    eq_(mapnik.logger.get_severity(), default)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert mapnik.logger.get_severity() ==  default
+--- a/test/python_tests/mapnik_test_data_test.py
++++ b/test/python_tests/mapnik_test_data_test.py
+@@ -1,29 +1,7 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-from __future__ import print_function
+-
+-import os
++import os
+ from glob import glob
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-default_logging_severity = mapnik.logger.get_severity()
+-
+-
+-def setup():
+-    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+-def teardown():
+-    mapnik.logger.set_severity(default_logging_severity)
+-
+ plugin_mapping = {
+     '.csv': ['csv'],
+     '.json': ['geojson', 'ogr'],
+@@ -63,7 +41,3 @@ def test_opening_data():
+                             print('could not open, %s: %s' % (kwargs, e))
+             # else:
+             #    print 'skipping opening %s' % filepath
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/markers_complex_rendering_test.py
++++ b/test/python_tests/markers_complex_rendering_test.py
+@@ -1,35 +1,29 @@
+-# coding=utf8
+-import os
+-
+-from nose.tools import eq_
+-
++import pytest
+ import mapnik
++import os
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'csv' in mapnik.DatasourceCache.plugin_names():
+-    def test_marker_ellipse_render1():
++    def test_marker_ellipse_render1(setup):
+         m = mapnik.Map(256, 256)
+         mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform.xml')
+         m.zoom_all()
+         im = mapnik.Image(m.width, m.height)
+         mapnik.render(m, im)
+         actual = '/tmp/mapnik-marker-ellipse-render1.png'
+-        expected = 'images/support/mapnik-marker-ellipse-render1.png'
++        expected = './images/support/mapnik-marker-ellipse-render1.png'
+         im.save(actual, 'png32')
+         if os.environ.get('UPDATE'):
+             im.save(expected, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
++        assert im.tostring('png32') == expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual, expected)
+ 
+     def test_marker_ellipse_render2():
+         m = mapnik.Map(256, 256)
+@@ -38,16 +32,9 @@ if 'csv' in mapnik.DatasourceCache.plugi
+         im = mapnik.Image(m.width, m.height)
+         mapnik.render(m, im)
+         actual = '/tmp/mapnik-marker-ellipse-render2.png'
+-        expected = 'images/support/mapnik-marker-ellipse-render2.png'
++        expected = './images/support/mapnik-marker-ellipse-render2.png'
+         im.save(actual, 'png32')
+         if os.environ.get('UPDATE'):
+             im.save(expected, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert im.tostring('png32') == expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual, expected)
+--- a/test/python_tests/memory_datasource_test.py
++++ b/test/python_tests/memory_datasource_test.py
+@@ -1,21 +1,15 @@
+-# encoding: utf8
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+-
+ def test_add_feature():
+     md = mapnik.MemoryDatasource()
+-    eq_(md.num_features(), 0)
++    assert md.num_features() ==  0
+     context = mapnik.Context()
+     context.push('foo')
+     feature = mapnik.Feature(context, 1)
+     feature['foo'] = 'bar'
+     feature.geometry = mapnik.Geometry.from_wkt('POINT(2 3)')
+     md.add_feature(feature)
+-    eq_(md.num_features(), 1)
++    assert md.num_features() ==  1
+ 
+     featureset = md.features_at_point(mapnik.Coord(2, 3))
+     retrieved = []
+@@ -23,15 +17,12 @@ def test_add_feature():
+     for feat in featureset:
+         retrieved.append(feat)
+ 
+-    eq_(len(retrieved), 1)
++    assert len(retrieved) ==  1
+     f = retrieved[0]
+-    eq_(f['foo'], 'bar')
++    assert f['foo'] ==  'bar'
+ 
+     featureset = md.features_at_point(mapnik.Coord(20, 30))
+     retrieved = []
+     for feat in featureset:
+         retrieved.append(feat)
+-    eq_(len(retrieved), 0)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert len(retrieved) ==  0
+--- a/test/python_tests/multi_tile_raster_test.py
++++ b/test/python_tests/multi_tile_raster_test.py
+@@ -1,22 +1,18 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ 
+-def test_multi_tile_policy():
+-    srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
++def test_multi_tile_policy(setup):
++    srs = 'epsg:4326'
+     lyr = mapnik.Layer('raster')
+     if 'raster' in mapnik.DatasourceCache.plugin_names():
+         lyr.datasource = mapnik.Raster(
+@@ -46,29 +42,25 @@ def test_multi_tile_policy():
+         mapnik.render(_map, im)
+ 
+         # test green chunk
+-        eq_(im.view(0, 64, 1, 1).tostring(), b'\x00\xff\x00\xff')
+-        eq_(im.view(127, 64, 1, 1).tostring(), b'\x00\xff\x00\xff')
+-        eq_(im.view(0, 127, 1, 1).tostring(), b'\x00\xff\x00\xff')
+-        eq_(im.view(127, 127, 1, 1).tostring(), b'\x00\xff\x00\xff')
++        assert im.view(0, 64, 1, 1).tostring() ==  b'\x00\xff\x00\xff'
++        assert im.view(127, 64, 1, 1).tostring() ==  b'\x00\xff\x00\xff'
++        assert im.view(0, 127, 1, 1).tostring() ==  b'\x00\xff\x00\xff'
++        assert im.view(127, 127, 1, 1).tostring() ==  b'\x00\xff\x00\xff'
+ 
+         # test blue chunk
+-        eq_(im.view(128, 64, 1, 1).tostring(), b'\x00\x00\xff\xff')
+-        eq_(im.view(255, 64, 1, 1).tostring(), b'\x00\x00\xff\xff')
+-        eq_(im.view(128, 127, 1, 1).tostring(), b'\x00\x00\xff\xff')
+-        eq_(im.view(255, 127, 1, 1).tostring(), b'\x00\x00\xff\xff')
++        assert im.view(128, 64, 1, 1).tostring() ==  b'\x00\x00\xff\xff'
++        assert im.view(255, 64, 1, 1).tostring() ==  b'\x00\x00\xff\xff'
++        assert im.view(128, 127, 1, 1).tostring() ==  b'\x00\x00\xff\xff'
++        assert im.view(255, 127, 1, 1).tostring() ==  b'\x00\x00\xff\xff'
+ 
+         # test red chunk
+-        eq_(im.view(0, 128, 1, 1).tostring(), b'\xff\x00\x00\xff')
+-        eq_(im.view(127, 128, 1, 1).tostring(), b'\xff\x00\x00\xff')
+-        eq_(im.view(0, 191, 1, 1).tostring(), b'\xff\x00\x00\xff')
+-        eq_(im.view(127, 191, 1, 1).tostring(), b'\xff\x00\x00\xff')
++        assert im.view(0, 128, 1, 1).tostring() ==  b'\xff\x00\x00\xff'
++        assert im.view(127, 128, 1, 1).tostring() ==  b'\xff\x00\x00\xff'
++        assert im.view(0, 191, 1, 1).tostring() ==  b'\xff\x00\x00\xff'
++        assert im.view(127, 191, 1, 1).tostring() ==  b'\xff\x00\x00\xff'
+ 
+         # test magenta chunk
+-        eq_(im.view(128, 128, 1, 1).tostring(), b'\xff\x00\xff\xff')
+-        eq_(im.view(255, 128, 1, 1).tostring(), b'\xff\x00\xff\xff')
+-        eq_(im.view(128, 191, 1, 1).tostring(), b'\xff\x00\xff\xff')
+-        eq_(im.view(255, 191, 1, 1).tostring(), b'\xff\x00\xff\xff')
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert im.view(128, 128, 1, 1).tostring() ==  b'\xff\x00\xff\xff'
++        assert im.view(255, 128, 1, 1).tostring() ==  b'\xff\x00\xff\xff'
++        assert im.view(128, 191, 1, 1).tostring() ==  b'\xff\x00\xff\xff'
++        assert im.view(255, 191, 1, 1).tostring() ==  b'\xff\x00\xff\xff'
+--- a/test/python_tests/object_test.py
++++ b/test/python_tests/object_test.py
+@@ -1,570 +1,275 @@
+-# #!/usr/bin/env python
+-# # -*- coding: utf-8 -*-
+-
+-# import os
+-# from nose.tools import *
+-# from utilities import execution_path, run_all
+-# import tempfile
+-
+-# import mapnik
+-
+-# def setup():
+-#     # All of the paths used are relative, if we run the tests
+-#     # from another directory we need to chdir()
+-#     os.chdir(execution_path('.'))
+-
+-# def test_debug_symbolizer():
+-#     s = mapnik.DebugSymbolizer()
+-#     eq_(s.mode,mapnik.debug_symbolizer_mode.collision)
+-
+-# def test_raster_symbolizer():
+-#     s = mapnik.RasterSymbolizer()
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over) # note: mode is deprecated
+-#     eq_(s.scaling,mapnik.scaling_method.NEAR)
+-#     eq_(s.opacity,1.0)
+-#     eq_(s.colorizer,None)
+-#     eq_(s.filter_factor,-1)
+-#     eq_(s.mesh_size,16)
+-#     eq_(s.premultiplied,None)
+-#     s.premultiplied = True
+-#     eq_(s.premultiplied,True)
+-
+-# def test_line_pattern():
+-#     s = mapnik.LinePatternSymbolizer(mapnik.PathExpression('../data/images/dummy.png'))
+-#     eq_(s.filename, '../data/images/dummy.png')
+-#     eq_(s.smooth,0.0)
+-#     eq_(s.transform,'')
+-#     eq_(s.offset,0.0)
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(s.clip,True)
+-
+-# def test_line_symbolizer():
+-#     s = mapnik.LineSymbolizer()
+-#     eq_(s.rasterizer, mapnik.line_rasterizer.FULL)
+-#     eq_(s.smooth,0.0)
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(s.clip,True)
+-#     eq_(s.stroke.width, 1)
+-#     eq_(s.stroke.opacity, 1)
+-#     eq_(s.stroke.color, mapnik.Color('black'))
+-#     eq_(s.stroke.line_cap, mapnik.line_cap.BUTT_CAP)
+-#     eq_(s.stroke.line_join, mapnik.line_join.MITER_JOIN)
+-
+-#     l = mapnik.LineSymbolizer(mapnik.Color('blue'), 5.0)
+-
+-#     eq_(l.stroke.width, 5)
+-#     eq_(l.stroke.opacity, 1)
+-#     eq_(l.stroke.color, mapnik.Color('blue'))
+-#     eq_(l.stroke.line_cap, mapnik.line_cap.BUTT_CAP)
+-#     eq_(l.stroke.line_join, mapnik.line_join.MITER_JOIN)
+-
+-#     s = mapnik.Stroke(mapnik.Color('blue'), 5.0)
+-#     l = mapnik.LineSymbolizer(s)
+-
+-#     eq_(l.stroke.width, 5)
+-#     eq_(l.stroke.opacity, 1)
+-#     eq_(l.stroke.color, mapnik.Color('blue'))
+-#     eq_(l.stroke.line_cap, mapnik.line_cap.BUTT_CAP)
+-#     eq_(l.stroke.line_join, mapnik.line_join.MITER_JOIN)
+-
+-# def test_line_symbolizer_stroke_reference():
+-#     l = mapnik.LineSymbolizer(mapnik.Color('green'),0.1)
+-#     l.stroke.add_dash(.1,.1)
+-#     l.stroke.add_dash(.1,.1)
+-#     eq_(l.stroke.get_dashes(), [(.1,.1),(.1,.1)])
+-#     eq_(l.stroke.color,mapnik.Color('green'))
+-#     eq_(l.stroke.opacity,1.0)
+-#     assert_almost_equal(l.stroke.width,0.1)
+-
+-# # https://github.com/mapnik/mapnik/issues/1427
+-# def test_stroke_dash_api():
+-#     stroke = mapnik.Stroke()
+-#     dashes = [(1.0,1.0)]
+-#     stroke.dasharray = dashes
+-#     eq_(stroke.dasharray, dashes)
+-#     stroke.add_dash(.1,.1)
+-#     dashes.append((.1,.1))
+-#     eq_(stroke.dasharray, dashes)
+-
+-
+-# def test_text_symbolizer():
+-#     s = mapnik.TextSymbolizer()
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(s.clip,True)
+-#     eq_(s.halo_rasterizer,mapnik.halo_rasterizer.FULL)
+-
+-#     # https://github.com/mapnik/mapnik/issues/1420
+-#     eq_(s.text_transform, mapnik.text_transform.NONE)
+-
+-#     # old args required method
+-#     ts = mapnik.TextSymbolizer(mapnik.Expression('[Field_Name]'), 'Font Name', 8, mapnik.Color('black'))
+-# #    eq_(str(ts.name), str(mapnik2.Expression('[Field_Name]'))) name field is no longer supported
+-#     eq_(ts.format.face_name, 'Font Name')
+-#     eq_(ts.format.text_size, 8)
+-#     eq_(ts.format.fill, mapnik.Color('black'))
+-#     eq_(ts.properties.label_placement, mapnik.label_placement.POINT_PLACEMENT)
+-#     eq_(ts.properties.horizontal_alignment, mapnik.horizontal_alignment.AUTO)
+-
+-# def test_shield_symbolizer_init():
+-#     s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../data/images/dummy.png'))
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(s.clip,True)
+-#     eq_(s.displacement, (0.0,0.0))
+-#     eq_(s.allow_overlap, False)
+-#     eq_(s.avoid_edges, False)
+-#     eq_(s.character_spacing,0)
+-#     #eq_(str(s.name), str(mapnik2.Expression('[Field Name]'))) name field is no longer supported
+-#     eq_(s.face_name, 'DejaVu Sans Bold')
+-#     eq_(s.allow_overlap, False)
+-#     eq_(s.fill, mapnik.Color('#000000'))
+-#     eq_(s.halo_fill, mapnik.Color('rgb(255,255,255)'))
+-#     eq_(s.halo_radius, 0)
+-#     eq_(s.label_placement, mapnik.label_placement.POINT_PLACEMENT)
+-#     eq_(s.minimum_distance, 0.0)
+-#     eq_(s.text_ratio, 0)
+-#     eq_(s.text_size, 6)
+-#     eq_(s.wrap_width, 0)
+-#     eq_(s.vertical_alignment, mapnik.vertical_alignment.AUTO)
+-#     eq_(s.label_spacing, 0)
+-#     eq_(s.label_position_tolerance, 0)
+-#     # 22.5 * M_PI/180.0 initialized by default
+-#     assert_almost_equal(s.max_char_angle_delta, 0.39269908169872414)
+-
+-#     eq_(s.text_transform, mapnik.text_transform.NONE)
+-#     eq_(s.line_spacing, 0)
+-#     eq_(s.character_spacing, 0)
+-
+-#     # r1341
+-#     eq_(s.wrap_before, False)
+-#     eq_(s.horizontal_alignment, mapnik.horizontal_alignment.AUTO)
+-#     eq_(s.justify_alignment, mapnik.justify_alignment.AUTO)
+-#     eq_(s.opacity, 1.0)
+-
+-#     # r2300
+-#     eq_(s.minimum_padding, 0.0)
+-
+-#     # was mixed with s.opacity
+-#     eq_(s.text_opacity, 1.0)
+-
+-#     eq_(s.shield_displacement, (0.0,0.0))
+-#     # TODO - the pattern in bindings seems to be to get/set
+-#     # strings for PathExpressions... should we pass objects?
+-#     eq_(s.filename, '../data/images/dummy.png')
+-
+-#     # 11c34b1: default transform list is empty, not identity matrix
+-#     eq_(s.transform, '')
+-
+-#     eq_(s.fontset, None)
+-
+-# # ShieldSymbolizer missing image file
+-# # images paths are now PathExpressions are evaluated at runtime
+-# # so it does not make sense to throw...
+-# #@raises(RuntimeError)
+-# #def test_shieldsymbolizer_missing_image():
+-# #    s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../#data/images/broken.png'))
+-
+-# def test_shield_symbolizer_modify():
+-#     s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../data/images/dummy.png'))
+-#     # transform expression
+-#     def check_transform(expr, expect_str=None):
+-#         s.transform = expr
+-#         eq_(s.transform, expr if expect_str is None else expect_str)
+-#     check_transform("matrix(1 2 3 4 5 6)", "matrix(1, 2, 3, 4, 5, 6)")
+-#     check_transform("matrix(1, 2, 3, 4, 5, 6 +7)", "matrix(1, 2, 3, 4, 5, (6+7))")
+-#     check_transform("rotate([a])")
+-#     check_transform("rotate([a] -2)", "rotate(([a]-2))")
+-#     check_transform("rotate([a] -2 -3)", "rotate([a], -2, -3)")
+-#     check_transform("rotate([a] -2 -3 -4)", "rotate(((([a]-2)-3)-4))")
+-#     check_transform("rotate([a] -2, 3, 4)", "rotate(([a]-2), 3, 4)")
+-#     check_transform("translate([tx]) rotate([a])")
+-#     check_transform("scale([sx], [sy]/2)")
+-#     # TODO check expected failures
+-
+-# def test_point_symbolizer():
+-#     p = mapnik.PointSymbolizer()
+-#     eq_(p.filename,'')
+-#     eq_(p.transform,'')
+-#     eq_(p.opacity,1.0)
+-#     eq_(p.allow_overlap,False)
+-#     eq_(p.ignore_placement,False)
+-#     eq_(p.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(p.placement, mapnik.point_placement.CENTROID)
+-
+-#     p = mapnik.PointSymbolizer(mapnik.PathExpression("../data/images/dummy.png"))
+-#     p.allow_overlap = True
+-#     p.opacity = 0.5
+-#     p.ignore_placement = True
+-#     p.placement = mapnik.point_placement.INTERIOR
+-#     eq_(p.allow_overlap, True)
+-#     eq_(p.opacity, 0.5)
+-#     eq_(p.filename,'../data/images/dummy.png')
+-#     eq_(p.ignore_placement,True)
+-#     eq_(p.placement, mapnik.point_placement.INTERIOR)
+-
+-# def test_markers_symbolizer():
+-#     p = mapnik.MarkersSymbolizer()
+-#     eq_(p.allow_overlap, False)
+-#     eq_(p.opacity,1.0)
+-#     eq_(p.fill_opacity,None)
+-#     eq_(p.filename,'shape://ellipse')
+-#     eq_(p.placement,mapnik.marker_placement.POINT_PLACEMENT)
+-#     eq_(p.multi_policy,mapnik.marker_multi_policy.EACH)
+-#     eq_(p.fill,None)
+-#     eq_(p.ignore_placement,False)
+-#     eq_(p.spacing,100)
+-#     eq_(p.max_error,0.2)
+-#     eq_(p.width,None)
+-#     eq_(p.height,None)
+-#     eq_(p.transform,'')
+-#     eq_(p.clip,True)
+-#     eq_(p.comp_op,mapnik.CompositeOp.src_over)
+-
+-
+-#     p.width = mapnik.Expression('12')
+-#     p.height = mapnik.Expression('12')
+-#     eq_(str(p.width),'12')
+-#     eq_(str(p.height),'12')
+-
+-#     p.width = mapnik.Expression('[field] + 2')
+-#     p.height = mapnik.Expression('[field] + 2')
+-#     eq_(str(p.width),'([field]+2)')
+-#     eq_(str(p.height),'([field]+2)')
+-
+-#     stroke = mapnik.Stroke()
+-#     stroke.color = mapnik.Color('black')
+-#     stroke.width = 1.0
+-
+-#     p.stroke = stroke
+-#     p.fill = mapnik.Color('white')
+-#     p.allow_overlap = True
+-#     p.opacity = 0.5
+-#     p.fill_opacity = 0.5
+-#     p.placement = mapnik.marker_placement.LINE_PLACEMENT
+-#     p.multi_policy = mapnik.marker_multi_policy.WHOLE
+-
+-#     eq_(p.allow_overlap, True)
+-#     eq_(p.opacity, 0.5)
+-#     eq_(p.fill_opacity, 0.5)
+-#     eq_(p.multi_policy,mapnik.marker_multi_policy.WHOLE)
+-#     eq_(p.placement,mapnik.marker_placement.LINE_PLACEMENT)
+-
+-#     #https://github.com/mapnik/mapnik/issues/1285
+-#     #https://github.com/mapnik/mapnik/issues/1427
+-#     p.marker_type = 'arrow'
+-#     eq_(p.marker_type,'shape://arrow')
+-#     eq_(p.filename,'shape://arrow')
+-
+-
+-# # PointSymbolizer missing image file
+-# # images paths are now PathExpressions are evaluated at runtime
+-# # so it does not make sense to throw...
+-# #@raises(RuntimeError)
+-# #def test_pointsymbolizer_missing_image():
+-#  #   p = mapnik.PointSymbolizer(mapnik.PathExpression("../data/images/broken.png"))
+-
+-# def test_polygon_symbolizer():
+-#     p = mapnik.PolygonSymbolizer()
+-#     eq_(p.smooth,0.0)
+-#     eq_(p.comp_op,mapnik.CompositeOp.src_over)
+-#     eq_(p.clip,True)
+-#     eq_(p.fill, mapnik.Color('gray'))
+-#     eq_(p.fill_opacity, 1)
+-
+-#     p = mapnik.PolygonSymbolizer(mapnik.Color('blue'))
+-
+-#     eq_(p.fill, mapnik.Color('blue'))
+-#     eq_(p.fill_opacity, 1)
+-
+-# def test_building_symbolizer_init():
+-#     p = mapnik.BuildingSymbolizer()
+-
+-#     eq_(p.fill, mapnik.Color('gray'))
+-#     eq_(p.fill_opacity, 1)
+-#     eq_(p.height,None)
+-
+-# def test_group_symbolizer_init():
+-#     s = mapnik.GroupSymbolizer()
+-
+-#     p = mapnik.GroupSymbolizerProperties()
+-
+-#     l = mapnik.PairLayout()
+-#     l.item_margin = 5.0
+-#     p.set_layout(l)
+-
+-#     r = mapnik.GroupRule(mapnik.Expression("[name%1]"))
+-#     r.append(mapnik.PointSymbolizer())
+-#     p.add_rule(r)
+-#     s.symbolizer_properties = p
+-
+-#     eq_(s.comp_op,mapnik.CompositeOp.src_over)
+-
+-# def test_stroke_init():
+-#     s = mapnik.Stroke()
+-
+-#     eq_(s.width, 1)
+-#     eq_(s.opacity, 1)
+-#     eq_(s.color, mapnik.Color('black'))
+-#     eq_(s.line_cap, mapnik.line_cap.BUTT_CAP)
+-#     eq_(s.line_join, mapnik.line_join.MITER_JOIN)
+-#     eq_(s.gamma,1.0)
+-
+-#     s = mapnik.Stroke(mapnik.Color('blue'), 5.0)
+-#     s.gamma = .5
+-
+-#     eq_(s.width, 5)
+-#     eq_(s.opacity, 1)
+-#     eq_(s.color, mapnik.Color('blue'))
+-#     eq_(s.gamma, .5)
+-#     eq_(s.line_cap, mapnik.line_cap.BUTT_CAP)
+-#     eq_(s.line_join, mapnik.line_join.MITER_JOIN)
+-
+-# def test_stroke_dash_arrays():
+-#     s = mapnik.Stroke()
+-#     s.add_dash(1,2)
+-#     s.add_dash(3,4)
+-#     s.add_dash(5,6)
+-
+-#     eq_(s.get_dashes(), [(1,2),(3,4),(5,6)])
+-
+-# def test_map_init():
+-#     m = mapnik.Map(256, 256)
+-
+-#     eq_(m.width, 256)
+-#     eq_(m.height, 256)
+-#     eq_(m.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+-#     eq_(m.base, '')
+-#     eq_(m.maximum_extent, None)
+-#     eq_(m.background_image, None)
+-#     eq_(m.background_image_comp_op, mapnik.CompositeOp.src_over)
+-#     eq_(m.background_image_opacity, 1.0)
+-
+-#     m = mapnik.Map(256, 256, '+proj=latlong')
+-#     eq_(m.srs, '+proj=latlong')
+-
+-# def test_map_style_access():
+-#     m = mapnik.Map(256, 256)
+-#     sty = mapnik.Style()
+-#     m.append_style("style",sty)
+-#     styles = list(m.styles)
+-#     eq_(len(styles),1)
+-#     eq_(styles[0][0],'style')
+-#     # returns a copy so let's just check it is the right instance
+-#     eq_(isinstance(styles[0][1],mapnik.Style),True)
+-
+-# def test_map_maximum_extent_modification():
+-#     m = mapnik.Map(256, 256)
+-#     eq_(m.maximum_extent, None)
+-#     m.maximum_extent = mapnik.Box2d()
+-#     eq_(m.maximum_extent, mapnik.Box2d())
+-#     m.maximum_extent = None
+-#     eq_(m.maximum_extent, None)
+-
+-# # Map initialization from string
+-# def test_map_init_from_string():
+-#     map_string = '''<Map background-color="steelblue" base="./" srs="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs">
+-#      <Style name="My Style">
+-#       <Rule>
+-#        <PolygonSymbolizer fill="#f2eff9"/>
+-#        <LineSymbolizer stroke="rgb(50%,50%,50%)" stroke-width="0.1"/>
+-#       </Rule>
+-#      </Style>
+-#      <Layer name="boundaries">
+-#       <StyleName>My Style</StyleName>
+-#        <Datasource>
+-#         <Parameter name="type">shape</Parameter>
+-#         <Parameter name="file">../../demo/data/boundaries</Parameter>
+-#        </Datasource>
+-#       </Layer>
+-#     </Map>'''
+-
+-#     m = mapnik.Map(600, 300)
+-#     eq_(m.base, '')
+-#     try:
+-#         mapnik.load_map_from_string(m, map_string)
+-#         eq_(m.base, './')
+-#         mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect
+-#         eq_(m.base, './')
+-
+-#         tmp_dir = tempfile.gettempdir()
+-#         try:
+-#             mapnik.load_map_from_string(m, map_string, False, tmp_dir)
+-#         except RuntimeError:
+-#             pass # runtime error expected because shapefile path should be wrong and datasource will throw
+-#         eq_(m.base, tmp_dir) # tmp_dir will be set despite the exception because load_map mostly worked
+-#         m.base = 'foo'
+-#         mapnik.load_map_from_string(m, map_string, True, ".")
+-#         eq_(m.base, '.')
+-#     except RuntimeError, e:
+-#         # only test datasources that we have installed
+-#         if not 'Could not create datasource' in str(e):
+-#             raise RuntimeError(e)
++import os
++import tempfile
++import mapnik
++import pytest
++
++from .utilities import execution_path
++
++@pytest.fixture(scope="module")
++def setup():
++    # All of the paths used are relative, if we run the tests
++    # from another directory we need to chdir()
++    os.chdir(execution_path('.'))
++    yield
++
++def test_debug_symbolizer(setup):
++    s = mapnik.DebugSymbolizer()
++    s.mode = mapnik.debug_symbolizer_mode.collision
++    assert s.mode == mapnik.debug_symbolizer_mode.collision
++
++def test_raster_symbolizer():
++    s = mapnik.RasterSymbolizer()
++    s.comp_op = mapnik.CompositeOp.src_over
++    s.scaling = mapnik.scaling_method.NEAR
++    s.opacity = 1.0
++    s.mesh_size = 16
++
++    assert s.comp_op == mapnik.CompositeOp.src_over  # note: mode is deprecated
++    assert s.scaling == mapnik.scaling_method.NEAR
++    assert s.opacity == 1.0
++    assert s.colorizer == None
++    assert s.mesh_size == 16
++    assert s.premultiplied == None
++    s.premultiplied = True
++    assert s.premultiplied == True
++
++def test_line_pattern():
++    s = mapnik.LinePatternSymbolizer()
++    s.file = mapnik.PathExpression('../data/images/dummy.png')
++    assert str(s.file) ==  '../data/images/dummy.png'
++
++def test_map_init():
++    m = mapnik.Map(256, 256)
++    assert m.width ==  256
++    assert m.height ==  256
++    assert m.srs ==  'epsg:4326'
++    assert m.base ==  ''
++    assert m.maximum_extent ==  None
++    assert m.background_image ==  None
++    assert m.background_image_comp_op ==  mapnik.CompositeOp.src_over
++    assert m.background_image_opacity ==  1.0
++    m = mapnik.Map(256, 256, '+proj=latlong')
++    assert m.srs ==  '+proj=latlong'
++
++def test_map_style_access():
++    m = mapnik.Map(256, 256)
++    sty = mapnik.Style()
++    m.append_style("style",sty)
++    styles = list(m.styles)
++    assert len(styles) == 1
++    assert styles[0][0] == 'style'
++    # returns a copy so let's just check it is the right instance
++    assert isinstance(styles[0][1],mapnik.Style)
++
++def test_map_maximum_extent_modification():
++    m = mapnik.Map(256, 256)
++    assert m.maximum_extent ==  None
++    m.maximum_extent = mapnik.Box2d()
++    assert m.maximum_extent ==  mapnik.Box2d()
++    m.maximum_extent = None
++    assert m.maximum_extent ==  None
++
++# Map initialization from string
++def test_map_init_from_string():
++    map_string = '''<Map background-color="steelblue" base="./" srs="epsg:4326">
++     <Style name="My Style">
++      <Rule>
++       <PolygonSymbolizer fill="#f2eff9"/>
++       <LineSymbolizer stroke="rgb(50%,50%,50%)" stroke-width="0.1"/>
++      </Rule>
++     </Style>
++     <Layer name="boundaries">
++      <StyleName>My Style</StyleName>
++       <Datasource>
++        <Parameter name="type">shape</Parameter>
++        <Parameter name="file">../../demo/data/boundaries</Parameter>
++       </Datasource>
++      </Layer>
++    </Map>'''
++
++    m = mapnik.Map(600, 300)
++    assert m.base ==  ''
++    try:
++        mapnik.load_map_from_string(m, map_string)
++        assert m.base ==  './'
++        mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect
++        assert m.base ==  './'
++
++        tmp_dir = tempfile.gettempdir()
++        try:
++            mapnik.load_map_from_string(m, map_string, False, tmp_dir)
++        except RuntimeError:
++            pass # runtime error expected because shapefile path should be wrong and datasource will throw
++        assert m.base ==  tmp_dir  # tmp_dir will be set despite the exception because load_map mostly worked
++        m.remove_all()
++        m.base = 'foo'
++        mapnik.load_map_from_string(m, map_string, True, ".")
++        assert m.base ==  '.'
++    except RuntimeError as e:
++        # only test datasources that we have installed
++        if not 'Could not create datasource' in str(e):
++            raise RuntimeError(e)
+ 
+ # # Color initialization
+-# @raises(Exception) # Boost.Python.ArgumentError
+-# def test_color_init_errors():
+-#     c = mapnik.Color()
+-
+-# @raises(RuntimeError)
+-# def test_color_init_errors():
+-#     c = mapnik.Color('foo') # mapnik config
+-
+-# def test_color_init():
+-#     c = mapnik.Color('blue')
++def test_color_init_errors():
++    with pytest.raises(Exception): # Boost.Python.ArgumentError
++        c = mapnik.Color()
+ 
+-#     eq_(c.a, 255)
+-#     eq_(c.r, 0)
+-#     eq_(c.g, 0)
+-#     eq_(c.b, 255)
++def test_color_init_errors_invalid_name():
++    with pytest.raises(RuntimeError):
++        c = mapnik.Color('foo') # mapnik config
+ 
+-#     eq_(c.to_hex_string(), '#0000ff')
++def test_color_init():
++     c = mapnik.Color('blue')
++     assert c.a ==  255
++     assert c.r ==  0
++     assert c.g ==  0
++     assert c.b ==  255
+ 
+-#     c = mapnik.Color('#f2eff9')
++     assert c.to_hex_string() ==  '#0000ff'
+ 
+-#     eq_(c.a, 255)
+-#     eq_(c.r, 242)
+-#     eq_(c.g, 239)
+-#     eq_(c.b, 249)
++     c = mapnik.Color('#f2eff9')
+ 
+-#     eq_(c.to_hex_string(), '#f2eff9')
++     assert c.a ==  255
++     assert c.r ==  242
++     assert c.g ==  239
++     assert c.b ==  249
+ 
+-#     c = mapnik.Color('rgb(50%,50%,50%)')
++     assert c.to_hex_string() ==  '#f2eff9'
+ 
+-#     eq_(c.a, 255)
+-#     eq_(c.r, 128)
+-#     eq_(c.g, 128)
+-#     eq_(c.b, 128)
++     c = mapnik.Color('rgb(50%,50%,50%)')
+ 
+-#     eq_(c.to_hex_string(), '#808080')
++     assert c.a ==  255
++     assert c.r ==  128
++     assert c.g ==  128
++     assert c.b ==  128
+ 
+-#     c = mapnik.Color(0, 64, 128)
++     assert c.to_hex_string() ==  '#808080'
+ 
+-#     eq_(c.a, 255)
+-#     eq_(c.r, 0)
+-#     eq_(c.g, 64)
+-#     eq_(c.b, 128)
++     c = mapnik.Color(0, 64, 128)
+ 
+-#     eq_(c.to_hex_string(), '#004080')
++     assert c.a ==  255
++     assert c.r ==  0
++     assert c.g ==  64
++     assert c.b ==  128
+ 
+-#     c = mapnik.Color(0, 64, 128, 192)
++     assert c.to_hex_string() ==  '#004080'
+ 
+-#     eq_(c.a, 192)
+-#     eq_(c.r, 0)
+-#     eq_(c.g, 64)
+-#     eq_(c.b, 128)
++     c = mapnik.Color(0, 64, 128, 192)
+ 
+-#     eq_(c.to_hex_string(), '#004080c0')
++     assert c.a ==  192
++     assert c.r ==  0
++     assert c.g ==  64
++     assert c.b ==  128
+ 
+-# def test_color_equality():
++     assert c.to_hex_string() ==  '#004080c0'
+ 
+-#     c1 = mapnik.Color('blue')
+-#     c2 = mapnik.Color(0,0,255)
+-#     c3 = mapnik.Color('black')
++def test_color_equality():
+ 
+-#     c3.r = 0
+-#     c3.g = 0
+-#     c3.b = 255
+-#     c3.a = 255
++    c1 = mapnik.Color('blue')
++    c2 = mapnik.Color(0,0,255)
++    c3 = mapnik.Color('black')
+ 
+-#     eq_(c1, c2)
+-#     eq_(c1, c3)
++    c3.r = 0
++    c3.g = 0
++    c3.b = 255
++    c3.a = 255
+ 
+-#     c1 = mapnik.Color(0, 64, 128)
+-#     c2 = mapnik.Color(0, 64, 128)
+-#     c3 = mapnik.Color(0, 0, 0)
++    assert c1 ==  c2
++    assert c1 ==  c3
+ 
+-#     c3.r = 0
+-#     c3.g = 64
+-#     c3.b = 128
++    c1 = mapnik.Color(0, 64, 128)
++    c2 = mapnik.Color(0, 64, 128)
++    c3 = mapnik.Color(0, 0, 0)
+ 
+-#     eq_(c1, c2)
+-#     eq_(c1, c3)
++    c3.r = 0
++    c3.g = 64
++    c3.b = 128
+ 
+-#     c1 = mapnik.Color(0, 64, 128, 192)
+-#     c2 = mapnik.Color(0, 64, 128, 192)
+-#     c3 = mapnik.Color(0, 0, 0, 255)
++    assert c1 ==  c2
++    assert c1 ==  c3
+ 
+-#     c3.r = 0
+-#     c3.g = 64
+-#     c3.b = 128
+-#     c3.a = 192
++    c1 = mapnik.Color(0, 64, 128, 192)
++    c2 = mapnik.Color(0, 64, 128, 192)
++    c3 = mapnik.Color(0, 0, 0, 255)
+ 
+-#     eq_(c1, c2)
+-#     eq_(c1, c3)
++    c3.r = 0
++    c3.g = 64
++    c3.b = 128
++    c3.a = 192
+ 
+-#     c1 = mapnik.Color('rgb(50%,50%,50%)')
+-#     c2 = mapnik.Color(128, 128, 128, 255)
+-#     c3 = mapnik.Color('#808080')
+-#     c4 = mapnik.Color('gray')
++    assert c1 ==  c2
++    assert c1 ==  c3
+ 
+-#     eq_(c1, c2)
+-#     eq_(c1, c3)
+-#     eq_(c1, c4)
++    c1 = mapnik.Color('rgb(50%,50%,50%)')
++    c2 = mapnik.Color(128, 128, 128, 255)
++    c3 = mapnik.Color('#808080')
++    c4 = mapnik.Color('gray')
+ 
+-#     c1 = mapnik.Color('hsl(0, 100%, 50%)')   # red
+-#     c2 = mapnik.Color('hsl(120, 100%, 50%)') # lime
+-# c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid
+-# blue
++    assert c1 ==  c2
++    assert c1 ==  c3
++    assert c1 ==  c4
+ 
+-#     eq_(c1, mapnik.Color('red'))
+-#     eq_(c2, mapnik.Color('lime'))
+-#     eq_(c3, mapnik.Color(0,0,255,128))
++    c1 = mapnik.Color('hsl(0, 100%, 50%)')   # red
++    c2 = mapnik.Color('hsl(120, 100%, 50%)') # lime
++    c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid blue
+ 
+-# def test_rule_init():
+-#     min_scale = 5
+-#     max_scale = 10
++    assert c1 ==  mapnik.Color('red')
++    assert c2 ==  mapnik.Color('lime')
++    assert c3 ==  mapnik.Color(0,0,255,128)
+ 
+-#     r = mapnik.Rule()
++def test_rule_init():
++    min_scale = 5
++    max_scale = 10
+ 
+-#     eq_(r.name, '')
+-#     eq_(r.min_scale, 0)
+-#     eq_(r.max_scale, float('inf'))
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule()
+ 
+-#     r = mapnik.Rule()
++    assert r.name ==  ''
++    assert r.min_scale ==  0
++    assert r.max_scale ==  float('inf')
++    assert r.has_else() ==  False
++    assert r.has_also() ==  False
+ 
+-#     r.set_else(True)
+-#     eq_(r.has_else(), True)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule()
+ 
+-#     r = mapnik.Rule()
++    r.set_else(True)
++    assert r.has_else() ==  True
++    assert r.has_also() ==  False
+ 
+-#     r.set_also(True)
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), True)
++    r = mapnik.Rule()
+ 
+-#     r = mapnik.Rule("Name")
++    r.set_also(True)
++    assert r.has_else() ==  False
++    assert r.has_also() ==  True
+ 
+-#     eq_(r.name, 'Name')
+-#     eq_(r.min_scale, 0)
+-#     eq_(r.max_scale, float('inf'))
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule("Name")
+ 
+-#     r = mapnik.Rule("Name")
++    assert r.name ==  'Name'
++    assert r.min_scale ==  0
++    assert r.max_scale ==  float('inf')
++    assert r.has_else() ==  False
++    assert r.has_also() ==  False
+ 
+-#     eq_(r.name, 'Name')
+-#     eq_(r.min_scale, 0)
+-#     eq_(r.max_scale, float('inf'))
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule("Name")
+ 
+-#     r = mapnik.Rule("Name", min_scale)
++    assert r.name ==  'Name'
++    assert r.min_scale ==  0
++    assert r.max_scale ==  float('inf')
++    assert r.has_else() ==  False
++    assert r.has_also() ==  False
+ 
+-#     eq_(r.name, 'Name')
+-#     eq_(r.min_scale, min_scale)
+-#     eq_(r.max_scale, float('inf'))
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule("Name", min_scale)
+ 
+-#     r = mapnik.Rule("Name", min_scale, max_scale)
++    assert r.name ==  'Name'
++    assert r.min_scale ==  min_scale
++    assert r.max_scale ==  float('inf')
++    assert r.has_else() ==  False
++    assert r.has_also() ==  False
+ 
+-#     eq_(r.name, 'Name')
+-#     eq_(r.min_scale, min_scale)
+-#     eq_(r.max_scale, max_scale)
+-#     eq_(r.has_else(), False)
+-#     eq_(r.has_also(), False)
++    r = mapnik.Rule("Name", min_scale, max_scale)
+ 
+-# if __name__ == "__main__":
+-#     setup()
+-#     run_all(eval(x) for x in dir() if x.startswith("test_"))
++    assert r.name ==  'Name'
++    assert r.min_scale ==  min_scale
++    assert r.max_scale ==  max_scale
++    assert r.has_else() ==  False
++    assert r.has_also() ==  False
+--- a/test/python_tests/ogr_and_shape_geometries_test.py
++++ b/test/python_tests/ogr_and_shape_geometries_test.py
+@@ -1,12 +1,14 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
++import pytest
+ import mapnik
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
++@pytest.fixture(scope="module")
++def setup():
++    # All of the paths used are relative, if we run the tests
++    # from another directory we need to chdir()
++    os.chdir(execution_path('.'))
++    yield
+ 
+ try:
+     import itertools.izip as zip
+@@ -14,11 +16,6 @@ except ImportError:
+     pass
+ 
+ 
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+ # TODO - fix truncation in shapefile...
+ polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))",
+          "POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))",
+@@ -37,17 +34,12 @@ if 'shape' in plugins and 'ogr' in plugi
+         count = 0
+         for feat1, feat2 in zip(fs1, fs2):
+             count += 1
+-            eq_(feat1.attributes, feat2.attributes)
+-            # TODO - revisit this: https://github.com/mapnik/mapnik/issues/1093
+-            # eq_(feat1.to_geojson(),feat2.to_geojson())
+-            # eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())
+-            # eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.NDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.NDR))
+-            # eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.XDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.XDR))
++            assert feat1.attributes == feat2.attributes
++            assert feat1.to_geojson() == feat2.to_geojson()
++            assert feat1.geometry.to_wkt() == feat2.geometry.to_wkt()
++            assert feat1.geometry.to_wkb(mapnik.wkbByteOrder.NDR) == feat2.geometry.to_wkb(mapnik.wkbByteOrder.NDR)
++            assert feat1.geometry.to_wkb(mapnik.wkbByteOrder.XDR) == feat2.geometry.to_wkb(mapnik.wkbByteOrder.XDR)
+ 
+-    def test_simple_polys():
++    def test_simple_polys(setup):
+         ensure_geometries_are_interpreted_equivalently(
+             '../data/shp/wkt_poly.shp')
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/ogr_test.py
++++ b/test/python_tests/ogr_test.py
+@@ -1,89 +1,71 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_, raises
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
++import pytest
+ 
+ try:
+     import json
+ except ImportError:
+     import simplejson as json
+ 
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
+ 
+     # Shapefile initialization
+-    def test_shapefile_init():
++    def test_shapefile_init(setup):
+         ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
+         e = ds.envelope()
+-        assert_almost_equal(e.minx, -11121.6896651, places=7)
+-        assert_almost_equal(e.miny, -724724.216526, places=6)
+-        assert_almost_equal(e.maxx, 2463000.67866, places=5)
+-        assert_almost_equal(e.maxy, 1649661.267, places=3)
++        assert e.minx == pytest.approx(-11121.6896651, abs=1e-7)
++        assert e.miny == pytest.approx(-724724.216526, abs=1e-6)
++        assert e.maxx == pytest.approx(2463000.67866, abs=1e-5)
++        assert e.maxy == pytest.approx(1649661.267, abs=1e-3)
+         meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+-        eq_('+proj=lcc' in meta['proj4'], True)
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Polygon
++        assert '+proj=lcc' in meta['proj4']
+ 
+     # Shapefile properties
+     def test_shapefile_properties():
+         ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
+         f = list(ds.features_at_point(ds.envelope().center(), 0.001))[0]
+-        eq_(ds.geometry_type(), mapnik.DataGeometryType.Polygon)
++        assert ds.geometry_type() ==  mapnik.DataGeometryType.Polygon
+ 
+-        eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
+-        eq_(f['COUNTRY'], u'CAN')
+-        eq_(f['F_CODE'], u'FA001')
+-        eq_(f['NAME_EN'], u'Quebec')
+-        eq_(f['Shape_Area'], 1512185733150.0)
+-        eq_(f['Shape_Leng'], 19218883.724300001)
+-        meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+-        # NOTE: encoding is latin1 but gdal >= 1.9 should now expose utf8 encoded features
+-        # See SHAPE_ENCODING for overriding: http://gdal.org/ogr/drv_shapefile.html
+-        # Failure for the NOM_FR field is expected for older gdal
+-        #eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        #eq_(f['NOM_FR'], u'Qu?bec')
++        assert f['CGNS_FID'] ==  u'6f733341ba2011d892e2080020a0f4c9'
++        assert f['COUNTRY'] ==  u'CAN'
++        assert f['F_CODE'] ==  u'FA001'
++        assert f['NAME_EN'] ==  u'Quebec'
++        assert f['Shape_Area'] ==  1512185733150.0
++        assert f['Shape_Leng'] ==  19218883.724300001
++        meta = ds.describe()
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Polygon
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        # assert f['NOM_FR'] ==  u'Qu?bec'  # only with gdal < 1.9 (no utf8 re-encoding)
+ 
+-    @raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
+-        ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0)
+-        eq_(len(ds.fields()), 11)
+-        eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
+-                          'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
+-        eq_(ds.field_types(),
+-            ['str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float'])
+-        query = mapnik.Query(ds.envelope())
+-        for fld in ds.fields():
+-            query.add_property_name(fld)
+-        # also add an invalid one, triggering throw
+-        query.add_property_name('bogus')
+-        ds.features(query)
++        with pytest.raises(RuntimeError):
++            ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0)
++            assert len(ds.fields()) ==  11
++            assert ds.fields() == ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
++                              'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT']
++            assert ds.field_types() == ['str','str','str','int','str','int','int','int','int','float','float']
++            query = mapnik.Query(ds.envelope())
++            for fld in ds.fields():
++                query.add_property_name(fld)
++            # also add an invalid one, triggering throw
++            query.add_property_name('bogus')
++            ds.features(query)
+ 
+     # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member.
+     # def test_handling_of_null_features():
+-    #    ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
+-    #    fs = ds.all_features()
+-    #    eq_(len(fs),1)
++    #     ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
++    #     fs = ds.all_features()
++    #     assert len(list(fs)) == 1
+ 
+     # OGR plugin extent parameter
+     def test_ogr_extent_parameter():
+@@ -92,24 +74,24 @@ if 'ogr' in mapnik.DatasourceCache.plugi
+             layer_by_index=0,
+             extent='-1,-1,1,1')
+         e = ds.envelope()
+-        eq_(e.minx, -1)
+-        eq_(e.miny, -1)
+-        eq_(e.maxx, 1)
+-        eq_(e.maxy, 1)
++        assert e.minx ==  -1
++        assert e.miny ==  -1
++        assert e.maxx ==  1
++        assert e.maxy ==  1
+         meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+-        eq_('+proj=merc' in meta['proj4'], True)
++        assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon
++        assert '+proj=merc' in meta['proj4']
+ 
+     def test_ogr_reading_gpx_waypoint():
+         ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer='waypoints')
+         e = ds.envelope()
+-        eq_(e.minx, -122)
+-        eq_(e.miny, 48)
+-        eq_(e.maxx, -122)
+-        eq_(e.maxy, 48)
++        assert e.minx ==  -122
++        assert e.miny ==  48
++        assert e.maxx ==  -122
++        assert e.maxy ==  48
+         meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_('+proj=longlat' in meta['proj4'], True)
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert '+proj=longlat' in meta['proj4']
+ 
+     def test_ogr_empty_data_should_not_throw():
+         default_logging_severity = mapnik.logger.get_severity()
+@@ -118,189 +100,102 @@ if 'ogr' in mapnik.DatasourceCache.plugi
+         for layer in ['routes', 'tracks', 'route_points', 'track_points']:
+             ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer=layer)
+             e = ds.envelope()
+-            eq_(e.minx, 0)
+-            eq_(e.miny, 0)
+-            eq_(e.maxx, 0)
+-            eq_(e.maxy, 0)
++            assert e.minx ==  0
++            assert e.miny ==  0
++            assert e.maxx ==  0
++            assert e.maxy ==  0
+         mapnik.logger.set_severity(default_logging_severity)
+         meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+-        eq_('+proj=longlat' in meta['proj4'], True)
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++        assert '+proj=longlat' in meta['proj4']
+ 
+     # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member.
+-    # def test_handling_of_null_features():
+-    #    ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
+-    #    fs = ds.all_features()
+-    #    eq_(len(fs),1)
++    def test_handling_of_null_features():
++        # re-enabled; OGR may log a benign "Invalid Point object" error here
++        ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
++        fs = ds.all_features()
++        assert len(list(fs)) == 1
+ 
+     def test_geometry_type():
+         ds = mapnik.Ogr(file='../data/csv/wkt.csv', layer_by_index=0)
+         e = ds.envelope()
+-        assert_almost_equal(e.minx, 1.0, places=1)
+-        assert_almost_equal(e.miny, 1.0, places=1)
+-        assert_almost_equal(e.maxx, 45.0, places=1)
+-        assert_almost_equal(e.maxy, 45.0, places=1)
++        assert e.minx == pytest.approx(1.0, abs=1e-1)
++        assert e.miny == pytest.approx(1.0, abs=1e-1)
++        assert e.maxx == pytest.approx(45.0, abs=1e-1)
++        assert e.maxy == pytest.approx(45.0, abs=1e-1)
+         meta = ds.describe()
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+-        #eq_('+proj=longlat' in meta['proj4'],True)
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+         fs = ds.featureset()
+         feat = fs.next()
+         actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'Point',
+-                           u'coordinates': [30,
+-                                            10]},
+-             u'type': u'Feature',
+-             u'id': 2,
+-             u'properties': {u'type': u'point',
+-                             u'WKT': u'           POINT (30 10)'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'LineString',
+-                           u'coordinates': [[30,
+-                                             10],
+-                                            [10,
+-                                             30],
+-                                            [40,
+-                                             40]]},
+-                u'type': u'Feature',
+-                u'id': 3,
+-                u'properties': {u'type': u'linestring',
+-                                u'WKT': u'      LINESTRING (30 10, 10 30, 40 40)'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'Polygon',
+-                           u'coordinates': [[[30,
+-                                              10],
+-                                             [40,
+-                                              40],
+-                                             [20,
+-                                              40],
+-                                             [10,
+-                                              20],
+-                                             [30,
+-                                              10]]]},
+-                u'type': u'Feature',
+-                u'id': 4,
+-                u'properties': {u'type': u'polygon',
+-                                u'WKT': u'         POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(
+-            actual, {
+-                u'geometry': {
+-                    u'type': u'Polygon', u'coordinates': [
+-                        [
+-                            [
+-                                35, 10], [
+-                                45, 45], [
+-                                15, 40], [
+-                                    10, 20], [
+-                                        35, 10]], [
+-                                            [
+-                                                20, 30], [
+-                                                    35, 35], [
+-                                                        30, 20], [
+-                                                            20, 30]]]}, u'type': u'Feature', u'id': 5, u'properties': {
+-                                                                u'type': u'polygon', u'WKT': u'         POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'MultiPoint',
+-                           u'coordinates': [[10,
+-                                             40],
+-                                            [40,
+-                                             30],
+-                                            [20,
+-                                             20],
+-                                            [30,
+-                                             10]]},
+-                u'type': u'Feature',
+-                u'id': 6,
+-                u'properties': {u'type': u'multipoint',
+-                                u'WKT': u'      MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'MultiLineString',
+-                           u'coordinates': [[[10,
+-                                              10],
+-                                             [20,
+-                                              20],
+-                                             [10,
+-                                              40]],
+-                                            [[40,
+-                                              40],
+-                                             [30,
+-                                                30],
+-                                             [40,
+-                                                20],
+-                                             [30,
+-                                                10]]]},
+-                u'type': u'Feature',
+-                u'id': 7,
+-                u'properties': {u'type': u'multilinestring',
+-                                u'WKT': u' MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'MultiPolygon',
+-                           u'coordinates': [[[[30,
+-                                               20],
+-                                              [45,
+-                                               40],
+-                                              [10,
+-                                               40],
+-                                              [30,
+-                                               20]]],
+-                                            [[[15,
+-                                               5],
+-                                              [40,
+-                                                10],
+-                                                [10,
+-                                                 20],
+-                                                [5,
+-                                                 10],
+-                                                [15,
+-                                                 5]]]]},
+-                u'type': u'Feature',
+-                u'id': 8,
+-                u'properties': {u'type': u'multipolygon',
+-                                u'WKT': u'    MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual, {u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [
+-            30, 20]]]]}, u'type': u'Feature', u'id': 9, u'properties': {u'type': u'multipolygon', u'WKT': u'    MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}})
+-        feat = fs.next()
+-        actual = json.loads(feat.to_geojson())
+-        eq_(actual,
+-            {u'geometry': {u'type': u'GeometryCollection',
+-                           u'geometries': [{u'type': u'Polygon',
+-                                            u'coordinates': [[[1,
+-                                                               1],
+-                                                              [2,
+-                                                               1],
+-                                                              [2,
+-                                                               2],
+-                                                              [1,
+-                                                               2],
+-                                                              [1,
+-                                                               1]]]},
+-                                           {u'type': u'Point',
+-                                            u'coordinates': [2,
+-                                                             3]},
+-                                           {u'type': u'LineString',
+-                                            u'coordinates': [[2,
+-                                                              3],
+-                                                             [3,
+-                                                              4]]}]},
+-                u'type': u'Feature',
+-                u'id': 10,
+-                u'properties': {u'type': u'collection',
+-                                u'WKT': u'      GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}})
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert actual == {u'geometry': {u'type': u'Point',
++                                        u'coordinates': [30,10]},
++                          u'type': u'Feature',
++                          u'id': 2,
++                          u'properties': {u'type': u'point',
++                                          u'WKT': u'           POINT (30 10)'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual ==  {u'geometry': {u'type': u'LineString',
++                           u'coordinates': [[30,10],[10,30],[40,40]]},
++                           u'type': u'Feature',
++                           u'id': 3,
++                           u'properties': {u'type': u'linestring',
++                                           u'WKT': u'      LINESTRING (30 10, 10 30, 40 40)'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual ==  {u'geometry': {u'type': u'Polygon', u'coordinates': [[[30,10],[40,40],[20,40],[10,20],[30,10]]]},
++                           u'type': u'Feature',
++                           u'id': 4,
++                           u'properties': {u'type': u'polygon',
++                                           u'WKT': u'         POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'Polygon', u'coordinates': [[[35, 10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]},
++                                        u'type': u'Feature',
++                                        u'id': 5,
++                                        u'properties': { u'type': u'polygon', u'WKT': u'         POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'MultiPoint',
++                                        u'coordinates': [[10,40],[40,30],[20,20],[30,10]]},
++                          u'type': u'Feature',
++                          u'id': 6,
++                          u'properties': {u'type': u'multipoint',
++                                          u'WKT': u'      MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'MultiLineString',
++                                        u'coordinates': [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]},
++                          u'type': u'Feature',
++                          u'id': 7,
++                          u'properties': {u'type': u'multilinestring',
++                                          u'WKT': u' MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'MultiPolygon',
++                                        u'coordinates': [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]},
++                          u'type': u'Feature',
++                          u'id': 8,
++                          u'properties': {u'type': u'multipolygon',
++                                          u'WKT': u'    MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'MultiPolygon',
++                                        u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [30, 20]]]]},
++                          u'type': u'Feature',
++                          u'id': 9,
++                          u'properties': {u'type': u'multipolygon', u'WKT': u'    MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}}
++        feat = fs.next()
++        actual = json.loads(feat.to_geojson())
++        assert actual == {u'geometry': {u'type': u'GeometryCollection',
++                                        u'geometries': [{u'type': u'Polygon',
++                                                         u'coordinates': [[[1, 1],[2,1],[2,2],[1,2],[1,1]]]},
++                                                        {u'type': u'Point',
++                                                         u'coordinates': [2,3]},
++                                                        {u'type': u'LineString',
++                                                         u'coordinates': [[2,3],[3,4]]}]},
++                          u'type': u'Feature',
++                          u'id': 10,
++                          u'properties': {u'type': u'collection',
++                                          u'WKT': u'      GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}}
+--- a/test/python_tests/osm_test.py
++++ /dev/null
+@@ -1,69 +0,0 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+-
+-from nose.tools import eq_
+-
+-import mapnik
+-
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-if 'osm' in mapnik.DatasourceCache.plugin_names():
+-
+-    # osm initialization
+-    def test_osm_init():
+-        ds = mapnik.Osm(file='../data/osm/nodes.osm')
+-
+-        e = ds.envelope()
+-
+-        # these are hardcoded in the plugin? ugh
+-        eq_(e.minx >= -180.0, True)
+-        eq_(e.miny >= -90.0, True)
+-        eq_(e.maxx <= 180.0, True)
+-        eq_(e.maxy <= 90, True)
+-
+-    def test_that_nonexistant_query_field_throws(**kwargs):
+-        ds = mapnik.Osm(file='../data/osm/nodes.osm')
+-        eq_(len(ds.fields()), 0)
+-        query = mapnik.Query(ds.envelope())
+-        for fld in ds.fields():
+-            query.add_property_name(fld)
+-        # also add an invalid one, triggering throw
+-        query.add_property_name('bogus')
+-        ds.features(query)
+-
+-    def test_that_64bit_int_fields_work():
+-        ds = mapnik.Osm(file='../data/osm/64bit.osm')
+-        eq_(len(ds.fields()), 4)
+-        eq_(ds.fields(), ['bigint', 'highway', 'junction', 'note'])
+-        eq_(ds.field_types(), ['str', 'str', 'str', 'str'])
+-        fs = ds.featureset()
+-        feat = fs.next()
+-        eq_(feat.to_geojson(
+-        ), '{"type":"Feature","id":890,"geometry":{"type":"Point","coordinates":[-61.7960248,17.1415874]},"properties":{}}')
+-        eq_(feat.id(), 4294968186)
+-        eq_(feat['bigint'], None)
+-        feat = fs.next()
+-        eq_(feat['bigint'], '9223372036854775807')
+-
+-    def test_reading_ways():
+-        ds = mapnik.Osm(file='../data/osm/ways.osm')
+-        eq_(len(ds.fields()), 0)
+-        eq_(ds.fields(), [])
+-        eq_(ds.field_types(), [])
+-        feat = ds.all_features()[4]
+-        eq_(feat.to_geojson(
+-        ), '{"type":"Feature","id":1,"geometry":{"type":"LineString","coordinates":[[0,2],[0,-2]]},"properties":{}}')
+-        eq_(feat.id(), 1)
+-
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/palette_test.py
++++ b/test/python_tests/palette_test.py
+@@ -1,22 +1,14 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+-import sys
+-
+-from nose.tools import eq_
+-
++import sys, os
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ expected_64 = '[Palette 64 colors #494746 #c37631 #89827c #d1955c #7397b9 #fc9237 #a09f9c #fbc147 #9bb3ce #b7c9a1 #b5d29c #c4b9aa #cdc4a5 #d5c8a3 #c1d7aa #ccc4b6 #dbd19c #b2c4d5 #eae487 #c9c8c6 #e4db99 #c9dcb5 #dfd3ac #cbd2c2 #d6cdbc #dbd2b6 #c0ceda #ece597 #f7ef86 #d7d3c3 #dfcbc3 #d1d0cd #d1e2bf #d3dec1 #dbd3c4 #e6d8b6 #f4ef91 #d3d3cf #cad5de #ded7c9 #dfdbce #fcf993 #ffff8a #dbd9d7 #dbe7cd #d4dce2 #e4ded3 #ebe3c9 #e0e2e2 #f4edc3 #fdfcae #e9e5dc #f4edda #eeebe4 #fefdc5 #e7edf2 #edf4e5 #f2efe9 #f6ede7 #fefedd #f6f4f0 #f1f5f8 #fbfaf8 #ffffff]'
+ 
+@@ -25,18 +17,15 @@ expected_256 = '[Palette 256 colors #272
+ expected_rgb = '[Palette 2 colors #ff00ff #ffffff]'
+ 
+ 
+-def test_reading_palettes():
++def test_reading_palettes(setup):
+     with open('../data/palettes/palette64.act', 'rb') as act:
+         palette = mapnik.Palette(act.read(), 'act')
+-    eq_(palette.to_string(), expected_64)
++    assert palette.to_string() ==  expected_64
+     with open('../data/palettes/palette256.act', 'rb') as act:
+         palette = mapnik.Palette(act.read(), 'act')
+-    eq_(palette.to_string(), expected_256)
+-    if PYTHON3:
+-        palette = mapnik.Palette(b'\xff\x00\xff\xff\xff\xff', 'rgb')
+-    else:
+-        palette = mapnik.Palette('\xff\x00\xff\xff\xff\xff', 'rgb')
+-    eq_(palette.to_string(), expected_rgb)
++    assert palette.to_string() ==  expected_256
++    palette = mapnik.Palette(b'\xff\x00\xff\xff\xff\xff', 'rgb')
++    assert palette.to_string() ==  expected_rgb
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+ 
+@@ -50,7 +39,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+             palette = mapnik.Palette(act.read(), 'act')
+         # test saving directly to filesystem
+         im.save('/tmp/mapnik-palette-test.png', 'png', palette)
+-        expected = './images/support/mapnik-palette-test.png'
++        expected = 'images/support/mapnik-palette-test.png'
+         if os.environ.get('UPDATE'):
+             im.save(expected, "png", palette)
+ 
+@@ -58,17 +47,10 @@ if 'shape' in mapnik.DatasourceCache.plu
+         with open('/tmp/mapnik-palette-test2.png', 'wb') as f:
+             f.write(im.tostring('png', palette))
+         # compare the two methods
+-        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),
+-            mapnik.Image.open(
+-                '/tmp/mapnik-palette-test2.png').tostring('png32'),
+-            '%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
+-                                 '/tmp/mapnik-palette-test2.png'))
++        im1 = mapnik.Image.open('/tmp/mapnik-palette-test.png')
++        im2 = mapnik.Image.open('/tmp/mapnik-palette-test2.png')
++        assert im1.tostring('png32') == im2.tostring('png32'),'%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
++                                                                                   '/tmp/mapnik-palette-test2.png')
+         # compare to expected
+-        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),
+-            mapnik.Image.open(expected).tostring('png32'),
+-            '%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
+-                                 expected))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert im1.tostring('png32') == mapnik.Image.open(expected).tostring('png32'), '%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
++                                                                                                            expected)
+--- a/test/python_tests/parameters_test.py
++++ b/test/python_tests/parameters_test.py
+@@ -1,71 +1,52 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+ import sys
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
+-    os.chdir(execution_path('.'))
+-
+-
+ def test_parameter_null():
+     p = mapnik.Parameter('key', None)
+-    eq_(p[0], 'key')
+-    eq_(p[1], None)
++    assert p[0] ==  'key'
++    assert p[1] ==  None
+ 
+ 
+ def test_parameter_string():
+     p = mapnik.Parameter('key', 'value')
+-    eq_(p[0], 'key')
+-    eq_(p[1], 'value')
++    assert p[0] ==  'key'
++    assert p[1] ==  'value'
+ 
+ 
+ def test_parameter_unicode():
+     p = mapnik.Parameter('key', u'value')
+-    eq_(p[0], 'key')
+-    eq_(p[1], u'value')
++    assert p[0] ==  'key'
++    assert p[1] ==  u'value'
+ 
+ 
+ def test_parameter_integer():
+     p = mapnik.Parameter('int', sys.maxsize)
+-    eq_(p[0], 'int')
+-    eq_(p[1], sys.maxsize)
++    assert p[0] ==  'int'
++    assert p[1] ==  sys.maxsize
+ 
+ 
+ def test_parameter_double():
+     p = mapnik.Parameter('double', float(sys.maxsize))
+-    eq_(p[0], 'double')
+-    eq_(p[1], float(sys.maxsize))
++    assert p[0] ==  'double'
++    assert p[1] ==  float(sys.maxsize)
+ 
+ 
+ def test_parameter_boolean():
+     p = mapnik.Parameter('boolean', True)
+-    eq_(p[0], 'boolean')
+-    eq_(p[1], True)
+-    eq_(bool(p[1]), True)
++    assert p[0] ==  'boolean'
++    assert p[1] ==  True
++    assert bool(p[1]) ==  True
+ 
+ 
+ def test_parameters():
+     params = mapnik.Parameters()
+     p = mapnik.Parameter('float', 1.0777)
+-    eq_(p[0], 'float')
+-    eq_(p[1], 1.0777)
++    assert p[0] ==  'float'
++    assert p[1] ==  1.0777
+ 
+     params.append(p)
+ 
+-    eq_(params[0][0], 'float')
+-    eq_(params[0][1], 1.0777)
+-
+-    eq_(params.get('float'), 1.0777)
+-
++    assert params[0][0] ==  'float'
++    assert params[0][1] ==  1.0777
+ 
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert params.get('float') ==  1.0777
+--- a/test/python_tests/pdf_printing_test.py
++++ b/test/python_tests/pdf_printing_test.py
+@@ -1,57 +1,50 @@
+-#!/usr/bin/env python
+-
+-import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
+-from .utilities import execution_path, run_all
++import os
++import pytest
++from .utilities import execution_path
+ 
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ def make_map_from_xml(source_xml):
+-	m = mapnik.Map(100, 100)
+-	mapnik.load_map(m, source_xml, True)
+-	m.zoom_all()
+-
+-	return m
++        m = mapnik.Map(100, 100)
++        mapnik.load_map(m, source_xml, True)
++        m.zoom_all()
++        return m
+ 
+ def make_pdf(m, output_pdf, esri_wkt):
+-	# renders a PDF with a grid and a legend
+-	page = mapnik.printing.PDFPrinter(use_ocg_layers=True)
++        # renders a PDF with a grid and a legend
++        page = mapnik.printing.PDFPrinter(use_ocg_layers=True)
+ 
+-	page.render_map(m, output_pdf)
+-	page.render_grid_on_map(m)
+-	page.render_legend(m)
++        page.render_map(m, output_pdf)
++        page.render_grid_on_map(m)
++        page.render_legend(m)
+ 
+-	page.finish()
+-	page.add_geospatial_pdf_header(m, output_pdf, wkt=esri_wkt)
++        page.finish()
++        page.add_geospatial_pdf_header(m, output_pdf, wkt=esri_wkt)
+ 
+ if mapnik.has_pycairo():
+-	import mapnik.printing
++        import mapnik.printing
+ 
+-	def test_pdf_printing():
+-		source_xml = '../data/good_maps/marker-text-line.xml'.encode('utf-8')
+-		m = make_map_from_xml(source_xml)
++        def test_pdf_printing(setup):
++                source_xml = '../data/good_maps/marker-text-line.xml'.encode('utf-8')
++                m = make_map_from_xml(source_xml)
+ 
+-		actual_pdf = "/tmp/pdf-printing-actual.pdf"
+-		esri_wkt = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'
+-		make_pdf(m, actual_pdf, esri_wkt)
++                actual_pdf = "/tmp/pdf-printing-actual.pdf"
++                esri_wkt = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'
++                make_pdf(m, actual_pdf, esri_wkt)
+ 
+-		expected_pdf = 'images/pycairo/pdf-printing-expected.pdf'
++                expected_pdf = 'images/pycairo/pdf-printing-expected.pdf'
+ 
+-		diff = abs(os.stat(expected_pdf).st_size - os.stat(actual_pdf).st_size)
+-		msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff, actual_pdf, 'tests/python_tests/' + expected_pdf)
+-		eq_(diff < 1500, True, msg)
++                diff = abs(os.stat(expected_pdf).st_size - os.stat(actual_pdf).st_size)
++                msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff, actual_pdf, 'tests/python_tests/' + expected_pdf)
++                assert diff < 1500, msg
+ 
+ # TODO: ideas for further testing on printing module
+ # - test with and without pangocairo
+ # - test legend with attribution
+ # - test graticule (bug at the moment)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/pgraster_test.py
++++ b/test/python_tests/pgraster_test.py
+@@ -1,5 +1,3 @@
+-#!/usr/bin/env python
+-
+ import atexit
+ import os
+ import re
+@@ -7,12 +5,9 @@ import sys
+ import time
+ from binascii import hexlify
+ from subprocess import PIPE, Popen
+-
+-from nose.tools import assert_almost_equal, eq_
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all, side_by_side_image
++import pytest
++from .utilities import execution_path, side_by_side_image
+ 
+ MAPNIK_TEST_DBNAME = 'mapnik-tmp-pgraster-test-db'
+ POSTGIS_TEMPLATE_DBNAME = 'template_postgis'
+@@ -23,7 +18,7 @@ def log(msg):
+     if DEBUG_OUTPUT:
+         print(msg)
+ 
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+@@ -433,16 +428,16 @@ if 'pgraster' in mapnik.DatasourceCache.
+             lyr.name, tnam, lbl, overview, clip)
+         compare_images(expected, im)
+         # no data
+-        eq_(hexlify(im.view(3, 16, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(128, 16, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(250, 16, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(3, 240, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(128, 240, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(250, 240, 1, 1).tostring()), '00000000')
++        assert hexlify(im.view(3, 16, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(128, 16, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(250, 16, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(3, 240, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(128, 240, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(250, 240, 1, 1).tostring()) == b'00000000'
+         # dark brown
+-        eq_(hexlify(im.view(174, 39, 1, 1).tostring()), 'c3a698ff')
++        assert hexlify(im.view(174, 39, 1, 1).tostring()) == b'c3a698ff'
+         # dark gray
+-        eq_(hexlify(im.view(195, 132, 1, 1).tostring()), '575f62ff')
++        assert hexlify(im.view(195, 132, 1, 1).tostring()) == b'575f62ff'
+         # Now zoom over a portion of the env (1/10)
+         newenv = mapnik.Box2d(-12329035.7652168, 4508926.651484220,
+                               -12328997.49148983, 4508957.34625536)
+@@ -456,17 +451,17 @@ if 'pgraster' in mapnik.DatasourceCache.
+             lyr.name, tnam, lbl, overview, clip)
+         compare_images(expected, im)
+         # no data
+-        eq_(hexlify(im.view(3, 16, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(128, 16, 1, 1).tostring()), '00000000')
+-        eq_(hexlify(im.view(250, 16, 1, 1).tostring()), '00000000')
++        assert hexlify(im.view(3, 16, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(128, 16, 1, 1).tostring()) == b'00000000'
++        assert hexlify(im.view(250, 16, 1, 1).tostring()) == b'00000000'
+         # black
+-        eq_(hexlify(im.view(3, 42, 1, 1).tostring()), '000000ff')
+-        eq_(hexlify(im.view(3, 134, 1, 1).tostring()), '000000ff')
+-        eq_(hexlify(im.view(3, 244, 1, 1).tostring()), '000000ff')
++        assert hexlify(im.view(3, 42, 1, 1).tostring()) == b'000000ff'
++        assert hexlify(im.view(3, 134, 1, 1).tostring()) == b'000000ff'
++        assert hexlify(im.view(3, 244, 1, 1).tostring()) == b'000000ff'
+         # gray
+-        eq_(hexlify(im.view(135, 157, 1, 1).tostring()), '4e555bff')
++        assert hexlify(im.view(135, 157, 1, 1).tostring()) == b'4e555bff'
+         # brown
+-        eq_(hexlify(im.view(195, 223, 1, 1).tostring()), 'f2cdbaff')
++        assert hexlify(im.view(195, 223, 1, 1).tostring()) == b'f2cdbaff'
+ 
+     def _test_rgb_8bui(lbl, tilesize, constraint, overview):
+         tnam = 'nodataedge'
+@@ -825,15 +820,8 @@ if 'pgraster' in mapnik.DatasourceCache.
+ 
+     atexit.register(postgis_takedown)
+ 
+-
+ def enabled(tname):
+     enabled = len(sys.argv) < 2 or tname in sys.argv
+     if not enabled:
+         print("Skipping " + tname + " as not explicitly enabled")
+     return enabled
+-
+-if __name__ == "__main__":
+-    setup()
+-    fail = run_all(eval(x)
+-                   for x in dir() if x.startswith("test_") and enabled(x))
+-    exit(fail)
+--- a/test/python_tests/pickling_test.py
++++ b/test/python_tests/pickling_test.py
+@@ -1,34 +1,29 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+ import pickle
+-
+-from nose.tools import eq_
+-
++import pytest
+ import mapnik
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ 
+-#def test_color_pickle():
+-#    c = mapnik.Color('blue')
+-#    eq_(pickle.loads(pickle.dumps(c)), c)
+-#    c = mapnik.Color(0, 64, 128)
+-#    eq_(pickle.loads(pickle.dumps(c)), c)
+-#    c = mapnik.Color(0, 64, 128, 192)
+-#    eq_(pickle.loads(pickle.dumps(c)), c)
++def test_color_pickle():
++    c = mapnik.Color('blue')
++    assert pickle.loads(pickle.dumps(c)) == c
++    c = mapnik.Color(0, 64, 128)
++    assert pickle.loads(pickle.dumps(c)) == c
++    c = mapnik.Color(0, 64, 128, 192)
++    assert pickle.loads(pickle.dumps(c)) == c
+ 
+ 
+-#def test_envelope_pickle():
+-#    e = mapnik.Box2d(100, 100, 200, 200)
+-#    eq_(pickle.loads(pickle.dumps(e)), e)
++def test_envelope_pickle():
++    e = mapnik.Box2d(100, 100, 200, 200)
++    assert pickle.loads(pickle.dumps(e)) == e
+ 
+ 
+ def test_parameters_pickle():
+@@ -37,9 +32,5 @@ def test_parameters_pickle():
+ 
+     params2 = pickle.loads(pickle.dumps(params, pickle.HIGHEST_PROTOCOL))
+ 
+-    eq_(params[0][0], params2[0][0])
+-    eq_(params[0][1], params2[0][1])
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert params[0][0] == params2[0][0]
++    assert params[0][1] == params2[0][1]
+--- a/test/python_tests/png_encoding_test.py
++++ b/test/python_tests/png_encoding_test.py
+@@ -1,19 +1,14 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if mapnik.has_png():
+     tmp_dir = '/tmp/mapnik-png/'
+@@ -47,7 +42,7 @@ if mapnik.has_png():
+ 
+     generate = os.environ.get('UPDATE')
+ 
+-    def test_expected_encodings():
++    def test_expected_encodings(setup):
+         # blank image
+         im = mapnik.Image(256, 256)
+         for opt in opts:
+@@ -58,9 +53,7 @@ if mapnik.has_png():
+                 im.save(expected, opt)
+             else:
+                 im.save(actual, opt)
+-                eq_(mapnik.Image.open(actual).tostring('png32'),
+-                    mapnik.Image.open(expected).tostring('png32'),
+-                    '%s (actual) not == to %s (expected)' % (actual, expected))
++                assert mapnik.Image.open(actual).tostring('png32') == mapnik.Image.open(expected).tostring('png32'), '%s (actual) not == to %s (expected)' % (actual, expected)
+ 
+         # solid image
+         im.fill(mapnik.Color('green'))
+@@ -72,9 +65,7 @@ if mapnik.has_png():
+                 im.save(expected, opt)
+             else:
+                 im.save(actual, opt)
+-                eq_(mapnik.Image.open(actual).tostring('png32'),
+-                    mapnik.Image.open(expected).tostring('png32'),
+-                    '%s (actual) not == to %s (expected)' % (actual, expected))
++                assert mapnik.Image.open(actual).tostring('png32') == mapnik.Image.open(expected).tostring('png32'), '%s (actual) not == to %s (expected)' % (actual, expected)
+ 
+         # aerial
+         im = mapnik.Image.open('./images/support/transparency/aerial_rgba.png')
+@@ -86,9 +77,7 @@ if mapnik.has_png():
+                 im.save(expected, opt)
+             else:
+                 im.save(actual, opt)
+-                eq_(mapnik.Image.open(actual).tostring('png32'),
+-                    mapnik.Image.open(expected).tostring('png32'),
+-                    '%s (actual) not == to %s (expected)' % (actual, expected))
++                assert mapnik.Image.open(actual).tostring('png32') == mapnik.Image.open(expected).tostring('png32'), '%s (actual) not == to %s (expected)' % (actual, expected)
+ 
+     def test_transparency_levels():
+         # create partial transparency image
+@@ -112,100 +101,83 @@ if mapnik.has_png():
+         im.save(t0, format)
+         im_in = mapnik.Image.open(t0)
+         t0_len = len(im_in.tostring(format))
+-        eq_(t0_len, len(mapnik.Image.open(
+-            'images/support/transparency/white0.png').tostring(format)))
++        assert t0_len == len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format))
+         format = 'png8:m=o:t=1'
+         im.save(t1, format)
+         im_in = mapnik.Image.open(t1)
+         t1_len = len(im_in.tostring(format))
+-        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+-            'images/support/transparency/white1.png').tostring(format)))
++        assert len(im.tostring(format)) == len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format))
+         format = 'png8:m=o:t=2'
+         im.save(t2, format)
+         im_in = mapnik.Image.open(t2)
+         t2_len = len(im_in.tostring(format))
+-        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+-            'images/support/transparency/white2.png').tostring(format)))
+-
+-        eq_(t0_len < t1_len < t2_len, True)
++        assert len(im.tostring(format)) == len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format))
++        assert t0_len < t1_len < t2_len
+ 
+         # hextree
+         format = 'png8:m=h:t=0'
+         im.save(t0, format)
+         im_in = mapnik.Image.open(t0)
+         t0_len = len(im_in.tostring(format))
+-        eq_(t0_len, len(mapnik.Image.open(
+-            'images/support/transparency/white0.png').tostring(format)))
++        assert t0_len == len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format))
+         format = 'png8:m=h:t=1'
+         im.save(t1, format)
+         im_in = mapnik.Image.open(t1)
+         t1_len = len(im_in.tostring(format))
+-        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+-            'images/support/transparency/white1.png').tostring(format)))
++        assert len(im.tostring(format)) == len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format))
+         format = 'png8:m=h:t=2'
+         im.save(t2, format)
+         im_in = mapnik.Image.open(t2)
+         t2_len = len(im_in.tostring(format))
+-        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+-            'images/support/transparency/white2.png').tostring(format)))
+-
+-        eq_(t0_len < t1_len < t2_len, True)
++        assert len(im.tostring(format)) == len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format))
++        assert t0_len < t1_len < t2_len
+ 
+     def test_transparency_levels_aerial():
+         im = mapnik.Image.open('../data/images/12_654_1580.png')
+         im_in = mapnik.Image.open(
+             './images/support/transparency/aerial_rgba.png')
+-        eq_(len(im.tostring('png8')), len(im_in.tostring('png8')))
+-        eq_(len(im.tostring('png32')), len(im_in.tostring('png32')))
++        assert len(im.tostring('png8')) == len(im_in.tostring('png8'))
++        assert len(im.tostring('png32')) == len(im_in.tostring('png32'))
+ 
+         im_in = mapnik.Image.open(
+             './images/support/transparency/aerial_rgb.png')
+-        eq_(len(im.tostring('png32')), len(im_in.tostring('png32')))
+-        eq_(len(im.tostring('png32:t=0')), len(im_in.tostring('png32:t=0')))
+-        eq_(len(im.tostring('png32:t=0')) == len(im_in.tostring('png32')), False)
+-        eq_(len(im.tostring('png8')), len(im_in.tostring('png8')))
+-        eq_(len(im.tostring('png8:t=0')), len(im_in.tostring('png8:t=0')))
++        assert len(im.tostring('png32')) == len(im_in.tostring('png32'))
++        assert len(im.tostring('png32:t=0')) == len(im_in.tostring('png32:t=0'))
++        assert not len(im.tostring('png32:t=0')) == len(im_in.tostring('png32'))
++        assert len(im.tostring('png8')) == len(im_in.tostring('png8'))
++        assert len(im.tostring('png8:t=0')) == len(im_in.tostring('png8:t=0'))
+         # unlike png32 paletted images without alpha will look the same even if
+         # no alpha is forced
+-        eq_(len(im.tostring('png8:t=0')) == len(im_in.tostring('png8')), True)
+-        eq_(len(im.tostring('png8:t=0:m=o')) ==
+-            len(im_in.tostring('png8:m=o')), True)
++        assert len(im.tostring('png8:t=0')) == len(im_in.tostring('png8'))
++        assert len(im.tostring('png8:t=0:m=o')) == len(im_in.tostring('png8:m=o'))
+ 
+     def test_9_colors_hextree():
+         expected = './images/support/encoding-opts/png8-9cols.png'
+         im = mapnik.Image.open(expected)
+         t0 = tmp_dir + 'png-encoding-9-colors.result-hextree.png'
+         im.save(t0, 'png8:m=h')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+ 
+     def test_9_colors_octree():
+         expected = './images/support/encoding-opts/png8-9cols.png'
+         im = mapnik.Image.open(expected)
+         t0 = tmp_dir + 'png-encoding-9-colors.result-octree.png'
+         im.save(t0, 'png8:m=o')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+ 
+     def test_17_colors_hextree():
+         expected = './images/support/encoding-opts/png8-17cols.png'
+         im = mapnik.Image.open(expected)
+         t0 = tmp_dir + 'png-encoding-17-colors.result-hextree.png'
+         im.save(t0, 'png8:m=h')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+ 
+     def test_17_colors_octree():
+         expected = './images/support/encoding-opts/png8-17cols.png'
+         im = mapnik.Image.open(expected)
+         t0 = tmp_dir + 'png-encoding-17-colors.result-octree.png'
+         im.save(t0, 'png8:m=o')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+ 
+     def test_2px_regression_hextree():
+         im = mapnik.Image.open('./images/support/encoding-opts/png8-2px.A.png')
+@@ -213,20 +185,11 @@ if mapnik.has_png():
+ 
+         t0 = tmp_dir + 'png-encoding-2px.result-hextree.png'
+         im.save(t0, 'png8:m=h')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+ 
+     def test_2px_regression_octree():
+         im = mapnik.Image.open('./images/support/encoding-opts/png8-2px.A.png')
+         expected = './images/support/encoding-opts/png8-2px.png'
+         t0 = tmp_dir + 'png-encoding-2px.result-octree.png'
+         im.save(t0, 'png8:m=o')
+-        eq_(mapnik.Image.open(t0).tostring(),
+-            mapnik.Image.open(expected).tostring(),
+-            '%s (actual) not == to %s (expected)' % (t0, expected))
+-
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert mapnik.Image.open(t0).tostring() == mapnik.Image.open(expected).tostring(), '%s (actual) not == to %s (expected)' % (t0, expected)
+--- a/test/python_tests/pngsuite_test.py
++++ b/test/python_tests/pngsuite_test.py
+@@ -1,24 +1,20 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import assert_raises
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
++import pytest
++from .utilities import execution_path
+ 
+ datadir = '../data/pngsuite'
+ 
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ def assert_broken_file(fname):
+-    assert_raises(RuntimeError, lambda: mapnik.Image.open(fname))
++    with pytest.raises(RuntimeError):
++        mapnik.Image.open(fname)
+ 
+ 
+ def assert_good_file(fname):
+@@ -31,15 +27,11 @@ def get_pngs(good):
+             for x in files if good != x.startswith('x')]
+ 
+ 
+-def test_good_pngs():
++def test_good_pngs(setup):
+     for x in get_pngs(True):
+-        yield assert_good_file, x
++        assert_good_file(x)
+ 
+ 
+ def test_broken_pngs():
+     for x in get_pngs(False):
+-        yield assert_broken_file, x
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert_broken_file(x)
+--- a/test/python_tests/postgis_test.py
++++ b/test/python_tests/postgis_test.py
+@@ -1,32 +1,16 @@
+-#!/usr/bin/env python
+ import atexit
+ import os
+ import sys
+ import threading
+ from subprocess import PIPE, Popen
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-if PYTHON3:
+-    long = int
+-
++import pytest
++from .utilities import execution_path
+ 
+ MAPNIK_TEST_DBNAME = 'mapnik-tmp-postgis-test-db'
+ POSTGIS_TEMPLATE_DBNAME = 'template_postgis'
+ SHAPEFILE = os.path.join(execution_path('.'), '../data/shp/world_merc.shp')
+ 
+-
+-def setup():
+-    # All of the paths used are relative, if we run the tests
+-    # from another directory we need to chdir()
+-    os.chdir(execution_path('.'))
+-
+-
+ def call(cmd, silent=False):
+     stdin, stderr = Popen(cmd, shell=True, stdout=PIPE,
+                           stderr=PIPE).communicate()
+@@ -305,23 +289,23 @@ if 'postgis' in mapnik.DatasourceCache.p
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc')
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['gid'], 1)
+-        eq_(feature['fips'], u'AC')
+-        eq_(feature['iso2'], u'AG')
+-        eq_(feature['iso3'], u'ATG')
+-        eq_(feature['un'], 28)
+-        eq_(feature['name'], u'Antigua and Barbuda')
+-        eq_(feature['area'], 44)
+-        eq_(feature['pop2005'], 83039)
+-        eq_(feature['region'], 19)
+-        eq_(feature['subregion'], 29)
+-        eq_(feature['lon'], -61.783)
+-        eq_(feature['lat'], 17.078)
+-        meta = ds.describe()
+-        eq_(meta['srid'], 3857)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['encoding'], u'UTF8')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
++        assert feature['gid'] ==  1
++        assert feature['fips'] ==  u'AC'
++        assert feature['iso2'] ==  u'AG'
++        assert feature['iso3'] ==  u'ATG'
++        assert feature['un'] ==  28
++        assert feature['name'] ==  u'Antigua and Barbuda'
++        assert feature['area'] ==  44
++        assert feature['pop2005'] ==  83039
++        assert feature['region'] ==  19
++        assert feature['subregion'] ==  29
++        assert feature['lon'] ==  -61.783
++        assert feature['lat'] ==  17.078
++        meta = ds.describe()
++        assert meta['srid'] ==  3857
++        assert meta.get('key_field') ==  None
++        assert meta['encoding'] ==  u'UTF8'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Polygon
+ 
+     def test_subquery():
+         ds = mapnik.PostGIS(
+@@ -329,37 +313,37 @@ if 'postgis' in mapnik.DatasourceCache.p
+             table='(select * from world_merc) as w')
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['gid'], 1)
+-        eq_(feature['fips'], u'AC')
+-        eq_(feature['iso2'], u'AG')
+-        eq_(feature['iso3'], u'ATG')
+-        eq_(feature['un'], 28)
+-        eq_(feature['name'], u'Antigua and Barbuda')
+-        eq_(feature['area'], 44)
+-        eq_(feature['pop2005'], 83039)
+-        eq_(feature['region'], 19)
+-        eq_(feature['subregion'], 29)
+-        eq_(feature['lon'], -61.783)
+-        eq_(feature['lat'], 17.078)
+-        meta = ds.describe()
+-        eq_(meta['srid'], 3857)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['encoding'], u'UTF8')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
++        assert feature['gid'] ==  1
++        assert feature['fips'] ==  u'AC'
++        assert feature['iso2'] ==  u'AG'
++        assert feature['iso3'] ==  u'ATG'
++        assert feature['un'] ==  28
++        assert feature['name'] ==  u'Antigua and Barbuda'
++        assert feature['area'] ==  44
++        assert feature['pop2005'] ==  83039
++        assert feature['region'] ==  19
++        assert feature['subregion'] ==  29
++        assert feature['lon'] ==  -61.783
++        assert feature['lat'] ==  17.078
++        meta = ds.describe()
++        assert meta['srid'] ==  3857
++        assert meta.get('key_field') ==  None
++        assert meta['encoding'] ==  u'UTF8'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Polygon
+ 
+         ds = mapnik.PostGIS(
+             dbname=MAPNIK_TEST_DBNAME,
+             table='(select gid,geom,fips as _fips from world_merc) as w')
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['gid'], 1)
+-        eq_(feature['_fips'], u'AC')
+-        eq_(len(feature), 2)
+-        meta = ds.describe()
+-        eq_(meta['srid'], 3857)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['encoding'], u'UTF8')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
++        assert feature['gid'] ==  1
++        assert feature['_fips'] ==  u'AC'
++        assert len(feature) ==  2
++        meta = ds.describe()
++        assert meta['srid'] ==  3857
++        assert meta.get('key_field') ==  None
++        assert meta['encoding'] ==  u'UTF8'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Polygon
+ 
+     def test_bad_connection():
+         try:
+@@ -374,60 +358,62 @@ if 'postgis' in mapnik.DatasourceCache.p
+ 
+     def test_empty_db():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty')
+-        fs = ds.featureset()
++        fs = ds.features(mapnik.Query(mapnik.Box2d(-180,-90,180,90)))
+         feature = None
+         try:
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['encoding'], u'UTF8')
+-        eq_(meta['geometry_type'], None)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  None
++        assert meta['encoding'] ==  u'UTF8'
++        assert meta['geometry_type'] ==  None
+ 
+     def test_manual_srid():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, srid=99, table='empty')
+-        fs = ds.featureset()
++        fs = ds.features(mapnik.Query(mapnik.Box2d(-180,-90,180,90)))
+         feature = None
+         try:
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+         meta = ds.describe()
+-        eq_(meta['srid'], 99)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['encoding'], u'UTF8')
+-        eq_(meta['geometry_type'], None)
++        assert meta['srid'] ==  99
++        assert meta.get('key_field') ==  None
++        assert meta['encoding'] ==  u'UTF8'
++        assert meta['geometry_type'] ==  None
+ 
+     def test_geometry_detection():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test',
+                             geometry_field='geom')
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Collection
+ 
+         # will fail with postgis 2.0 because it automatically adds a geometry_columns entry
+         # ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test',
+         #                   geometry_field='geom',
+         #                    row_limit=1)
+-        # eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
++        # assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Point
++
+ 
+-    @raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty')
+-        eq_(len(ds.fields()), 1)
+-        eq_(ds.fields(), ['key'])
+-        eq_(ds.field_types(), ['int'])
++        assert len(ds.fields()) ==  1
++        assert ds.fields() ==  ['key']
++        assert ds.field_types() ==  ['int']
+         query = mapnik.Query(ds.envelope())
++
+         for fld in ds.fields():
+             query.add_property_name(fld)
+-        # also add an invalid one, triggering throw
+-        query.add_property_name('bogus')
+-        ds.features(query)
++            # also add an invalid one, triggering throw
++            query.add_property_name('bogus')
++        with pytest.raises(RuntimeError):
++            ds.features(query)
+ 
+     def test_auto_detection_of_unique_feature_id_32_bit():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test2',
+@@ -435,25 +421,25 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         f = fs.next()
+-        eq_(len(ds.fields()),len(f.attributes))
+-        eq_(f['manual_id'], 0)
+-        eq_(fs.next()['manual_id'], 1)
+-        eq_(fs.next()['manual_id'], 1000)
+-        eq_(fs.next()['manual_id'], -1000)
+-        eq_(fs.next()['manual_id'], 2147483647)
+-        eq_(fs.next()['manual_id'], -2147483648)
+-
+-        fs = ds.featureset()
+-        eq_(fs.next().id(), 0)
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 1000)
+-        eq_(fs.next().id(), -1000)
+-        eq_(fs.next().id(), 2147483647)
+-        eq_(fs.next().id(), -2147483648)
+-        meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), u'manual_id')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert len(ds.fields()) == len(f.attributes)
++        assert f['manual_id'] ==  0
++        assert fs.next()['manual_id'] ==  1
++        assert fs.next()['manual_id'] ==  1000
++        assert fs.next()['manual_id'] ==  -1000
++        assert fs.next()['manual_id'] ==  2147483647
++        assert fs.next()['manual_id'] ==  -2147483648
++
++        fs = ds.featureset()
++        assert fs.next().id() ==  0
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  1000
++        assert fs.next().id() ==  -1000
++        assert fs.next().id() ==  2147483647
++        assert fs.next().id() ==  -2147483648
++        meta = ds.describe()
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  u'manual_id'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_auto_detection_of_unique_feature_id_32_bit_no_attribute():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test2',
+@@ -462,19 +448,19 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             key_field_as_attribute=False)
+         fs = ds.featureset()
+         f = fs.next()
+-        eq_(len(ds.fields()),len(f.attributes))
+-        eq_(len(ds.fields()),0)
+-        eq_(len(f.attributes),0)
+-        eq_(f.id(), 0)
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 1000)
+-        eq_(fs.next().id(), -1000)
+-        eq_(fs.next().id(), 2147483647)
+-        eq_(fs.next().id(), -2147483648)
+-        meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), u'manual_id')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert len(ds.fields()) == len(f.attributes)
++        assert len(ds.fields()) == 0
++        assert len(f.attributes) == 0
++        assert f.id() ==  0
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  1000
++        assert fs.next().id() ==  -1000
++        assert fs.next().id() ==  2147483647
++        assert fs.next().id() ==  -2147483648
++        meta = ds.describe()
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  u'manual_id'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_auto_detection_will_fail_since_no_primary_key():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3',
+@@ -482,35 +468,36 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=False)
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['manual_id'], 0)
+-        eq_(feat['non_id'],9223372036854775807)
+-        eq_(fs.next()['manual_id'], 1)
+-        eq_(fs.next()['manual_id'], 1000)
+-        eq_(fs.next()['manual_id'], -1000)
+-        eq_(fs.next()['manual_id'], 2147483647)
+-        eq_(fs.next()['manual_id'], -2147483648)
++        assert feat['manual_id'] ==  0
++        assert feat['non_id'] == 9223372036854775807
++        assert fs.next()['manual_id'] ==  1
++        assert fs.next()['manual_id'] ==  1000
++        assert fs.next()['manual_id'] ==  -1000
++        assert fs.next()['manual_id'] ==  2147483647
++        assert fs.next()['manual_id'] ==  -2147483648
+ 
+         # since no valid primary key will be detected the fallback
+         # is auto-incrementing counter
+         fs = ds.featureset()
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 2)
+-        eq_(fs.next().id(), 3)
+-        eq_(fs.next().id(), 4)
+-        eq_(fs.next().id(), 5)
+-        eq_(fs.next().id(), 6)
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  2
++        assert fs.next().id() ==  3
++        assert fs.next().id() ==  4
++        assert fs.next().id() ==  5
++        assert fs.next().id() ==  6
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++
+ 
+-    @raises(RuntimeError)
+     def test_auto_detection_will_fail_and_should_throw():
+-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3',
+-                            geometry_field='geom',
+-                            autodetect_key_field=True)
+-        ds.featureset()
++        with pytest.raises(RuntimeError):
++            ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3',
++                                geometry_field='geom',
++                                autodetect_key_field=True)
++            ds.featureset()
+ 
+     def test_auto_detection_of_unique_feature_id_64_bit():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4',
+@@ -518,26 +505,26 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         f = fs.next()
+-        eq_(len(ds.fields()),len(f.attributes))
+-        eq_(f['manual_id'], 0)
+-        eq_(fs.next()['manual_id'], 1)
+-        eq_(fs.next()['manual_id'], 1000)
+-        eq_(fs.next()['manual_id'], -1000)
+-        eq_(fs.next()['manual_id'], 2147483647)
+-        eq_(fs.next()['manual_id'], -2147483648)
+-
+-        fs = ds.featureset()
+-        eq_(fs.next().id(), 0)
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 1000)
+-        eq_(fs.next().id(), -1000)
+-        eq_(fs.next().id(), 2147483647)
+-        eq_(fs.next().id(), -2147483648)
+-
+-        meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), u'manual_id')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert len(ds.fields()) == len(f.attributes)
++        assert f['manual_id'] ==  0
++        assert fs.next()['manual_id'] ==  1
++        assert fs.next()['manual_id'] ==  1000
++        assert fs.next()['manual_id'] ==  -1000
++        assert fs.next()['manual_id'] ==  2147483647
++        assert fs.next()['manual_id'] ==  -2147483648
++
++        fs = ds.featureset()
++        assert fs.next().id() ==  0
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  1000
++        assert fs.next().id() ==  -1000
++        assert fs.next().id() ==  2147483647
++        assert fs.next().id() ==  -2147483648
++
++        meta = ds.describe()
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  u'manual_id'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_disabled_auto_detection_and_subquery():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, 'a'::varchar as name from test2) as t''',
+@@ -545,28 +532,28 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=False)
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  1
++        assert feat['name'] ==  'a'
+         feat = fs.next()
+-        eq_(feat.id(), 2)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  2
++        assert feat['name'] ==  'a'
+         feat = fs.next()
+-        eq_(feat.id(), 3)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  3
++        assert feat['name'] ==  'a'
+         feat = fs.next()
+-        eq_(feat.id(), 4)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  4
++        assert feat['name'] ==  'a'
+         feat = fs.next()
+-        eq_(feat.id(), 5)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  5
++        assert feat['name'] ==  'a'
+         feat = fs.next()
+-        eq_(feat.id(), 6)
+-        eq_(feat['name'], 'a')
++        assert feat.id() ==  6
++        assert feat['name'] ==  'a'
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_auto_detection_and_subquery_including_key():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id from test2) as t''',
+@@ -574,44 +561,47 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         f = fs.next()
+-        eq_(len(ds.fields()),len(f.attributes))
+-        eq_(f['manual_id'], 0)
+-        eq_(fs.next()['manual_id'], 1)
+-        eq_(fs.next()['manual_id'], 1000)
+-        eq_(fs.next()['manual_id'], -1000)
+-        eq_(fs.next()['manual_id'], 2147483647)
+-        eq_(fs.next()['manual_id'], -2147483648)
+-
+-        fs = ds.featureset()
+-        eq_(fs.next().id(), 0)
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 1000)
+-        eq_(fs.next().id(), -1000)
+-        eq_(fs.next().id(), 2147483647)
+-        eq_(fs.next().id(), -2147483648)
+-
+-        meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), u'manual_id')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert len(ds.fields()) == len(f.attributes)
++        assert f['manual_id'] ==  0
++        assert fs.next()['manual_id'] ==  1
++        assert fs.next()['manual_id'] ==  1000
++        assert fs.next()['manual_id'] ==  -1000
++        assert fs.next()['manual_id'] ==  2147483647
++        assert fs.next()['manual_id'] ==  -2147483648
++
++        fs = ds.featureset()
++        assert fs.next().id() ==  0
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  1000
++        assert fs.next().id() ==  -1000
++        assert fs.next().id() ==  2147483647
++        assert fs.next().id() ==  -2147483648
++
++        meta = ds.describe()
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  u'manual_id'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++
+ 
+-    @raises(RuntimeError)
+     def test_auto_detection_of_invalid_numeric_primary_key():
+-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id::numeric from test2) as t''',
+-                       geometry_field='geom',
+-                       autodetect_key_field=True)
++        with pytest.raises(RuntimeError):
++            mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id::numeric from test2) as t''',
++                           geometry_field='geom',
++                           autodetect_key_field=True)
++
+ 
+-    @raises(RuntimeError)
+     def test_auto_detection_of_invalid_multiple_keys():
+-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''test6''',
+-                       geometry_field='geom',
+-                       autodetect_key_field=True)
++        with pytest.raises(RuntimeError):
++            mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''test6''',
++                           geometry_field='geom',
++                           autodetect_key_field=True)
++
+ 
+-    @raises(RuntimeError)
+     def test_auto_detection_of_invalid_multiple_keys_subquery():
+-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select first_id,second_id,geom from test6) as t''',
+-                       geometry_field='geom',
+-                       autodetect_key_field=True)
++        with pytest.raises(RuntimeError):
++            mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select first_id,second_id,geom from test6) as t''',
++                           geometry_field='geom',
++                           autodetect_key_field=True)
+ 
+     def test_manually_specified_feature_id_field():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4',
+@@ -620,43 +610,43 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         f = fs.next()
+-        eq_(len(ds.fields()),len(f.attributes))
+-        eq_(f['manual_id'], 0)
+-        eq_(fs.next()['manual_id'], 1)
+-        eq_(fs.next()['manual_id'], 1000)
+-        eq_(fs.next()['manual_id'], -1000)
+-        eq_(fs.next()['manual_id'], 2147483647)
+-        eq_(fs.next()['manual_id'], -2147483648)
+-
+-        fs = ds.featureset()
+-        eq_(fs.next().id(), 0)
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 1000)
+-        eq_(fs.next().id(), -1000)
+-        eq_(fs.next().id(), 2147483647)
+-        eq_(fs.next().id(), -2147483648)
+-
+-        meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), u'manual_id')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert len(ds.fields()) == len(f.attributes)
++        assert f['manual_id'] ==  0
++        assert fs.next()['manual_id'] ==  1
++        assert fs.next()['manual_id'] ==  1000
++        assert fs.next()['manual_id'] ==  -1000
++        assert fs.next()['manual_id'] ==  2147483647
++        assert fs.next()['manual_id'] ==  -2147483648
++
++        fs = ds.featureset()
++        assert fs.next().id() ==  0
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  1000
++        assert fs.next().id() ==  -1000
++        assert fs.next().id() ==  2147483647
++        assert fs.next().id() ==  -2147483648
++
++        meta = ds.describe()
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  u'manual_id'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_numeric_type_feature_id_field():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test5',
+                             geometry_field='geom',
+                             autodetect_key_field=False)
+         fs = ds.featureset()
+-        eq_(fs.next()['manual_id'], -1)
+-        eq_(fs.next()['manual_id'], 1)
++        assert fs.next()['manual_id'] ==  -1
++        assert fs.next()['manual_id'] ==  1
+ 
+         fs = ds.featureset()
+-        eq_(fs.next().id(), 1)
+-        eq_(fs.next().id(), 2)
++        assert fs.next().id() ==  1
++        assert fs.next().id() ==  2
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_querying_table_with_mixed_case():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='"tableWithMixedCase"',
+@@ -664,12 +654,12 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), u'gid')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  u'gid'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_querying_subquery_with_mixed_case():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(SeLeCt * FrOm "tableWithMixedCase") as MixedCaseQuery',
+@@ -677,12 +667,12 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), u'gid')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  u'gid'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_bbox_token_in_subquery1():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
+@@ -691,12 +681,12 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), u'gid')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  u'gid'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_bbox_token_in_subquery2():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
+@@ -705,23 +695,23 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             autodetect_key_field=True)
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), u'gid')
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  u'gid'
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_empty_geom():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test7',
+                             geometry_field='geom')
+         fs = ds.featureset()
+-        eq_(fs.next()['gid'], 1)
++        assert fs.next()['gid'] ==  1
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Collection
+ 
+     def create_ds():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
+@@ -729,12 +719,12 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             max_size=20,
+                             geometry_field='geom')
+         fs = list(ds.all_features())
+-        eq_(len(fs), 8)
++        assert len(fs) ==  8
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Collection
+ 
+     def test_threaded_create(NUM_THREADS=100):
+         # run one to start before thread loop
+@@ -747,7 +737,7 @@ if 'postgis' in mapnik.DatasourceCache.p
+             t.start()
+             t.join()
+             runs += 1
+-        eq_(runs, NUM_THREADS)
++        assert runs ==  NUM_THREADS
+ 
+     def create_ds_and_error():
+         try:
+@@ -756,7 +746,7 @@ if 'postgis' in mapnik.DatasourceCache.p
+                                 max_size=20)
+             ds.all_features()
+         except Exception as e:
+-            eq_('in executeQuery' in str(e), True)
++            assert 'in executeQuery' in str(e)
+ 
+     def test_threaded_create2(NUM_THREADS=10):
+         for i in range(NUM_THREADS):
+@@ -768,23 +758,23 @@ if 'postgis' in mapnik.DatasourceCache.p
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
+                             table='test8',
+                             geometry_field='geom')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['gid', 'int_field'])
+-        eq_(ds.field_types(), ['int', 'int'])
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['gid', 'int_field']
++        assert ds.field_types() == ['int', 'int']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat['gid'], 1)
+-        eq_(feat['int_field'], 2147483648)
++        assert feat.id() ==  1
++        assert feat['gid'] ==  1
++        assert feat['int_field'] ==  2147483648
+         feat = fs.next()
+-        eq_(feat.id(), 2)
+-        eq_(feat['gid'], 2)
+-        eq_(feat['int_field'], 922337203685477580)
++        assert feat.id() ==  2
++        assert feat['gid'] ==  2
++        assert feat['int_field'] ==  922337203685477580
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_persist_connection_off():
+         # NOTE: max_size should be equal or greater than
+@@ -800,11 +790,11 @@ if 'postgis' in mapnik.DatasourceCache.p
+                                 table='(select ST_MakePoint(0,0) as g, pg_backend_pid() as p, 1 as v) as w',
+                                 geometry_field='g')
+             fs = ds.featureset()
+-            eq_(fs.next()['v'], 1)
++            assert fs.next()['v'] ==  1
+ 
+             meta = ds.describe()
+-            eq_(meta['srid'], -1)
+-            eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++            assert meta['srid'] ==  -1
++            assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+     def test_null_comparision():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test9',
+@@ -813,51 +803,51 @@ if 'postgis' in mapnik.DatasourceCache.p
+         feat = fs.next()
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+-
+-        eq_(feat['gid'], 1)
+-        eq_(feat['name'], 'name')
+-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] = ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != ''").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != null").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
+-
+-        feat = fs.next()
+-        eq_(feat['gid'], 2)
+-        eq_(feat['name'], '')
+-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = ''").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] = null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != null").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
+-
+-        feat = fs.next()
+-        eq_(feat['gid'], 3)
+-        eq_(feat['name'], None)  # null
+-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = null").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), True)
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++
++        assert feat['gid'] ==  1
++        assert feat['name'] ==  'name'
++        assert mapnik.Expression("[name] = 'name'").evaluate(feat)
++        assert not mapnik.Expression("[name] = ''").evaluate(feat)
++        assert not mapnik.Expression("[name] = null").evaluate(feat)
++        assert not mapnik.Expression("[name] = true").evaluate(feat)
++        assert not mapnik.Expression("[name] = false").evaluate(feat)
++        assert not mapnik.Expression("[name] != 'name'").evaluate(feat)
++        assert mapnik.Expression("[name] != ''").evaluate(feat)
++        assert mapnik.Expression("[name] != null").evaluate(feat)
++        assert mapnik.Expression("[name] != true").evaluate(feat)
++        assert mapnik.Expression("[name] != false").evaluate(feat)
++
++        feat = fs.next()
++        assert feat['gid'] ==  2
++        assert feat['name'] ==  ''
++        assert mapnik.Expression("[name] = 'name'").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = ''").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] = null").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = true").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = false").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] != 'name'").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] != ''").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] != null").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] != true").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] != false").evaluate(feat) ==  True
++
++        feat = fs.next()
++        assert feat['gid'] ==  3
++        assert feat['name'] ==  None  # null
++        assert mapnik.Expression("[name] = 'name'").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = ''").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = null").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] = true").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] = false").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] != 'name'").evaluate(feat) ==  True
+         # https://github.com/mapnik/mapnik/issues/1859
+-        eq_(mapnik.Expression("[name] != ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+-        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
++        assert mapnik.Expression("[name] != ''").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] != null").evaluate(feat) ==  False
++        assert mapnik.Expression("[name] != true").evaluate(feat) ==  True
++        assert mapnik.Expression("[name] != false").evaluate(feat) ==  True
+ 
+     def test_null_comparision2():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test10',
+@@ -866,64 +856,58 @@ if 'postgis' in mapnik.DatasourceCache.p
+         feat = fs.next()
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], -1)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+-
+-        eq_(feat['gid'], 1)
+-        eq_(feat['bool_field'], True)
+-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(
+-            feat), True)  # in 2.1.x used to be False
+-        eq_(mapnik.Expression("[bool_field] != null").evaluate(
+-            feat), True)  # in 2.1.x used to be False
+-        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat), True)
+-
+-        feat = fs.next()
+-        eq_(feat['gid'], 2)
+-        eq_(feat['bool_field'], False)
+-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] != null").evaluate(
+-            feat), True)  # in 2.1.x used to be False
+-        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat), False)
+-
+-        feat = fs.next()
+-        eq_(feat['gid'], 3)
+-        eq_(feat['bool_field'], None)  # null
+-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), True)
+-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(
+-            feat), True)  # in 2.1.x used to be False
++        assert meta['srid'] ==  -1
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++
++        assert feat['gid'] ==  1
++        assert feat['bool_field']
++        assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = ''").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = null").evaluate(feat)
++        assert mapnik.Expression("[bool_field] = true").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = false").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != ''").evaluate(feat)  # in 2.1.x used to be False
++        assert mapnik.Expression("[bool_field] != null").evaluate(feat)  # in 2.1.x used to be False
++        assert not mapnik.Expression("[bool_field] != true").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != false").evaluate(feat)
++
++        feat = fs.next()
++        assert feat['gid'] ==  2
++        assert not feat['bool_field']
++        assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = ''").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = null").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = true").evaluate(feat)
++        assert mapnik.Expression("[bool_field] = false").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != ''").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != null").evaluate(feat) # in 2.1.x used to be False
++        assert mapnik.Expression("[bool_field] != true").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] != false").evaluate(feat)
++
++        feat = fs.next()
++        assert feat['gid'] ==  3
++        assert feat['bool_field'] ==  None  # null
++        assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = ''").evaluate(feat)
++        assert mapnik.Expression("[bool_field] = null").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = true").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] = false").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat) # in 2.1.x used to be False
+         # https://github.com/mapnik/mapnik/issues/1859
+-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] != null").evaluate(feat), False)
+-        eq_(mapnik.Expression("[bool_field] != true").evaluate(
+-            feat), True)  # in 2.1.x used to be False
+-        eq_(mapnik.Expression("[bool_field] != false").evaluate(
+-            feat), True)  # in 2.1.x used to be False
++        assert not mapnik.Expression("[bool_field] != ''").evaluate(feat)
++        assert not mapnik.Expression("[bool_field] != null").evaluate(feat)
++        assert mapnik.Expression("[bool_field] != true").evaluate(feat)  # in 2.1.x used to be False
++        assert mapnik.Expression("[bool_field] != false").evaluate(feat) # in 2.1.x used to be False
+ 
+     # https://github.com/mapnik/mapnik/issues/1816
+     def test_exception_message_reporting():
+         try:
+             mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='doesnotexist')
+         except Exception as e:
+-            eq_(str(e) != 'unidentifiable C++ exception', True)
++            assert str(e) != 'unidentifiable C++ exception'
+ 
+     def test_null_id_field():
+         opts = {'type': 'postgis',
+@@ -933,15 +917,15 @@ if 'postgis' in mapnik.DatasourceCache.p
+         ds = mapnik.Datasource(**opts)
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat.id(), long(1))
+-        eq_(feat['osm_id'], None)
++        assert feat.id() ==  int(1)
++        assert feat['osm_id'] ==  None
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
++
+ 
+-    @raises(StopIteration)
+     def test_null_key_field():
+         opts = {'type': 'postgis',
+                 "key_field": 'osm_id',
+@@ -950,9 +934,10 @@ if 'postgis' in mapnik.DatasourceCache.p
+                 'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"}
+         ds = mapnik.Datasource(**opts)
+         fs = ds.featureset()
+-        # should throw since key_field is null: StopIteration: No more
+-        # features.
+-        fs.next()
++        with pytest.raises(StopIteration):
++            # should throw since key_field is null: StopIteration: No more
++            # features.
++            fs.next()
+ 
+     def test_psql_error_should_not_break_connection_pool():
+         # Bad request, will trigger an error when returning result
+@@ -969,15 +954,15 @@ if 'postgis' in mapnik.DatasourceCache.p
+             fs = ds_bad.featureset()
+             count = sum(1 for f in fs)
+         except RuntimeError as e:
+-            assert 'invalid input syntax for integer' in str(e)
++            assert 'invalid input syntax for type integer' in str(e)
+             failed = True
+ 
+-        eq_(failed, True)
++        assert failed ==  True
+ 
+         # Should be ok
+         fs = ds_good.featureset()
+         count = sum(1 for f in fs)
+-        eq_(count, 8)
++        assert count ==  8
+ 
+     def test_psql_error_should_give_back_connections_opened_for_lower_layers_to_the_pool():
+         map1 = mapnik.Map(600, 300)
+@@ -991,7 +976,7 @@ if 'postgis' in mapnik.DatasourceCache.p
+         buggy_s = mapnik.Style()
+         buggy_r = mapnik.Rule()
+         buggy_r.symbols.append(mapnik.PolygonSymbolizer())
+-        buggy_r.filter = mapnik.Filter("[fips] = 'FR'")
++        buggy_r.filter = mapnik.Expression("[fips] = 'FR'")
+         buggy_s.rules.append(buggy_r)
+         map1.append_style('style for buggy layer', buggy_s)
+         buggy_layer = mapnik.Layer('this layer is buggy at runtime')
+@@ -1031,9 +1016,9 @@ if 'postgis' in mapnik.DatasourceCache.p
+             mapnik.render_to_file(
+                 map1, '/tmp/mapnik-postgis-test-map1.png', 'png')
+             # Test must fail if error was not raised just above
+-            eq_(False, True)
++            assert False ==  True
+         except RuntimeError as e:
+-            assert 'invalid input syntax for integer' in str(e)
++            assert 'invalid input syntax for type integer' in str(e)
+             pass
+         # This used to raise an exception before correction of issue 2042
+         mapnik.render_to_file(map2, '/tmp/mapnik-postgis-test-map2.png', 'png')
+@@ -1042,200 +1027,196 @@ if 'postgis' in mapnik.DatasourceCache.p
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
+                             table='(select gid,ST_CoordDim(geom) as dim,name,geom from test12) as tmp',
+                             geometry_field='geom')
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['gid', 'dim', 'name'])
+-        eq_(ds.field_types(), ['int', 'int', 'str'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['gid', 'dim', 'name']
++        assert ds.field_types() == ['int', 'int', 'str']
+         fs = ds.featureset()
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), None)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  None
+         # Note: this is incorrect because we only check first couple geoms
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+         # Point (2d)
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat['gid'], 1)
+-        eq_(feat['dim'], 2)
+-        eq_(feat['name'], 'Point')
+-        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
++        assert feat.id() ==  1
++        assert feat['gid'] ==  1
++        assert feat['dim'] ==  2
++        assert feat['name'] ==  'Point'
++        assert feat.geometry.to_wkt() ==  'POINT(0 0)'
+ 
+         # PointZ
+         feat = fs.next()
+-        eq_(feat.id(), 2)
+-        eq_(feat['gid'], 2)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'PointZ')
+-        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
++        assert feat.id() ==  2
++        assert feat['gid'] ==  2
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'PointZ'
++        assert feat.geometry.to_wkt() ==  'POINT(0 0)'
+ 
+         # PointM
+         feat = fs.next()
+-        eq_(feat.id(), 3)
+-        eq_(feat['gid'], 3)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'PointM')
+-        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
++        assert feat.id() ==  3
++        assert feat['gid'] ==  3
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'PointM'
++        assert feat.geometry.to_wkt() ==  'POINT(0 0)'
+ 
+         # PointZM
+         feat = fs.next()
+-        eq_(feat.id(), 4)
+-        eq_(feat['gid'], 4)
+-        eq_(feat['dim'], 4)
+-        eq_(feat['name'], 'PointZM')
++        assert feat.id() ==  4
++        assert feat['gid'] ==  4
++        assert feat['dim'] ==  4
++        assert feat['name'] ==  'PointZM'
+ 
+-        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
++        assert feat.geometry.to_wkt() ==  'POINT(0 0)'
+         # MultiPoint
+         feat = fs.next()
+-        eq_(feat.id(), 5)
+-        eq_(feat['gid'], 5)
+-        eq_(feat['dim'], 2)
+-        eq_(feat['name'], 'MultiPoint')
+-        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
++        assert feat.id() ==  5
++        assert feat['gid'] ==  5
++        assert feat['dim'] ==  2
++        assert feat['name'] ==  'MultiPoint'
++        assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)'
+ 
+         # MultiPointZ
+         feat = fs.next()
+-        eq_(feat.id(), 6)
+-        eq_(feat['gid'], 6)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'MultiPointZ')
+-        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
++        assert feat.id() ==  6
++        assert feat['gid'] ==  6
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'MultiPointZ'
++        assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)'
+ 
+         # MultiPointM
+         feat = fs.next()
+-        eq_(feat.id(), 7)
+-        eq_(feat['gid'], 7)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'MultiPointM')
+-        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
++        assert feat.id() ==  7
++        assert feat['gid'] ==  7
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'MultiPointM'
++        assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)'
+ 
+         # MultiPointZM
+         feat = fs.next()
+-        eq_(feat.id(), 8)
+-        eq_(feat['gid'], 8)
+-        eq_(feat['dim'], 4)
+-        eq_(feat['name'], 'MultiPointZM')
+-        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
++        assert feat.id() ==  8
++        assert feat['gid'] ==  8
++        assert feat['dim'] ==  4
++        assert feat['name'] ==  'MultiPointZM'
++        assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)'
+ 
+         # LineString
+         feat = fs.next()
+-        eq_(feat.id(), 9)
+-        eq_(feat['gid'], 9)
+-        eq_(feat['dim'], 2)
+-        eq_(feat['name'], 'LineString')
+-        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
++        assert feat.id() ==  9
++        assert feat['gid'] ==  9
++        assert feat['dim'] ==  2
++        assert feat['name'] ==  'LineString'
++        assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)'
+ 
+         # LineStringZ
+         feat = fs.next()
+-        eq_(feat.id(), 10)
+-        eq_(feat['gid'], 10)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'LineStringZ')
+-        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
++        assert feat.id() ==  10
++        assert feat['gid'] ==  10
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'LineStringZ'
++        assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)'
+ 
+         # LineStringM
+         feat = fs.next()
+-        eq_(feat.id(), 11)
+-        eq_(feat['gid'], 11)
+-        eq_(feat['dim'], 3)
+-        eq_(feat['name'], 'LineStringM')
+-        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
++        assert feat.id() ==  11
++        assert feat['gid'] ==  11
++        assert feat['dim'] ==  3
++        assert feat['name'] ==  'LineStringM'
++        assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)'
+ 
+         # LineStringZM
+         feat = fs.next()
+-        eq_(feat.id(), 12)
+-        eq_(feat['gid'], 12)
+-        eq_(feat['dim'], 4)
+-        eq_(feat['name'], 'LineStringZM')
+-        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
++        assert feat.id() ==  12
++        assert feat['gid'] ==  12
++        assert feat['dim'] ==  4
++        assert feat['name'] ==  'LineStringZM'
++        assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)'
+ 
+         # Polygon
+         feat = fs.next()
+-        eq_(feat.id(), 13)
+-        eq_(feat['gid'], 13)
+-        eq_(feat['name'], 'Polygon')
+-        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
++        assert feat.id() ==  13
++        assert feat['gid'] ==  13
++        assert feat['name'] ==  'Polygon'
++        assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))'
+ 
+         # PolygonZ
+         feat = fs.next()
+-        eq_(feat.id(), 14)
+-        eq_(feat['gid'], 14)
+-        eq_(feat['name'], 'PolygonZ')
+-        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
++        assert feat.id() ==  14
++        assert feat['gid'] ==  14
++        assert feat['name'] ==  'PolygonZ'
++        assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))'
+ 
+         # PolygonM
+         feat = fs.next()
+-        eq_(feat.id(), 15)
+-        eq_(feat['gid'], 15)
+-        eq_(feat['name'], 'PolygonM')
+-        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
++        assert feat.id() ==  15
++        assert feat['gid'] ==  15
++        assert feat['name'] ==  'PolygonM'
++        assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))'
+ 
+         # PolygonZM
+         feat = fs.next()
+-        eq_(feat.id(), 16)
+-        eq_(feat['gid'], 16)
+-        eq_(feat['name'], 'PolygonZM')
+-        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
++        assert feat.id() ==  16
++        assert feat['gid'] ==  16
++        assert feat['name'] ==  'PolygonZM'
++        assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))'
+ 
+         # MultiLineString
+         feat = fs.next()
+-        eq_(feat.id(), 17)
+-        eq_(feat['gid'], 17)
+-        eq_(feat['name'], 'MultiLineString')
+-        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
++        assert feat.id() ==  17
++        assert feat['gid'] ==  17
++        assert feat['name'] ==  'MultiLineString'
++        assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))'
+ 
+         # MultiLineStringZ
+         feat = fs.next()
+-        eq_(feat.id(), 18)
+-        eq_(feat['gid'], 18)
+-        eq_(feat['name'], 'MultiLineStringZ')
+-        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
++        assert feat.id() ==  18
++        assert feat['gid'] ==  18
++        assert feat['name'] ==  'MultiLineStringZ'
++        assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))'
+ 
+         # MultiLineStringM
+         feat = fs.next()
+-        eq_(feat.id(), 19)
+-        eq_(feat['gid'], 19)
+-        eq_(feat['name'], 'MultiLineStringM')
+-        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
++        assert feat.id() ==  19
++        assert feat['gid'] ==  19
++        assert feat['name'] ==  'MultiLineStringM'
++        assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))'
+ 
+         # MultiLineStringZM
+         feat = fs.next()
+-        eq_(feat.id(), 20)
+-        eq_(feat['gid'], 20)
+-        eq_(feat['name'], 'MultiLineStringZM')
+-        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
++        assert feat.id() ==  20
++        assert feat['gid'] ==  20
++        assert feat['name'] ==  'MultiLineStringZM'
++        assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))'
+ 
+         # MultiPolygon
+         feat = fs.next()
+-        eq_(feat.id(), 21)
+-        eq_(feat['gid'], 21)
+-        eq_(feat['name'], 'MultiPolygon')
+-        eq_(feat.geometry.to_wkt(),
+-            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
++        assert feat.id() ==  21
++        assert feat['gid'] ==  21
++        assert feat['name'] ==  'MultiPolygon'
++        assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))'
+ 
+         # MultiPolygonZ
+         feat = fs.next()
+-        eq_(feat.id(), 22)
+-        eq_(feat['gid'], 22)
+-        eq_(feat['name'], 'MultiPolygonZ')
+-        eq_(feat.geometry.to_wkt(),
+-            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
++        assert feat.id() ==  22
++        assert feat['gid'] ==  22
++        assert feat['name'] ==  'MultiPolygonZ'
++        assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))'
+ 
+         # MultiPolygonM
+         feat = fs.next()
+-        eq_(feat.id(), 23)
+-        eq_(feat['gid'], 23)
+-        eq_(feat['name'], 'MultiPolygonM')
+-        eq_(feat.geometry.to_wkt(),
+-            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
++        assert feat.id() ==  23
++        assert feat['gid'] ==  23
++        assert feat['name'] ==  'MultiPolygonM'
++        assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))'
+ 
+         # MultiPolygonZM
+         feat = fs.next()
+-        eq_(feat.id(), 24)
+-        eq_(feat['gid'], 24)
+-        eq_(feat['name'], 'MultiPolygonZM')
+-        eq_(feat.geometry.to_wkt(),
+-            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
++        assert feat.id() ==  24
++        assert feat['gid'] ==  24
++        assert feat['name'] ==  'MultiPolygonZM'
++        assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))'
+ 
+     def test_handling_of_discarded_key_field():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
+@@ -1244,21 +1225,21 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             key_field_as_attribute=False)
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat['name'],'Point')
++        assert feat['name'] == 'Point'
+ 
+     def test_variable_in_subquery1():
+         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
+-           (select * from test where @zoom = 30 ) as tmp''',
++           (select * from test where !@zoom! = 30 ) as tmp''',
+                             geometry_field='geom', srid=4326,
+                             autodetect_key_field=True)
+         fs = ds.featureset(variables={'zoom': 30})
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta.get('key_field'), "gid")
+-        eq_(meta['geometry_type'], None)
++        assert meta['srid'] ==  4326
++        assert meta.get('key_field') ==  "gid"
++        assert meta['geometry_type'] ==  None
+ 
+     # currently needs manual `geometry_table` passed
+     # to avoid misparse of `geometry_table`
+@@ -1272,11 +1253,11 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             geometry_table='test')
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
++        assert meta['srid'] ==  4326
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Collection
+ 
+     # same
+     # to avoid misparse of `geometry_table`
+@@ -1290,14 +1271,10 @@ if 'postgis' in mapnik.DatasourceCache.p
+                             geometry_table='test')
+         fs = ds.featureset()
+         for id in range(1, 5):
+-            eq_(fs.next().id(), id)
++            assert fs.next().id() ==  id
+ 
+         meta = ds.describe()
+-        eq_(meta['srid'], 4326)
+-        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
++        assert meta['srid'] ==  4326
++        assert meta['geometry_type'] ==  mapnik.DataGeometryType.Collection
+ 
+     atexit.register(postgis_takedown)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/projection_test.py
++++ b/test/python_tests/projection_test.py
+@@ -1,60 +1,38 @@
+-#!/usr/bin/env python
+ import math
+ import sys
+-
+-from nose.tools import assert_almost_equal, eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import assert_box2d_almost_equal, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-if PYTHON3:
+-    xrange = range
++from .utilities import assert_box2d_almost_equal
+ 
+ # Tests that exercise map projections.
+ 
+-
+-def test_normalizing_definition():
+-    p = mapnik.Projection('+init=epsg:4326')
+-    expanded = p.expanded()
+-    eq_('+proj=longlat' in expanded, True)
+-
++def test_projection_description():
++    p = mapnik.Projection('epsg:4326')
++    assert 'WGS 84' == p.description()
+ 
+ # Trac Ticket #128
+ def test_wgs84_inverse_forward():
+-    p = mapnik.Projection('+init=epsg:4326')
+-
++    p1 = mapnik.Projection('epsg:4326')
++    p2 = mapnik.Projection('epsg:4326')
++    tr = mapnik.ProjTransform(p1, p2)
+     c = mapnik.Coord(3.01331418311, 43.3333092669)
+     e = mapnik.Box2d(-122.54345245, 45.12312553, 68.2335581353, 48.231231233)
+ 
+     # It appears that the y component changes very slightly, is this OK?
+     # so we test for 'almost equal float values'
+ 
+-    assert_almost_equal(p.inverse(c).y, c.y)
+-    assert_almost_equal(p.inverse(c).x, c.x)
+-
+-    assert_almost_equal(p.forward(c).y, c.y)
+-    assert_almost_equal(p.forward(c).x, c.x)
++    assert tr.backward(c).y == pytest.approx(c.y)
++    assert tr.backward(c).x == pytest.approx(c.x)
+ 
+-    assert_almost_equal(p.inverse(e).center().y, e.center().y)
+-    assert_almost_equal(p.inverse(e).center().x, e.center().x)
++    assert tr.forward(c).y == pytest.approx(c.y)
++    assert tr.forward(c).x == pytest.approx(c.x)
+ 
+-    assert_almost_equal(p.forward(e).center().y, e.center().y)
+-    assert_almost_equal(p.forward(e).center().x, e.center().x)
+-
+-    assert_almost_equal(c.inverse(p).y, c.y)
+-    assert_almost_equal(c.inverse(p).x, c.x)
+-
+-    assert_almost_equal(c.forward(p).y, c.y)
+-    assert_almost_equal(c.forward(p).x, c.x)
+-
+-    assert_almost_equal(e.inverse(p).center().y, e.center().y)
+-    assert_almost_equal(e.inverse(p).center().x, e.center().x)
+-
+-    assert_almost_equal(e.forward(p).center().y, e.center().y)
+-    assert_almost_equal(e.forward(p).center().x, e.center().x)
++    assert tr.backward(e).center().y == pytest.approx(e.center().y)
++    assert tr.backward(e).center().x == pytest.approx(e.center().x)
+ 
++    assert tr.forward(e).center().y == pytest.approx(e.center().y)
++    assert tr.forward(e).center().x == pytest.approx(e.center().x)
+ 
+ def wgs2merc(lon, lat):
+     x = lon * 20037508.34 / 180
+@@ -78,7 +56,7 @@ def merc2wgs(x, y):
+         y = -85.0511
+     return [x, y]
+ 
+-# echo -109 37 | cs2cs -f "%.10f" +init=epsg:4326 +to +init=epsg:3857
++# echo -109 37 | cs2cs -f "%.10f" epsg:4326 +to epsg:3857
+ #-12133824.4964668211    4439106.7872505859 0.0000000000
+ 
+ # todo
+@@ -89,43 +67,43 @@ def merc2wgs(x, y):
+ 
+ 
+ def test_proj_transform_between_init_and_literal():
+-    one = mapnik.Projection('+init=epsg:4326')
+-    two = mapnik.Projection('+init=epsg:3857')
++    one = mapnik.Projection('epsg:4326')
++    two = mapnik.Projection('epsg:3857')
+     tr1 = mapnik.ProjTransform(one, two)
+     tr1b = mapnik.ProjTransform(two, one)
+-    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
+-    merc = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
++    wgs84 = 'epsg:4326'
++    merc = 'epsg:3857'
+     src = mapnik.Projection(wgs84)
+     dest = mapnik.Projection(merc)
+     tr2 = mapnik.ProjTransform(src, dest)
+     tr2b = mapnik.ProjTransform(dest, src)
+-    for x in xrange(-180, 180, 10):
+-        for y in xrange(-60, 60, 10):
++    for x in range(-180, 180, 10):
++        for y in range(-60, 60, 10):
+             coord = mapnik.Coord(x, y)
+             merc_coord1 = tr1.forward(coord)
+             merc_coord2 = tr1b.backward(coord)
+             merc_coord3 = tr2.forward(coord)
+             merc_coord4 = tr2b.backward(coord)
+-            eq_(math.fabs(merc_coord1.x - merc_coord1.x) < 1, True)
+-            eq_(math.fabs(merc_coord1.x - merc_coord2.x) < 1, True)
+-            eq_(math.fabs(merc_coord1.x - merc_coord3.x) < 1, True)
+-            eq_(math.fabs(merc_coord1.x - merc_coord4.x) < 1, True)
+-            eq_(math.fabs(merc_coord1.y - merc_coord1.y) < 1, True)
+-            eq_(math.fabs(merc_coord1.y - merc_coord2.y) < 1, True)
+-            eq_(math.fabs(merc_coord1.y - merc_coord3.y) < 1, True)
+-            eq_(math.fabs(merc_coord1.y - merc_coord4.y) < 1, True)
++            assert math.fabs(merc_coord1.x - merc_coord1.x) < 1
++            assert math.fabs(merc_coord1.x - merc_coord2.x) < 1
++            assert math.fabs(merc_coord1.x - merc_coord3.x) < 1
++            assert math.fabs(merc_coord1.x - merc_coord4.x) < 1
++            assert math.fabs(merc_coord1.y - merc_coord1.y) < 1
++            assert math.fabs(merc_coord1.y - merc_coord2.y) < 1
++            assert math.fabs(merc_coord1.y - merc_coord3.y) < 1
++            assert math.fabs(merc_coord1.y - merc_coord4.y) < 1
+             lon_lat_coord1 = tr1.backward(merc_coord1)
+             lon_lat_coord2 = tr1b.forward(merc_coord2)
+             lon_lat_coord3 = tr2.backward(merc_coord3)
+             lon_lat_coord4 = tr2b.forward(merc_coord4)
+-            eq_(math.fabs(coord.x - lon_lat_coord1.x) < 1, True)
+-            eq_(math.fabs(coord.x - lon_lat_coord2.x) < 1, True)
+-            eq_(math.fabs(coord.x - lon_lat_coord3.x) < 1, True)
+-            eq_(math.fabs(coord.x - lon_lat_coord4.x) < 1, True)
+-            eq_(math.fabs(coord.y - lon_lat_coord1.y) < 1, True)
+-            eq_(math.fabs(coord.y - lon_lat_coord2.y) < 1, True)
+-            eq_(math.fabs(coord.y - lon_lat_coord3.y) < 1, True)
+-            eq_(math.fabs(coord.y - lon_lat_coord4.y) < 1, True)
++            assert math.fabs(coord.x - lon_lat_coord1.x) < 1
++            assert math.fabs(coord.x - lon_lat_coord2.x) < 1
++            assert math.fabs(coord.x - lon_lat_coord3.x) < 1
++            assert math.fabs(coord.x - lon_lat_coord4.x) < 1
++            assert math.fabs(coord.y - lon_lat_coord1.y) < 1
++            assert math.fabs(coord.y - lon_lat_coord2.y) < 1
++            assert math.fabs(coord.y - lon_lat_coord3.y) < 1
++            assert math.fabs(coord.y - lon_lat_coord4.y) < 1
+ 
+ 
+ # Github Issue #2648
+@@ -133,8 +111,8 @@ def test_proj_antimeridian_bbox():
+     # this is logic from feature_style_processor::prepare_layer()
+     PROJ_ENVELOPE_POINTS = 20  # include/mapnik/config.hpp
+ 
+-    prjGeog = mapnik.Projection('+init=epsg:4326')
+-    prjProj = mapnik.Projection('+init=epsg:2193')
++    prjGeog = mapnik.Projection('epsg:4326')
++    prjProj = mapnik.Projection('epsg:2193')
+     prj_trans_fwd = mapnik.ProjTransform(prjProj, prjGeog)
+     prj_trans_rev = mapnik.ProjTransform(prjGeog, prjProj)
+ 
+@@ -162,7 +140,3 @@ def test_proj_antimeridian_bbox():
+     ext = mapnik.Box2d(274000, 3087000, 276000, 7173000)
+     rev_ext = prj_trans_rev.backward(ext, PROJ_ENVELOPE_POINTS)
+     assert_box2d_almost_equal(rev_ext, normal)
+-
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/query_test.py
++++ b/test/python_tests/query_test.py
+@@ -1,44 +1,33 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_query_init():
++def test_query_init(setup):
+     bbox = (-180, -90, 180, 90)
+     query = mapnik.Query(mapnik.Box2d(*bbox))
+     r = query.resolution
+-    assert_almost_equal(r[0], 1.0, places=7)
+-    assert_almost_equal(r[1], 1.0, places=7)
++    assert r[0] == pytest.approx(1.0, abs=1e-7)
++    assert r[1] == pytest.approx(1.0, abs=1e-7)
+     # https://github.com/mapnik/mapnik/issues/1762
+-    eq_(query.property_names, [])
++    assert query.property_names == []
+     query.add_property_name('migurski')
+-    eq_(query.property_names, ['migurski'])
++    assert query.property_names == ['migurski']
+ 
+ # Converting *from* tuples *to* resolutions is not yet supported
+ 
+-
+-@raises(TypeError)
+ def test_query_resolution():
+-    bbox = (-180, -90, 180, 90)
+-    init_res = (4.5, 6.7)
+-    query = mapnik.Query(mapnik.Box2d(*bbox), init_res)
+-    r = query.resolution
+-    assert_almost_equal(r[0], init_res[0], places=7)
+-    assert_almost_equal(r[1], init_res[1], places=7)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    with pytest.raises(TypeError):
++        bbox = (-180, -90, 180, 90)
++        init_res = (4.5, 6.7)
++        query = mapnik.Query(mapnik.Box2d(*bbox), init_res)
++        r = query.resolution
++        assert r[0] == pytest.approx(init_res[0], abs=1e-7)
++        assert r[1] == pytest.approx(init_res[1], abs=1e-7)
+--- a/test/python_tests/query_tolerance_test.py
++++ b/test/python_tests/query_tolerance_test.py
+@@ -1,22 +1,18 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++@pytest.fixture
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+-    def test_query_tolerance():
+-        srs = '+init=epsg:4326'
++    def test_query_tolerance(setup):
++        srs = 'epsg:4326'
+         lyr = mapnik.Layer('test')
+         ds = mapnik.Shapefile(file='../data/shp/arrows.shp')
+         lyr.datasource = ds
+@@ -29,20 +25,16 @@ if 'shape' in mapnik.DatasourceCache.plu
+         _map_env = _map.envelope()
+         tol = (_map_env.maxx - _map_env.minx) / _width * 3
+         # 0.046875 for arrows.shp and zoom_all
+-        eq_(tol, 0.046875)
++        assert tol == 0.046875
+         # check point really exists
+         x, y = 2.0, 4.0
+         features = _map.query_point(0, x, y)
+-        eq_(len(list(features)), 1)
++        assert len(list(features)) == 1
+         # check inside tolerance limit
+         x = 2.0 + tol * 0.9
+         features = _map.query_point(0, x, y)
+-        eq_(len(list(features)), 1)
++        assert len(list(features)) == 1
+         # check outside tolerance limit
+         x = 2.0 + tol * 1.1
+         features = _map.query_point(0, x, y)
+-        eq_(len(list(features)), 0)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert len(list(features)) == 0
+--- a/test/python_tests/raster_colorizer_test.py
++++ b/test/python_tests/raster_colorizer_test.py
+@@ -1,25 +1,19 @@
+-# coding=utf8
+ import os
+ import sys
+-
+-from nose.tools import eq_
+-
++import pytest
+ import mapnik
+ 
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-
++from .utilities import execution_path
+ 
++@pytest.fixture
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ # test discrete colorizer mode
+-
+-
+-def test_get_color_discrete():
++def test_get_color_discrete(setup):
+     # setup
+     colorizer = mapnik.RasterColorizer()
+     colorizer.default_color = mapnik.Color(0, 0, 0, 0)
+@@ -29,16 +23,16 @@ def test_get_color_discrete():
+     colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
+ 
+     # should be default colour
+-    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+-    eq_(colorizer.get_color(0), mapnik.Color(0, 0, 0, 0))
++    assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0)
++    assert colorizer.get_color(0) ==  mapnik.Color(0, 0, 0, 0)
+ 
+     # now in stop 1
+-    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
+-    eq_(colorizer.get_color(19), mapnik.Color(100, 100, 100, 100))
++    assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100)
++    assert colorizer.get_color(19) == mapnik.Color(100, 100, 100, 100)
+ 
+     # now in stop 2
+-    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
+-    eq_(colorizer.get_color(1000), mapnik.Color(200, 200, 200, 200))
++    assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200)
++    assert colorizer.get_color(1000) ==  mapnik.Color(200, 200, 200, 200)
+ 
+ # test exact colorizer mode
+ 
+@@ -53,15 +47,15 @@ def test_get_color_exact():
+     colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
+ 
+     # should be default colour
+-    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+-    eq_(colorizer.get_color(11), mapnik.Color(0, 0, 0, 0))
+-    eq_(colorizer.get_color(20.001), mapnik.Color(0, 0, 0, 0))
++    assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0)
++    assert colorizer.get_color(11) == mapnik.Color(0, 0, 0, 0)
++    assert colorizer.get_color(20.001) == mapnik.Color(0, 0, 0, 0)
+ 
+     # should be stop 1
+-    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
++    assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100)
+ 
+     # should be stop 2
+-    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
++    assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200)
+ 
+ # test linear colorizer mode
+ 
+@@ -76,20 +70,20 @@ def test_get_color_linear():
+     colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
+ 
+     # should be default colour
+-    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+-    eq_(colorizer.get_color(9.9), mapnik.Color(0, 0, 0, 0))
++    assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0)
++    assert colorizer.get_color(9.9) == mapnik.Color(0, 0, 0, 0)
+ 
+     # should be stop 1
+-    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
++    assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100)
+ 
+     # should be stop 2
+-    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
++    assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200)
+ 
+     # half way between stops 1 and 2
+-    eq_(colorizer.get_color(15), mapnik.Color(150, 150, 150, 150))
++    assert colorizer.get_color(15) ==  mapnik.Color(150, 150, 150, 150)
+ 
+     # after stop 2
+-    eq_(colorizer.get_color(100), mapnik.Color(200, 200, 200, 200))
++    assert colorizer.get_color(100) ==  mapnik.Color(200, 200, 200, 200)
+ 
+ 
+ def test_stop_label():
+@@ -97,11 +91,5 @@ def test_stop_label():
+         1, mapnik.COLORIZER_LINEAR, mapnik.Color('red'))
+     assert not stop.label
+     label = u"32° C"
+-    if not PYTHON3:
+-        label = label.encode('utf8')
+     stop.label = label
+     assert stop.label == label, stop.label
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/raster_symbolizer_test.py
++++ b/test/python_tests/raster_symbolizer_test.py
+@@ -1,22 +1,17 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path, get_unique_colors
+ 
+-from .utilities import execution_path, get_unique_colors, run_all
+-
+-
++@pytest.fixture
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_dataraster_coloring():
+-    srs = '+init=epsg:32630'
++def test_dataraster_coloring(setup):
++    srs = 'epsg:32630'
+     lyr = mapnik.Layer('dataraster')
+     if 'gdal' in mapnik.DatasourceCache.plugin_names():
+         lyr.datasource = mapnik.Gdal(
+@@ -65,14 +60,12 @@ def test_dataraster_coloring():
+             im.save(expected_file, 'png32')
+         actual = mapnik.Image.open(actual_file)
+         expected = mapnik.Image.open(expected_file)
+-        eq_(actual.tostring('png32'),
+-            expected.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+-                                                                expected_file))
++        assert actual.tostring('png32') == expected.tostring('png32'),'failed comparing actual (%s) and expected (%s)' % (actual_file,
++                                                                                                                          expected_file)
+ 
+ 
+ def test_dataraster_query_point():
+-    srs = '+init=epsg:32630'
++    srs = 'epsg:32630'
+     lyr = mapnik.Layer('dataraster')
+     if 'gdal' in mapnik.DatasourceCache.plugin_names():
+         lyr.datasource = mapnik.Gdal(
+@@ -153,12 +146,12 @@ def test_raster_with_alpha_blends_correc
+         mapnik.render(map, mim)
+         mim.tostring()
+         # All white is expected
+-        eq_(get_unique_colors(mim), ['rgba(254,254,254,255)'])
++        assert get_unique_colors(mim) == ['rgba(254,254,254,255)']
+ 
+ 
+ def test_raster_warping():
+-    lyrSrs = "+init=epsg:32630"
+-    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
++    lyrSrs = "epsg:32630"
++    mapSrs = 'epsg:4326'
+     lyr = mapnik.Layer('dataraster', lyrSrs)
+     if 'gdal' in mapnik.DatasourceCache.plugin_names():
+         lyr.datasource = mapnik.Gdal(
+@@ -191,15 +184,13 @@ def test_raster_warping():
+             im.save(expected_file, 'png32')
+         actual = mapnik.Image.open(actual_file)
+         expected = mapnik.Image.open(expected_file)
+-        eq_(actual.tostring('png32'),
+-            expected.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+-                                                                expected_file))
++        assert actual.tostring('png32') == expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,
++                                                                                                                           expected_file)
+ 
+ 
+ def test_raster_warping_does_not_overclip_source():
+-    lyrSrs = "+init=epsg:32630"
+-    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
++    lyrSrs = "epsg:32630"
++    mapSrs = 'epsg:4326'
+     lyr = mapnik.Layer('dataraster', lyrSrs)
+     if 'gdal' in mapnik.DatasourceCache.plugin_names():
+         lyr.datasource = mapnik.Gdal(
+@@ -229,11 +220,5 @@ def test_raster_warping_does_not_overcli
+             im.save(expected_file, 'png32')
+         actual = mapnik.Image.open(actual_file)
+         expected = mapnik.Image.open(expected_file)
+-        eq_(actual.tostring('png32'),
+-            expected.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+-                                                                expected_file))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert actual.tostring('png32') == expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,
++                                                                                                                           expected_file)
+--- a/test/python_tests/rasterlite_test.py
++++ b/test/python_tests/rasterlite_test.py
+@@ -1,19 +1,15 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
++    yield
+ 
+ if 'rasterlite' in mapnik.DatasourceCache.plugin_names():
+ 
+@@ -24,19 +20,15 @@ if 'rasterlite' in mapnik.DatasourceCach
+         )
+         e = ds.envelope()
+ 
+-        assert_almost_equal(e.minx, -180, places=5)
+-        assert_almost_equal(e.miny, -90, places=5)
+-        assert_almost_equal(e.maxx, 180, places=5)
+-        assert_almost_equal(e.maxy, 90, places=5)
+-        eq_(len(ds.fields()), 0)
++        assert e.minx == pytest.approx(-180,abs=1e-5)
++        assert e.miny == pytest.approx(-90, abs=1e-5)
++        assert e.maxx == pytest.approx(180, abs=1e-5)
++        assert e.maxy == pytest.approx( 90, abs=1e-5)
++        assert len(ds.fields()) == 0
+         query = mapnik.Query(ds.envelope())
+         for fld in ds.fields():
+             query.add_property_name(fld)
+         fs = ds.features(query)
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat.attributes, {})
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert feat.id() == 1
++        assert feat.attributes == {}
+--- a/test/python_tests/render_grid_test.py
++++ b/test/python_tests/render_grid_test.py
+@@ -1,24 +1,16 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
++import json
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
+-try:
+-    import json
+-except ImportError:
+-    import simplejson as json
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if mapnik.has_grid_renderer():
+     def show_grids(name, g1, g2):
+@@ -369,7 +361,7 @@ if mapnik.has_grid_renderer():
+         m.layers.append(lyr)
+         return m
+ 
+-    def test_render_grid():
++    def test_render_grid(setup):
+         """ test render_grid method"""
+         width, height = 256, 256
+         sym = mapnik.MarkersSymbolizer()
+@@ -384,27 +376,26 @@ if mapnik.has_grid_renderer():
+         grid = mapnik.Grid(m.width, m.height, key='Name')
+         mapnik.render_layer(m, grid, layer=0, fields=['Name'])
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1, grid_correct_new3, show_grids(
+-            'new-markers', utf1, grid_correct_new3))
++        assert utf1 == grid_correct_new3, show_grids('new-markers', utf1, grid_correct_new3)
+ 
+         # check a full view is the same as a full image
+         grid_view = grid.view(0, 0, width, height)
+         # for kicks check at full res too
+         utf3 = grid.encode('utf', resolution=1)
+         utf4 = grid_view.encode('utf', resolution=1)
+-        eq_(utf3['grid'], utf4['grid'])
+-        eq_(utf3['keys'], utf4['keys'])
+-        eq_(utf3['data'], utf4['data'])
++        assert utf3['grid'] ==  utf4['grid']
++        assert utf3['keys'] ==  utf4['keys']
++        assert utf3['data'] ==  utf4['data']
+ 
+-        eq_(resolve(utf4, 0, 0), None)
++        assert resolve(utf4, 0, 0) ==  None
+ 
+         # resolve some center points in the
+         # resampled view
+         utf5 = grid_view.encode('utf', resolution=4)
+-        eq_(resolve(utf5, 25, 10), {"Name": "North West"})
+-        eq_(resolve(utf5, 25, 46), {"Name": "North East"})
+-        eq_(resolve(utf5, 38, 10), {"Name": "South West"})
+-        eq_(resolve(utf5, 38, 46), {"Name": "South East"})
++        assert resolve(utf5, 25, 10) ==  {"Name": "North West"}
++        assert resolve(utf5, 25, 46) ==  {"Name": "North East"}
++        assert resolve(utf5, 38, 10) ==  {"Name": "South West"}
++        assert resolve(utf5, 38, 46) ==  {"Name": "South East"}
+ 
+     grid_feat_id = {
+         'keys': [
+@@ -670,25 +661,25 @@ if mapnik.has_grid_renderer():
+         grid = mapnik.Grid(m.width, m.height, key='__id__')
+         mapnik.render_layer(m, grid, layer=0, fields=['__id__', 'Name'])
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1, grid_feat_id3, show_grids('id-markers', utf1, grid_feat_id3))
++        assert utf1 == grid_feat_id3, show_grids('id-markers', utf1, grid_feat_id3)
+         # check a full view is the same as a full image
+         grid_view = grid.view(0, 0, width, height)
+         # for kicks check at full res too
+         utf3 = grid.encode('utf', resolution=1)
+         utf4 = grid_view.encode('utf', resolution=1)
+-        eq_(utf3['grid'], utf4['grid'])
+-        eq_(utf3['keys'], utf4['keys'])
+-        eq_(utf3['data'], utf4['data'])
++        assert utf3['grid'] ==  utf4['grid']
++        assert utf3['keys'] ==  utf4['keys']
++        assert utf3['data'] ==  utf4['data']
+ 
+-        eq_(resolve(utf4, 0, 0), None)
++        assert resolve(utf4, 0, 0) ==  None
+ 
+         # resolve some center points in the
+         # resampled view
+         utf5 = grid_view.encode('utf', resolution=4)
+-        eq_(resolve(utf5, 25, 10), {"Name": "North West", "__id__": 3})
+-        eq_(resolve(utf5, 25, 46), {"Name": "North East", "__id__": 4})
+-        eq_(resolve(utf5, 38, 10), {"Name": "South West", "__id__": 2})
+-        eq_(resolve(utf5, 38, 46), {"Name": "South East", "__id__": 1})
++        assert resolve(utf5, 25, 10) == {"Name": "North West", "__id__": 3}
++        assert resolve(utf5, 25, 46) == {"Name": "North East", "__id__": 4}
++        assert resolve(utf5, 38, 10) == {"Name": "South West", "__id__": 2}
++        assert resolve(utf5, 38, 46) == {"Name": "South East", "__id__": 1}
+ 
+     def gen_grid_for_id(pixel_key):
+         ds = mapnik.MemoryDatasource()
+@@ -718,39 +709,39 @@ if mapnik.has_grid_renderer():
+ 
+     def test_negative_id():
+         grid = gen_grid_for_id(-1)
+-        eq_(grid.get_pixel(128, 128), -1)
++        assert grid.get_pixel(128, 128) ==  -1
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], ['-1'])
++        assert utf1['keys'] ==  ['-1']
+ 
+     def test_32bit_int_id():
+         int32 = 2147483647
+         grid = gen_grid_for_id(int32)
+-        eq_(grid.get_pixel(128, 128), int32)
++        assert grid.get_pixel(128, 128) ==  int32
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], [str(int32)])
++        assert utf1['keys'] ==  [str(int32)]
+         max_neg = -(int32)
+         grid = gen_grid_for_id(max_neg)
+-        eq_(grid.get_pixel(128, 128), max_neg)
++        assert grid.get_pixel(128, 128) ==  max_neg
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], [str(max_neg)])
++        assert utf1['keys'] ==  [str(max_neg)]
+ 
+     def test_64bit_int_id():
+         int64 = 0x7FFFFFFFFFFFFFFF
+         grid = gen_grid_for_id(int64)
+-        eq_(grid.get_pixel(128, 128), int64)
++        assert grid.get_pixel(128, 128) ==  int64
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], [str(int64)])
++        assert utf1['keys'] ==  [str(int64)]
+         max_neg = -(int64)
+         grid = gen_grid_for_id(max_neg)
+-        eq_(grid.get_pixel(128, 128), max_neg)
++        assert grid.get_pixel(128, 128) ==  max_neg
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], [str(max_neg)])
++        assert utf1['keys'] ==  [str(max_neg)]
+ 
+     def test_id_zero():
+         grid = gen_grid_for_id(0)
+-        eq_(grid.get_pixel(128, 128), 0)
++        assert grid.get_pixel(128, 128) ==  0
+         utf1 = grid.encode('utf', resolution=4)
+-        eq_(utf1['keys'], ['0'])
++        assert utf1['keys'] ==  ['0']
+ 
+     line_expected = {
+         "keys": [
+@@ -852,7 +843,7 @@ if mapnik.has_grid_renderer():
+         grid = mapnik.Grid(m.width, m.height, key='__id__')
+         mapnik.render_layer(m, grid, layer=0, fields=['Name'])
+         utf1 = grid.encode()
+-        eq_(utf1, line_expected, show_grids('line', utf1, line_expected))
++        assert utf1 == line_expected, show_grids('line', utf1, line_expected)
+ 
+     point_expected = {
+         "data": {
+@@ -947,53 +938,49 @@ if mapnik.has_grid_renderer():
+         grid = mapnik.Grid(m.width, m.height)
+         mapnik.render_layer(m, grid, layer=0, fields=['Name'])
+         utf1 = grid.encode()
+-        eq_(utf1, point_expected, show_grids('point-sym', utf1, point_expected))
++        assert utf1 == point_expected, show_grids('point-sym', utf1, point_expected)
+ 
+     test_point_symbolizer_grid.requires_data = True
+ 
+     # should throw because this is a mis-usage
+     # https://github.com/mapnik/mapnik/issues/1325
+-    @raises(RuntimeError)
+     def test_render_to_grid_multiple_times():
+-        # create map with two layers
+-        m = mapnik.Map(256, 256)
+-        s = mapnik.Style()
+-        r = mapnik.Rule()
+-        sym = mapnik.MarkersSymbolizer()
+-        sym.allow_overlap = True
+-        r.symbols.append(sym)
+-        s.rules.append(r)
+-        m.append_style('points', s)
+-
+-        # NOTE: we use a csv datasource here
+-        # because the memorydatasource fails silently for
+-        # queries requesting fields that do not exist in the datasource
+-        ds1 = mapnik.Datasource(**{"type": "csv", "inline": '''
+-          wkt,Name
+-          "POINT (143.10 -38.60)",South East'''})
+-        lyr1 = mapnik.Layer('One')
+-        lyr1.datasource = ds1
+-        lyr1.styles.append('points')
+-        m.layers.append(lyr1)
+-
+-        ds2 = mapnik.Datasource(**{"type": "csv", "inline": '''
+-          wkt,Value
+-          "POINT (142.48 -38.60)",South West'''})
+-        lyr2 = mapnik.Layer('Two')
+-        lyr2.datasource = ds2
+-        lyr2.styles.append('points')
+-        m.layers.append(lyr2)
+-
+-        ul_lonlat = mapnik.Coord(142.30, -38.20)
+-        lr_lonlat = mapnik.Coord(143.40, -38.80)
+-        m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
+-        grid = mapnik.Grid(m.width, m.height)
+-        mapnik.render_layer(m, grid, layer=0, fields=['Name'])
+-        # should throw right here since Name will be a property now on the `grid` object
+-        # and it is not found on the second layer
+-        mapnik.render_layer(m, grid, layer=1, fields=['Value'])
+-        grid.encode()
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        with pytest.raises(RuntimeError):
++            # create map with two layers
++            m = mapnik.Map(256, 256)
++            s = mapnik.Style()
++            r = mapnik.Rule()
++            sym = mapnik.MarkersSymbolizer()
++            sym.allow_overlap = True
++            r.symbols.append(sym)
++            s.rules.append(r)
++            m.append_style('points', s)
++
++            # NOTE: we use a csv datasource here
++            # because the memorydatasource fails silently for
++            # queries requesting fields that do not exist in the datasource
++            ds1 = mapnik.Datasource(**{"type": "csv", "inline": '''
++            wkt,Name
++            "POINT (143.10 -38.60)",South East'''})
++            lyr1 = mapnik.Layer('One')
++            lyr1.datasource = ds1
++            lyr1.styles.append('points')
++            m.layers.append(lyr1)
++
++            ds2 = mapnik.Datasource(**{"type": "csv", "inline": '''
++            wkt,Value
++            "POINT (142.48 -38.60)",South West'''})
++            lyr2 = mapnik.Layer('Two')
++            lyr2.datasource = ds2
++            lyr2.styles.append('points')
++            m.layers.append(lyr2)
++
++            ul_lonlat = mapnik.Coord(142.30, -38.20)
++            lr_lonlat = mapnik.Coord(143.40, -38.80)
++            m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
++            grid = mapnik.Grid(m.width, m.height)
++            mapnik.render_layer(m, grid, layer=0, fields=['Name'])
++            # should throw right here since Name will be a property now on the `grid` object
++            # and it is not found on the second layer
++            mapnik.render_layer(m, grid, layer=1, fields=['Value'])
++            grid.encode()
+--- a/test/python_tests/render_test.py
++++ b/test/python_tests/render_test.py
+@@ -1,77 +1,62 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-import os
+-import sys
++import sys, os
+ import tempfile
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
+-
+-
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+-
+-def test_simplest_render():
++def test_simplest_render(setup):
+     m = mapnik.Map(256, 256)
+     im = mapnik.Image(m.width, m.height)
+-    eq_(im.painted(), False)
+-    eq_(im.is_solid(), True)
++    assert not im.painted()
++    assert im.is_solid()
+     mapnik.render(m, im)
+-    eq_(im.painted(), False)
+-    eq_(im.is_solid(), True)
++    assert not im.painted()
++    assert im.is_solid()
+     s = im.tostring()
+-    if PYTHON3:
+-        eq_(s, 256 * 256 * b'\x00\x00\x00\x00')
+-    else:
+-        eq_(s, 256 * 256 * '\x00\x00\x00\x00')
++    assert s ==  256 * 256 * b'\x00\x00\x00\x00'
+ 
+ 
+ def test_render_image_to_string():
+     im = mapnik.Image(256, 256)
+     im.fill(mapnik.Color('black'))
+-    eq_(im.painted(), False)
+-    eq_(im.is_solid(), True)
++    assert not im.painted()
++    assert im.is_solid()
+     s = im.tostring()
+-    if PYTHON3:
+-        eq_(s, 256 * 256 * b'\x00\x00\x00\xff')
+-    else:
+-        eq_(s, 256 * 256 * '\x00\x00\x00\xff')
++    assert s ==  256 * 256 * b'\x00\x00\x00\xff'
+ 
+ 
+ def test_non_solid_image():
+     im = mapnik.Image(256, 256)
+     im.fill(mapnik.Color('black'))
+-    eq_(im.painted(), False)
+-    eq_(im.is_solid(), True)
++    assert not im.painted()
++    assert im.is_solid()
+     # set one pixel to a different color
+     im.set_pixel(0, 0, mapnik.Color('white'))
+-    eq_(im.painted(), False)
+-    eq_(im.is_solid(), False)
++    assert not im.painted()
++    assert not im.is_solid()
+ 
+ 
+ def test_non_solid_image_view():
+     im = mapnik.Image(256, 256)
+     im.fill(mapnik.Color('black'))
+     view = im.view(0, 0, 256, 256)
+-    eq_(view.is_solid(), True)
++    assert view.is_solid()
+     # set one pixel to a different color
+     im.set_pixel(0, 0, mapnik.Color('white'))
+-    eq_(im.is_solid(), False)
++    assert not im.is_solid()
+     # view, since it is the exact dimensions of the image
+     # should also be non-solid
+-    eq_(view.is_solid(), False)
++    assert not view.is_solid()
+     # but not a view that excludes the single diff pixel
+     view2 = im.view(1, 1, 256, 256)
+-    eq_(view2.is_solid(), True)
++    assert view2.is_solid()
+ 
+ 
+ def test_setting_alpha():
+@@ -80,16 +65,16 @@ def test_setting_alpha():
+     # white, half transparent
+     c1 = mapnik.Color('rgba(255,255,255,.5)')
+     im1.fill(c1)
+-    eq_(im1.painted(), False)
+-    eq_(im1.is_solid(), True)
++    assert not im1.painted()
++    assert im1.is_solid()
+     # pure white
+     im2 = mapnik.Image(w, h)
+     c2 = mapnik.Color('rgba(255,255,255,1)')
+     im2.fill(c2)
+     im2.apply_opacity(c1.a / 255.0)
+-    eq_(im2.painted(), False)
+-    eq_(im2.is_solid(), True)
+-    eq_(len(im1.tostring('png32')), len(im2.tostring('png32')))
++    assert not im2.painted()
++    assert im2.is_solid()
++    assert len(im1.tostring('png32')) ==  len(im2.tostring('png32'))
+ 
+ 
+ def test_render_image_to_file():
+@@ -129,11 +114,11 @@ def test_render_from_serialization():
+     try:
+         im, im2 = get_paired_images(
+             100, 100, '../data/good_maps/building_symbolizer.xml')
+-        eq_(im.tostring('png32'), im2.tostring('png32'))
++        assert im.tostring('png32') ==  im2.tostring('png32')
+ 
+         im, im2 = get_paired_images(
+             100, 100, '../data/good_maps/polygon_symbolizer.xml')
+-        eq_(im.tostring('png32'), im2.tostring('png32'))
++        assert im.tostring('png32') ==  im2.tostring('png32')
+     except RuntimeError as e:
+         # only test datasources that we have installed
+         if not 'Could not create datasource' in str(e):
+@@ -166,7 +151,7 @@ def test_render_points():
+     s.rules.append(r)
+     lyr = mapnik.Layer(
+         'Places',
+-        '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
++        'epsg:4326')
+     lyr.datasource = ds
+     lyr.styles.append('places_labels')
+     # latlon bounding box corners
+@@ -174,8 +159,8 @@ def test_render_points():
+     lr_lonlat = mapnik.Coord(143.40, -38.80)
+     # render for different projections
+     projs = {
+-        'google': '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over',
+-        'latlon': '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
++        'google': 'epsg:3857',
++        'latlon': 'epsg:4326',
+         'merc': '+proj=merc +datum=WGS84 +k=1.0 +units=m +over +no_defs',
+         'utm': '+proj=utm +zone=54 +datum=WGS84'
+     }
+@@ -184,7 +169,7 @@ def test_render_points():
+         m.append_style('places_labels', s)
+         m.layers.append(lyr)
+         dest_proj = mapnik.Projection(projs[projdescr])
+-        src_proj = mapnik.Projection('+init=epsg:4326')
++        src_proj = mapnik.Projection('epsg:4326')
+         tr = mapnik.ProjTransform(src_proj, dest_proj)
+         m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat, lr_lonlat)))
+         # Render to SVG so that it can be checked how many points are there
+@@ -198,21 +183,14 @@ def test_render_points():
+         with open(svg_file, 'r') as f:
+             svg = f.read()
+         num_points_rendered = svg.count('<image ')
+-        eq_(
+-            num_points_present,
+-            num_points_rendered,
+-            "Not all points were rendered (%d instead of %d) at projection %s" %
+-            (num_points_rendered,
+-             num_points_present,
+-             projdescr))
++        assert  num_points_present == num_points_rendered, "Not all points were rendered (%d instead of %d) at projection %s" % (num_points_rendered, num_points_present, projdescr)
+ 
+ 
+-@raises(RuntimeError)
+ def test_render_with_scale_factor_zero_throws():
+-    m = mapnik.Map(256, 256)
+-    im = mapnik.Image(256, 256)
+-    mapnik.render(m, im, 0.0)
+-
++    with pytest.raises(RuntimeError):
++        m = mapnik.Map(256, 256)
++        im = mapnik.Image(256, 256)
++        mapnik.render(m, im, 0.0) #should throw
+ 
+ def test_render_with_detector():
+     ds = mapnik.MemoryDatasource()
+@@ -234,27 +212,24 @@ def test_render_with_detector():
+     m.zoom_to_box(mapnik.Box2d(-180, -85, 180, 85))
+     im = mapnik.Image(256, 256)
+     mapnik.render(m, im)
+-    expected_file = './images/support/marker-in-center.png'
++    expected_file = 'images/support/marker-in-center.png'
+     actual_file = '/tmp/' + os.path.basename(expected_file)
+     # im.save(expected_file,'png8')
+     im.save(actual_file, 'png8')
+     actual = mapnik.Image.open(expected_file)
+     expected = mapnik.Image.open(expected_file)
+-    eq_(actual.tostring('png32'),
+-        expected.tostring('png32'),
+-        'failed comparing actual (%s) and expected (%s)' % (actual_file,
+-                                                            expected_file))
++    assert actual.tostring('png32') == expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file, expected_file)
+     # now render will a collision detector that should
+     # block out the placement of this point
+     detector = mapnik.LabelCollisionDetector(m)
+-    eq_(detector.extent(), mapnik.Box2d(-0.0, -0.0, m.width, m.height))
+-    eq_(detector.extent(), mapnik.Box2d(-0.0, -0.0, 256.0, 256.0))
+-    eq_(detector.boxes(), [])
++        assert detector.extent() == mapnik.Box2d(-0.0, -0.0, m.width, m.height)
++        assert detector.extent() == mapnik.Box2d(-0.0, -0.0, 256.0, 256.0)
++    assert detector.boxes() ==  []
+     detector.insert(detector.extent())
+-    eq_(detector.boxes(), [detector.extent()])
++    assert detector.boxes() ==  [detector.extent()]
+     im2 = mapnik.Image(256, 256)
+     mapnik.render_with_detector(m, im2, detector)
+-    expected_file_collision = './images/support/marker-in-center-not-placed.png'
++    expected_file_collision = 'images/support/marker-in-center-not-placed.png'
+     # im2.save(expected_file_collision,'png8')
+     actual_file = '/tmp/' + os.path.basename(expected_file_collision)
+     im2.save(actual_file, 'png8')
+@@ -270,7 +245,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+         for size in sizes:
+             im = mapnik.Image(256, 256)
+             mapnik.render(m, im, size)
+-            expected_file = './images/support/marker-text-line-scale-factor-%s.png' % size
++            expected_file = 'images/support/marker-text-line-scale-factor-%s.png' % size
+             actual_file = '/tmp/' + os.path.basename(expected_file)
+             im.save(actual_file, 'png32')
+             if os.environ.get('UPDATE'):
+@@ -279,11 +254,4 @@ if 'shape' in mapnik.DatasourceCache.plu
+             # color png
+             actual = mapnik.Image.open(actual_file)
+             expected = mapnik.Image.open(expected_file)
+-            eq_(actual.tostring('png32'),
+-                expected.tostring('png32'),
+-                'failed comparing actual (%s) and expected (%s)' % (actual_file,
+-                                                                    expected_file))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++            assert actual.tostring('png32') == expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file, expected_file)
+--- a/test/python_tests/reprojection_test.py
++++ b/test/python_tests/reprojection_test.py
+@@ -1,22 +1,18 @@
+-# coding=utf8
+ import os
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
++from .utilities import execution_path, images_almost_equal
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+ 
+-    #@raises(RuntimeError)
+-    def test_zoom_all_will_fail():
++    def test_zoom_all_will_fail(setup):
+         m = mapnik.Map(512, 512)
+         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+         m.zoom_all()
+@@ -24,25 +20,23 @@ if 'shape' in mapnik.DatasourceCache.plu
+     def test_zoom_all_will_work_with_max_extent():
+         m = mapnik.Map(512, 512)
+         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+-        merc_bounds = mapnik.Box2d(-20037508.34, -
+-                                   20037508.34, 20037508.34, 20037508.34)
++        merc_bounds = mapnik.Box2d(-20037508.34, -20037508.34, 20037508.34, 20037508.34)
+         m.maximum_extent = merc_bounds
+         m.zoom_all()
+         # note - fixAspectRatio is being called, then re-clipping to maxextent
+         # which makes this hard to predict
+-        # eq_(m.envelope(),merc_bounds)
++        #assert m.envelope() == merc_bounds
+ 
+-        #m = mapnik.Map(512,512)
+-        # mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
+-        #merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
+-        # m.zoom_to_box(merc_bounds)
+-        # eq_(m.envelope(),merc_bounds)
++        m = mapnik.Map(512,512)
++        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
++        merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
++        m.zoom_to_box(merc_bounds)
++        assert m.envelope() == merc_bounds
+ 
+     def test_visual_zoom_all_rendering1():
+         m = mapnik.Map(512, 512)
+         mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+-        merc_bounds = mapnik.Box2d(-20037508.34, -
+-                                   20037508.34, 20037508.34, 20037508.34)
++        merc_bounds = mapnik.Box2d(-20037508.34, -20037508.34, 20037508.34, 20037508.34)
+         m.maximum_extent = merc_bounds
+         m.zoom_all()
+         im = mapnik.Image(512, 512)
+@@ -51,10 +45,7 @@ if 'shape' in mapnik.DatasourceCache.plu
+         expected = 'images/support/mapnik-wgs842merc-reprojection-render.png'
+         im.save(actual, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
++        images_almost_equal(im, expected_im)
+ 
+     def test_visual_zoom_all_rendering2():
+         m = mapnik.Map(512, 512)
+@@ -62,14 +53,8 @@ if 'shape' in mapnik.DatasourceCache.plu
+         m.zoom_all()
+         im = mapnik.Image(512, 512)
+         mapnik.render(m, im)
+-        actual = '/tmp/mapnik-merc2wgs84-reprojection-render.png'
+-        expected = 'images/support/mapnik-merc2wgs84-reprojection-render.png'
+-        im.save(actual, 'png32')
+-        expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
++        expected_im = mapnik.Image.open('images/support/mapnik-merc2wgs84-reprojection-render.png')
++        images_almost_equal(im, expected_im)
+ 
+     # maximum-extent read from map.xml
+     def test_visual_zoom_all_rendering3():
+@@ -78,14 +63,9 @@ if 'shape' in mapnik.DatasourceCache.plu
+         m.zoom_all()
+         im = mapnik.Image(512, 512)
+         mapnik.render(m, im)
+-        actual = '/tmp/mapnik-merc2merc-reprojection-render1.png'
+         expected = 'images/support/mapnik-merc2merc-reprojection-render1.png'
+-        im.save(actual, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
++        images_almost_equal(im, expected_im)
+ 
+     # no maximum-extent
+     def test_visual_zoom_all_rendering4():
+@@ -95,15 +75,6 @@ if 'shape' in mapnik.DatasourceCache.plu
+         m.zoom_all()
+         im = mapnik.Image(512, 512)
+         mapnik.render(m, im)
+-        actual = '/tmp/mapnik-merc2merc-reprojection-render2.png'
+         expected = 'images/support/mapnik-merc2merc-reprojection-render2.png'
+-        im.save(actual, 'png32')
+         expected_im = mapnik.Image.open(expected)
+-        eq_(im.tostring('png32'),
+-            expected_im.tostring('png32'),
+-            'failed comparing actual (%s) and expected (%s)' % (actual,
+-                                                                'test/python_tests/' + expected))
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        images_almost_equal(im, expected_im)
+--- a/test/python_tests/save_map_test.py
++++ b/test/python_tests/save_map_test.py
+@@ -1,31 +1,19 @@
+-#!/usr/bin/env python
+-
+ import glob
+ import os
+ import tempfile
+-
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
+ default_logging_severity = mapnik.logger.get_severity()
+ 
+-
++ at pytest.fixture(scope="module")
+ def setup():
+-    # make the tests silent to suppress unsupported params from harfbuzz tests
+-    # TODO: remove this after harfbuzz branch merges
+-    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
+-
+-def teardown():
+-    mapnik.logger.set_severity(default_logging_severity)
+-
++    yield
+ 
+ def compare_map(xml):
+     m = mapnik.Map(256, 256)
+@@ -56,7 +44,7 @@ def compare_map(xml):
+     try:
+         with open(test_map) as f1:
+             with open(test_map2) as f2:
+-                eq_(f1.read(), f2.read())
++                assert f1.read() == f2.read()
+     except AssertionError as e:
+         raise AssertionError(
+             'serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' %
+@@ -69,7 +57,7 @@ def compare_map(xml):
+         return False
+ 
+ 
+-def test_compare_map():
++def test_compare_map(setup):
+     good_maps = glob.glob("../data/good_maps/*.xml")
+     good_maps = [os.path.normpath(p) for p in good_maps]
+     # remove one map that round trips CDATA differently, but this is okay
+@@ -89,7 +77,3 @@ def test_compare_map_deprecations():
+     dep = [os.path.normpath(p) for p in dep]
+     for m in dep:
+         compare_map(m)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/shapefile_test.py
++++ b/test/python_tests/shapefile_test.py
+@@ -1,149 +1,122 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+ import os
+-
+-from nose.tools import assert_almost_equal, eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
+ 
+     # Shapefile initialization
+-    def test_shapefile_init():
++    def test_shapefile_init(setup):
+         s = mapnik.Shapefile(file='../data/shp/boundaries')
+-
+         e = s.envelope()
+-
+-        assert_almost_equal(e.minx, -11121.6896651, places=7)
+-        assert_almost_equal(e.miny, -724724.216526, places=6)
+-        assert_almost_equal(e.maxx, 2463000.67866, places=5)
+-        assert_almost_equal(e.maxy, 1649661.267, places=3)
++        assert e.minx == pytest.approx(-11121.6896651, abs=1e-07)
++        assert e.miny == pytest.approx( -724724.216526, abs=1e-6)
++        assert e.maxx == pytest.approx( 2463000.67866, abs=1e-5)
++        assert e.maxy == pytest.approx( 1649661.267, abs=1e-3)
+ 
+     # Shapefile properties
+     def test_shapefile_properties():
+         s = mapnik.Shapefile(file='../data/shp/boundaries', encoding='latin1')
+         f = list(s.features_at_point(s.envelope().center()))[0]
+ 
+-        eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
+-        eq_(f['COUNTRY'], u'CAN')
+-        eq_(f['F_CODE'], u'FA001')
+-        eq_(f['NAME_EN'], u'Quebec')
++        assert f['CGNS_FID'] ==  u'6f733341ba2011d892e2080020a0f4c9'
++        assert f['COUNTRY'] ==  u'CAN'
++        assert f['F_CODE'] ==  u'FA001'
++        assert f['NAME_EN'] ==  u'Quebec'
+         # this seems to break if icu data linking is not working
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
+-        eq_(f['Shape_Area'], 1512185733150.0)
+-        eq_(f['Shape_Leng'], 19218883.724300001)
++        assert f['NOM_FR'] ==  u'Qu\xe9bec'
++        assert f['NOM_FR'] ==  u'Qu?bec'
++        assert f['Shape_Area'] ==  1512185733150.0
++        assert f['Shape_Leng'] ==  19218883.724300001
++
+ 
+-    @raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
+         ds = mapnik.Shapefile(file='../data/shp/world_merc')
+-        eq_(len(ds.fields()), 11)
+-        eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
+-                          'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
+-        eq_(ds.field_types(),
+-            ['str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float'])
++        assert len(ds.fields()) ==  11
++        assert ds.fields() == ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
++                               'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT']
++        assert ds.field_types() == ['str','str','str','int','str','int','int','int','int','float','float']
+         query = mapnik.Query(ds.envelope())
+-        for fld in ds.fields():
+-            query.add_property_name(fld)
+-        # also add an invalid one, triggering throw
+-        query.add_property_name('bogus')
+-        ds.features(query)
++        for fld in ds.fields():
++            query.add_property_name(fld)
++        # also add an invalid one, triggering throw
++        query.add_property_name('bogus')
++        with pytest.raises(RuntimeError):
++            ds.features(query)
+ 
+     def test_dbf_logical_field_is_boolean():
+         ds = mapnik.Shapefile(file='../data/shp/long_lat')
+-        eq_(len(ds.fields()), 7)
+-        eq_(ds.fields(), ['LONG', 'LAT', 'LOGICAL_TR',
+-                          'LOGICAL_FA', 'CHARACTER', 'NUMERIC', 'DATE'])
+-        eq_(ds.field_types(), ['str', 'str',
+-                               'bool', 'bool', 'str', 'float', 'str'])
++        assert len(ds.fields()) ==  7
++        assert ds.fields() == ['LONG', 'LAT', 'LOGICAL_TR', 'LOGICAL_FA', 'CHARACTER', 'NUMERIC', 'DATE']
++        assert ds.field_types() == ['str', 'str', 'bool', 'bool', 'str', 'float', 'str']
+         query = mapnik.Query(ds.envelope())
+         for fld in ds.fields():
+             query.add_property_name(fld)
+         feat = list(ds.all_features())[0]
+-        eq_(feat.id(), 1)
+-        eq_(feat['LONG'], '0')
+-        eq_(feat['LAT'], '0')
+-        eq_(feat['LOGICAL_TR'], True)
+-        eq_(feat['LOGICAL_FA'], False)
+-        eq_(feat['CHARACTER'], '254')
+-        eq_(feat['NUMERIC'], 32)
+-        eq_(feat['DATE'], '20121202')
++        assert feat.id() ==  1
++        assert feat['LONG'] ==  '0'
++        assert feat['LAT'] ==  '0'
++        assert feat['LOGICAL_TR'] ==  True
++        assert feat['LOGICAL_FA'] ==  False
++        assert feat['CHARACTER'] ==  '254'
++        assert feat['NUMERIC'] ==  32
++        assert feat['DATE'] ==  '20121202'
+ 
+     # created by hand in qgis 1.8.0
+     def test_shapefile_point2d_from_qgis():
+         ds = mapnik.Shapefile(file='../data/shp/points/qgis.shp')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['id', 'name'])
+-        eq_(ds.field_types(), ['int', 'str'])
+-        eq_(len(list(ds.all_features())), 3)
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['id', 'name']
++        assert ds.field_types() == ['int', 'str']
++        assert len(list(ds.all_features())) ==  3
+ 
+     # ogr2ogr tests/data/shp/3dpoint/ogr_zfield.shp
+     # tests/data/shp/3dpoint/qgis.shp -zfield id
+     def test_shapefile_point_z_from_qgis():
+         ds = mapnik.Shapefile(file='../data/shp/points/ogr_zfield.shp')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['id', 'name'])
+-        eq_(ds.field_types(), ['int', 'str'])
+-        eq_(len(list(ds.all_features())), 3)
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['id', 'name']
++        assert ds.field_types() == ['int', 'str']
++        assert len(list(ds.all_features())) ==  3
+ 
+     def test_shapefile_multipoint_from_qgis():
+         ds = mapnik.Shapefile(file='../data/shp/points/qgis_multi.shp')
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['id', 'name'])
+-        eq_(ds.field_types(), ['int', 'str'])
+-        eq_(len(list(ds.all_features())), 1)
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['id', 'name']
++        assert ds.field_types() == ['int', 'str']
++        assert len(list(ds.all_features())) ==  1
+ 
+     # pointzm from arcinfo
+     def test_shapefile_point_zm_from_arcgis():
+         ds = mapnik.Shapefile(file='../data/shp/points/poi.shp')
+-        eq_(len(ds.fields()), 7)
+-        eq_(ds.fields(),
+-            ['interst_id',
+-             'state_d',
+-             'cnty_name',
+-             'latitude',
+-             'longitude',
+-             'Name',
+-             'Website'])
+-        eq_(ds.field_types(), ['str', 'str',
+-                               'str', 'float', 'float', 'str', 'str'])
+-        eq_(len(list(ds.all_features())), 17)
++        assert len(ds.fields()) ==  7
++        assert ds.fields() == ['interst_id',
++                               'state_d',
++                               'cnty_name',
++                               'latitude',
++                               'longitude',
++                               'Name',
++                               'Website']
++        assert ds.field_types() == ['str', 'str', 'str', 'float', 'float', 'str', 'str']
++        assert len(list(ds.all_features())) ==  17
+ 
+     # copy of the above with ogr2ogr that makes m record 14 instead of 18
+     def test_shapefile_point_zm_from_ogr():
+         ds = mapnik.Shapefile(file='../data/shp/points/poi_ogr.shp')
+-        eq_(len(ds.fields()), 7)
+-        eq_(ds.fields(),
+-            ['interst_id',
+-             'state_d',
+-             'cnty_name',
+-             'latitude',
+-             'longitude',
+-             'Name',
+-             'Website'])
+-        eq_(ds.field_types(), ['str', 'str',
+-                               'str', 'float', 'float', 'str', 'str'])
+-        eq_(len(list(ds.all_features())), 17)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert len(ds.fields()) ==  7
++        assert ds.fields() == ['interst_id',
++                               'state_d',
++                               'cnty_name',
++                               'latitude',
++                               'longitude',
++                               'Name',
++                               'Website']
++        assert ds.field_types() == ['str', 'str', 'str', 'float', 'float', 'str', 'str']
++        assert len(list(ds.all_features())) ==  17
+--- a/test/python_tests/shapeindex_test.py
++++ b/test/python_tests/shapeindex_test.py
+@@ -1,24 +1,24 @@
+-#!/usr/bin/env python
+-
+ import fnmatch
+ import os
+ import shutil
+ from subprocess import PIPE, Popen
+ 
+-from nose.tools import eq_
+-
+ import mapnik
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
++    index = '../data/sqlite/world.sqlite.index'
++    if os.path.exists(index):
++        os.unlink(index)
+ 
+-
+-def test_shapeindex():
++def test_shapeindex(setup):
+     # first copy shapefiles to tmp directory
+     source_dir = '../data/shp/'
+     working_dir = '/tmp/mapnik-shp-tmp/'
+@@ -53,8 +53,4 @@ def test_shapeindex():
+                 count2 = count2 + 1
+         except StopIteration:
+             pass
+-        eq_(count, count2)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert count == count2
+--- a/test/python_tests/sqlite_rtree_test.py
++++ b/test/python_tests/sqlite_rtree_test.py
+@@ -1,27 +1,23 @@
+-#!/usr/bin/env python
+ import os
+ import sqlite3
+ import sys
+ import threading
+-
+-from nose.tools import eq_
+-
+ import mapnik
+-
+-from .utilities import execution_path, run_all
+-
+-PYTHON3 = sys.version_info[0] == 3
++import pytest
++from .utilities import execution_path
+ 
+ 
++ at pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
++
+ 
+ NUM_THREADS = 10
+ TOTAL = 245
+ 
+-
+ def create_ds(test_db, table):
+     ds = mapnik.SQLite(file=test_db, table=table)
+     ds.all_features()
+@@ -29,7 +25,7 @@ def create_ds(test_db, table):
+ 
+ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
+ 
+-    def test_rtree_creation():
++    def test_rtree_creation(setup):
+         test_db = '../data/sqlite/world.sqlite'
+         index = test_db + '.index'
+         table = 'world_merc'
+@@ -46,7 +42,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         for i in threads:
+             i.join()
+ 
+-        eq_(os.path.exists(index), True)
++        assert os.path.exists(index)
+         conn = sqlite3.connect(index)
+         cur = conn.cursor()
+         try:
+@@ -55,7 +51,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                 table.replace(
+                     "'", ""))
+             conn.commit()
+-            eq_(cur.fetchone()[0], TOTAL)
++            assert cur.fetchone()[0] == TOTAL
+         except sqlite3.OperationalError:
+             # don't worry about testing # of index records if
+             # python's sqlite module does not support rtree
+@@ -66,13 +62,13 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         ds = mapnik.SQLite(file=test_db, table=table)
+         fs = list(ds.all_features())
+         del ds
+-        eq_(len(fs), TOTAL)
++        assert len(fs) == TOTAL
+         os.unlink(index)
+         ds = mapnik.SQLite(file=test_db, table=table, use_spatial_index=False)
+         fs = list(ds.all_features())
+         del ds
+-        eq_(len(fs), TOTAL)
+-        eq_(os.path.exists(index), False)
++        assert len(fs) == TOTAL
++        assert os.path.exists(index) ==  False
+ 
+         ds = mapnik.SQLite(file=test_db, table=table, use_spatial_index=True)
+         fs = list(ds.all_features())
+@@ -82,10 +78,10 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         # for feat in fs:
+         #    query = mapnik.Query(feat.envelope())
+         #    selected = ds.features(query)
+-        #    eq_(len(selected.features)>=1,True)
++        #    assert len(selected.features)>=1 == True
+         del ds
+ 
+-        eq_(os.path.exists(index), True)
++        assert os.path.exists(index) ==  True
+         os.unlink(index)
+ 
+     test_rtree_creation.requires_data = True
+@@ -148,17 +144,17 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+ 
+         # confirm the wkb matches a manually formed wkb
+         wkb2 = make_wkb_point(x, y)
+-        eq_(wkb, wkb2)
++        assert wkb ==  wkb2
+ 
+         # ensure we can read this data back out properly with mapnik
+         ds = mapnik.Datasource(
+             **{'type': 'sqlite', 'file': test_db, 'table': 'point_table'})
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat['name'], 'test point')
++        assert feat.id() ==  1
++        assert feat['name'] == 'test point'
+         geom = feat.geometry
+-        eq_(geom.to_wkt(), 'POINT(-122 48)')
++        assert geom.to_wkt() == 'POINT(-122 48)'
+         del ds
+ 
+         # ensure it matches data read with just sqlite
+@@ -169,19 +165,12 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         result = cur.fetchone()
+         cur.close()
+         feat_id = result[0]
+-        eq_(feat_id, 1)
++        assert feat_id ==  1
+         name = result[2]
+-        eq_(name, 'test point')
++        assert name ==  'test point'
+         geom_wkb_blob = result[1]
+-        if not PYTHON3:
+-            geom_wkb_blob = str(geom_wkb_blob)
+-        eq_(geom_wkb_blob, geom.to_wkb(mapnik.wkbByteOrder.NDR))
++        assert geom_wkb_blob == geom.to_wkb(mapnik.wkbByteOrder.NDR)
+         new_geom = mapnik.Geometry.from_wkb(geom_wkb_blob)
+-        eq_(new_geom.to_wkt(), geom.to_wkt())
++        assert new_geom.to_wkt() == geom.to_wkt()
+         conn.close()
+         os.unlink(test_db)
+-
+-if __name__ == "__main__":
+-    setup()
+-    returncode = run_all(eval(x) for x in dir() if x.startswith("test_"))
+-    exit(returncode)
+--- a/test/python_tests/sqlite_test.py
++++ b/test/python_tests/sqlite_test.py
+@@ -1,28 +1,21 @@
+-#!/usr/bin/env python
+-
+ import os
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
++import pytest
++from .utilities import execution_path
+ 
+-from .utilities import execution_path, run_all
+-
+-
+-def setup():
++ at pytest.fixture(scope="module")
++def setup_and_teardown():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
+-
+-
+-def teardown():
++    yield
+     index = '../data/sqlite/world.sqlite.index'
+     if os.path.exists(index):
+         os.unlink(index)
+ 
+ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
+ 
+-    def test_attachdb_with_relative_file():
++    def test_attachdb_with_relative_file(setup_and_teardown):
+         # The point table and index is in the qgis_spatiallite.sqlite
+         # database.  If either is not found, then this fails
+         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+@@ -31,7 +24,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['pkuid'], 1)
++        assert feature['pkuid'] == 1
+ 
+     test_attachdb_with_relative_file.requires_data = True
+ 
+@@ -52,7 +45,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         except StopIteration:
+             pass
+         # the above should not throw but will result in no features
+-        eq_(feature, None)
++        assert feature == None
+ 
+     test_attachdb_with_multiple_files.requires_data = True
+ 
+@@ -65,7 +58,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['pkuid'], 1)
++        assert feature['pkuid'] ==  1
+ 
+     test_attachdb_with_absolute_file.requires_data = True
+ 
+@@ -86,7 +79,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_attachdb_with_index.requires_data = True
+ 
+@@ -107,7 +100,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_attachdb_with_explicit_index.requires_data = True
+ 
+@@ -116,70 +109,68 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
+                            attachdb='busines at business.sqlite'
+                            )
+-        eq_(len(ds.fields()), 29)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat',
+-             'ISO3:1',
+-             '1995',
+-             '1996',
+-             '1997',
+-             '1998',
+-             '1999',
+-             '2000',
+-             '2001',
+-             '2002',
+-             '2003',
+-             '2004',
+-             '2005',
+-             '2006',
+-             '2007',
+-             '2008',
+-             '2009',
+-             '2010'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int'])
++        assert len(ds.fields()) ==  29
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat',
++                               'ISO3:1',
++                               '1995',
++                               '1996',
++                               '1997',
++                               '1998',
++                               '1999',
++                               '2000',
++                               '2001',
++                               '2002',
++                               '2003',
++                               '2004',
++                               '2005',
++                               '2006',
++                               '2007',
++                               '2008',
++                               '2009',
++                               '2010']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int']
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature.id(), 1)
++        assert feature.id() ==  1
+         expected = {
+             1995: 0,
+             1996: 0,
+@@ -215,7 +206,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         }
+         for k, v in expected.items():
+             try:
+-                eq_(feature[str(k)], v)
++                assert feature[str(k)] ==  v
+             except:
+                 #import pdb;pdb.set_trace()
+                 print('invalid key/v %s/%s for: %s' % (k, v, feature))
+@@ -227,68 +218,66 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
+                            attachdb='busines@business.sqlite'
+                            )
+-        eq_(len(ds.fields()), 29)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat',
+-             'ISO3:1',
+-             '1995',
+-             '1996',
+-             '1997',
+-             '1998',
+-             '1999',
+-             '2000',
+-             '2001',
+-             '2002',
+-             '2003',
+-             '2004',
+-             '2005',
+-             '2006',
+-             '2007',
+-             '2008',
+-             '2009',
+-             '2010'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int'])
+-        eq_(len(list(ds.all_features())), 100)
++        assert len(ds.fields()) ==  29
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat',
++                               'ISO3:1',
++                               '1995',
++                               '1996',
++                               '1997',
++                               '1998',
++                               '1999',
++                               '2000',
++                               '2001',
++                               '2002',
++                               '2003',
++                               '2004',
++                               '2005',
++                               '2006',
++                               '2007',
++                               '2008',
++                               '2009',
++                               '2010']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int']
++        assert len(list(ds.all_features())) ==  100
+ 
+     test_attachdb_with_sql_join_count.requires_data = True
+ 
+@@ -302,68 +291,66 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                            attachdb='busines@business.sqlite'
+                            )
+-        eq_(len(ds.fields()), 29)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat',
+-             'ISO3:1',
+-             '1995',
+-             '1996',
+-             '1997',
+-             '1998',
+-             '1999',
+-             '2000',
+-             '2001',
+-             '2002',
+-             '2003',
+-             '2004',
+-             '2005',
+-             '2006',
+-             '2007',
+-             '2008',
+-             '2009',
+-             '2010'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int'])
+-        eq_(len(list(ds.all_features())), 192)
++        assert len(ds.fields()) ==  29
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat',
++                               'ISO3:1',
++                               '1995',
++                               '1996',
++                               '1997',
++                               '1998',
++                               '1999',
++                               '2000',
++                               '2001',
++                               '2002',
++                               '2003',
++                               '2004',
++                               '2005',
++                               '2006',
++                               '2007',
++                               '2008',
++                               '2009',
++                               '2010']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int']
++        assert len(list(ds.all_features())) ==  192
+ 
+     test_attachdb_with_sql_join_count2.requires_data = True
+ 
+@@ -375,68 +362,66 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            table='(select * from (select * from world_merc where !intersects!) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                            attachdb='busines@business.sqlite'
+                            )
+-        eq_(len(ds.fields()), 29)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat',
+-             'ISO3:1',
+-             '1995',
+-             '1996',
+-             '1997',
+-             '1998',
+-             '1999',
+-             '2000',
+-             '2001',
+-             '2002',
+-             '2003',
+-             '2004',
+-             '2005',
+-             '2006',
+-             '2007',
+-             '2008',
+-             '2009',
+-             '2010'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int'])
+-        eq_(len(list(ds.all_features())), 192)
++        assert len(ds.fields()) ==  29
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat',
++                               'ISO3:1',
++                               '1995',
++                               '1996',
++                               '1997',
++                               '1998',
++                               '1999',
++                               '2000',
++                               '2001',
++                               '2002',
++                               '2003',
++                               '2004',
++                               '2005',
++                               '2006',
++                               '2007',
++                               '2008',
++                               '2009',
++                               '2010']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int']
++        assert len(list(ds.all_features())) ==  192
+ 
+     test_attachdb_with_sql_join_count3.requires_data = True
+ 
+@@ -448,68 +433,66 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            table='(select * from (select * from world_merc where !intersects! limit 1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                            attachdb='busines@business.sqlite'
+                            )
+-        eq_(len(ds.fields()), 29)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat',
+-             'ISO3:1',
+-             '1995',
+-             '1996',
+-             '1997',
+-             '1998',
+-             '1999',
+-             '2000',
+-             '2001',
+-             '2002',
+-             '2003',
+-             '2004',
+-             '2005',
+-             '2006',
+-             '2007',
+-             '2008',
+-             '2009',
+-             '2010'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'int'])
+-        eq_(len(list(ds.all_features())), 1)
++        assert len(ds.fields()) ==  29
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat',
++                               'ISO3:1',
++                               '1995',
++                               '1996',
++                               '1997',
++                               '1998',
++                               '1999',
++                               '2000',
++                               '2001',
++                               '2002',
++                               '2003',
++                               '2004',
++                               '2005',
++                               '2006',
++                               '2007',
++                               '2008',
++                               '2009',
++                               '2010']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int']
++        assert len(list(ds.all_features())) ==  1
+ 
+     test_attachdb_with_sql_join_count4.requires_data = True
+ 
+@@ -523,34 +506,32 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+         # nothing is able to join to business so we don't pick up business
+         # schema
+-        eq_(len(ds.fields()), 12)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'fips',
+-             'iso2',
+-             'iso3',
+-             'un',
+-             'name',
+-             'area',
+-             'pop2005',
+-             'region',
+-             'subregion',
+-             'lon',
+-             'lat'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'str',
+-             'str',
+-             'str',
+-             'int',
+-             'str',
+-             'int',
+-             'int',
+-             'int',
+-             'int',
+-             'float',
+-             'float'])
+-        eq_(len(list(ds.all_features())), 0)
++        assert len(ds.fields()) ==  12
++        assert ds.fields() == ['OGC_FID',
++                               'fips',
++                               'iso2',
++                               'iso3',
++                               'un',
++                               'name',
++                               'area',
++                               'pop2005',
++                               'region',
++                               'subregion',
++                               'lon',
++                               'lat']
++        assert ds.field_types() == ['int',
++                                    'str',
++                                    'str',
++                                    'str',
++                                    'int',
++                                    'str',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'int',
++                                    'float',
++                                    'float']
++        assert len(list(ds.all_features())) ==  0
+ 
+     test_attachdb_with_sql_join_count5.requires_data = True
+ 
+@@ -560,52 +541,52 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['OGC_FID'], 1)
+-        eq_(feature['fips'], u'AC')
+-        eq_(feature['iso2'], u'AG')
+-        eq_(feature['iso3'], u'ATG')
+-        eq_(feature['un'], 28)
+-        eq_(feature['name'], u'Antigua and Barbuda')
+-        eq_(feature['area'], 44)
+-        eq_(feature['pop2005'], 83039)
+-        eq_(feature['region'], 19)
+-        eq_(feature['subregion'], 29)
+-        eq_(feature['lon'], -61.783)
+-        eq_(feature['lat'], 17.078)
++        assert feature['OGC_FID'] ==  1
++        assert feature['fips'] ==  u'AC'
++        assert feature['iso2'] ==  u'AG'
++        assert feature['iso3'] ==  u'ATG'
++        assert feature['un'] ==  28
++        assert feature['name'] ==  u'Antigua and Barbuda'
++        assert feature['area'] ==  44
++        assert feature['pop2005'] ==  83039
++        assert feature['region'] ==  19
++        assert feature['subregion'] ==  29
++        assert feature['lon'] ==  -61.783
++        assert feature['lat'] ==  17.078
+ 
+         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+                            table='(select * from world_merc)',
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['OGC_FID'], 1)
+-        eq_(feature['fips'], u'AC')
+-        eq_(feature['iso2'], u'AG')
+-        eq_(feature['iso3'], u'ATG')
+-        eq_(feature['un'], 28)
+-        eq_(feature['name'], u'Antigua and Barbuda')
+-        eq_(feature['area'], 44)
+-        eq_(feature['pop2005'], 83039)
+-        eq_(feature['region'], 19)
+-        eq_(feature['subregion'], 29)
+-        eq_(feature['lon'], -61.783)
+-        eq_(feature['lat'], 17.078)
++        assert feature['OGC_FID'] ==  1
++        assert feature['fips'] ==  u'AC'
++        assert feature['iso2'] ==  u'AG'
++        assert feature['iso3'] ==  u'ATG'
++        assert feature['un'] ==  28
++        assert feature['name'] ==  u'Antigua and Barbuda'
++        assert feature['area'] ==  44
++        assert feature['pop2005'] ==  83039
++        assert feature['region'] ==  19
++        assert feature['subregion'] ==  29
++        assert feature['lon'] ==  -61.783
++        assert feature['lat'] ==  17.078
+ 
+         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+                            table='(select OGC_FID,GEOMETRY from world_merc)',
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['OGC_FID'], 1)
+-        eq_(len(feature), 1)
++        assert feature['OGC_FID'] ==  1
++        assert len(feature) ==  1
+ 
+         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+                            table='(select GEOMETRY,OGC_FID,fips from world_merc)',
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['OGC_FID'], 1)
+-        eq_(feature['fips'], u'AC')
++        assert feature['OGC_FID'] ==  1
++        assert feature['fips'] ==  u'AC'
+ 
+         # same as above, except with alias like postgres requires
+         # TODO - should we try to make this work?
+@@ -615,16 +596,16 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+         #    )
+         #fs = ds.featureset()
+         #feature = fs.next()
+-        # eq_(feature['aliased_id'],1)
+-        # eq_(feature['fips'],u'AC')
++        # assert feature['aliased_id'] == 1
++        # assert feature['fips'] == u'AC'
+ 
+         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+                            table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)',
+                            )
+         fs = ds.featureset()
+         feature = fs.next()
+-        eq_(feature['rowid'], 1)
+-        eq_(feature['fips'], u'AC')
++        assert feature['rowid'] ==  1
++        assert feature['fips'] ==  u'AC'
+ 
+     test_subqueries.requires_data = True
+ 
+@@ -638,74 +619,73 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_empty_db.requires_data = True
+ 
+-    @raises(RuntimeError)
++
+     def test_that_nonexistant_query_field_throws(**kwargs):
+         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
+                            table='empty',
+                            )
+-        eq_(len(ds.fields()), 25)
+-        eq_(ds.fields(),
+-            ['OGC_FID',
+-             'scalerank',
+-             'labelrank',
+-             'featurecla',
+-             'sovereignt',
+-             'sov_a3',
+-             'adm0_dif',
+-             'level',
+-             'type',
+-             'admin',
+-             'adm0_a3',
+-             'geou_dif',
+-             'name',
+-             'abbrev',
+-             'postal',
+-             'name_forma',
+-             'terr_',
+-             'name_sort',
+-             'map_color',
+-             'pop_est',
+-             'gdp_md_est',
+-             'fips_10_',
+-             'iso_a2',
+-             'iso_a3',
+-             'iso_n3'])
+-        eq_(ds.field_types(),
+-            ['int',
+-             'int',
+-             'int',
+-             'str',
+-             'str',
+-             'str',
+-             'float',
+-             'float',
+-             'str',
+-             'str',
+-             'str',
+-             'float',
+-             'str',
+-             'str',
+-             'str',
+-             'str',
+-             'str',
+-             'str',
+-             'float',
+-             'float',
+-             'float',
+-             'float',
+-             'str',
+-             'str',
+-             'float'])
++        assert len(ds.fields()) ==  25
++        assert ds.fields() == ['OGC_FID',
++                               'scalerank',
++                               'labelrank',
++                               'featurecla',
++                               'sovereignt',
++                               'sov_a3',
++                               'adm0_dif',
++                               'level',
++                               'type',
++                               'admin',
++                               'adm0_a3',
++                               'geou_dif',
++                               'name',
++                               'abbrev',
++                               'postal',
++                               'name_forma',
++                               'terr_',
++                               'name_sort',
++                               'map_color',
++                               'pop_est',
++                               'gdp_md_est',
++                               'fips_10_',
++                               'iso_a2',
++                               'iso_a3',
++                               'iso_n3']
++        assert ds.field_types() ==  ['int',
++                                     'int',
++                                     'int',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'float',
++                                     'float',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'float',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'str',
++                                     'float',
++                                     'float',
++                                     'float',
++                                     'float',
++                                     'str',
++                                     'str',
++                                     'float']
+         query = mapnik.Query(ds.envelope())
+         for fld in ds.fields():
+             query.add_property_name(fld)
+         # also add an invalid one, triggering throw
+         query.add_property_name('bogus')
+-        ds.features(query)
++        with pytest.raises(RuntimeError):
++            ds.features(query)
+ 
+     test_that_nonexistant_query_field_throws.requires_data = True
+ 
+@@ -719,7 +699,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_intersects_token1.requires_data = True
+ 
+@@ -733,7 +713,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_intersects_token2.requires_data = True
+ 
+@@ -747,7 +727,7 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+ 
+     test_intersects_token3.requires_data = True
+ 
+@@ -766,15 +746,15 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            use_spatial_index=False,
+                            key_field='alias'
+                            )
+-        eq_(len(ds.fields()), 1)
+-        eq_(ds.fields(), ['alias'])
+-        eq_(ds.field_types(), ['str'])
++        assert len(ds.fields()) ==  1
++        assert ds.fields() ==  ['alias']
++        assert ds.field_types() ==  ['str']
+         fs = list(ds.all_features())
+-        eq_(len(fs), 1)
++        assert len(fs) ==  1
+         feat = fs[0]
+-        eq_(feat.id(), 0)  # should be 1?
+-        eq_(feat['alias'], 'test')
+-        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
++        assert feat.id() ==  0  # should be 1?
++        assert feat['alias'] ==  'test'
++        assert feat.geometry.to_wkt() ==  'POINT(0 0)'
+ 
+     def test_db_with_one_untyped_column():
+         # form up an in-memory test db
+@@ -791,9 +771,9 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+ 
+         # ensure the untyped column is found
+-        eq_(len(ds.fields()), 2)
+-        eq_(ds.fields(), ['rowid', 'untyped'])
+-        eq_(ds.field_types(), ['int', 'str'])
++        assert len(ds.fields()) ==  2
++        assert ds.fields() == ['rowid', 'untyped']
++        assert ds.field_types() == ['int', 'str']
+ 
+     def test_db_with_one_untyped_column_using_subquery():
+         # form up an in-memory test db
+@@ -810,27 +790,27 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+                            )
+ 
+         # ensure the untyped column is found
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['rowid', 'untyped', 'rowid'])
+-        eq_(ds.field_types(), ['int', 'str', 'int'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['rowid', 'untyped', 'rowid']
++        assert ds.field_types() == ['int', 'str', 'int']
+ 
+     def test_that_64bit_int_fields_work():
+         ds = mapnik.SQLite(file='../data/sqlite/64bit_int.sqlite',
+                            table='int_table',
+                            use_spatial_index=False
+                            )
+-        eq_(len(ds.fields()), 3)
+-        eq_(ds.fields(), ['OGC_FID', 'id', 'bigint'])
+-        eq_(ds.field_types(), ['int', 'int', 'int'])
++        assert len(ds.fields()) ==  3
++        assert ds.fields() == ['OGC_FID', 'id', 'bigint']
++        assert ds.field_types() == ['int', 'int', 'int']
+         fs = ds.featureset()
+         feat = fs.next()
+-        eq_(feat.id(), 1)
+-        eq_(feat['OGC_FID'], 1)
+-        eq_(feat['bigint'], 2147483648)
++        assert feat.id() ==  1
++        assert feat['OGC_FID'] ==  1
++        assert feat['bigint'] ==  2147483648
+         feat = fs.next()
+-        eq_(feat.id(), 2)
+-        eq_(feat['OGC_FID'], 2)
+-        eq_(feat['bigint'], 922337203685477580)
++        assert feat.id() ==  2
++        assert feat['OGC_FID'] ==  2
++        assert feat['bigint'] ==  922337203685477580
+ 
+     test_that_64bit_int_fields_work.requires_data = True
+ 
+@@ -860,11 +840,5 @@ if 'sqlite' in mapnik.DatasourceCache.pl
+             feature = fs.next()
+         except StopIteration:
+             pass
+-        eq_(feature, None)
++        assert feature ==  None
+         mapnik.logger.set_severity(default_logging_severity)
+-
+-if __name__ == "__main__":
+-    setup()
+-    result = run_all(eval(x) for x in dir() if x.startswith("test_"))
+-    teardown()
+-    exit(result)
+--- a/test/python_tests/style_test.py
++++ b/test/python_tests/style_test.py
+@@ -1,21 +1,10 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-
+-from nose.tools import eq_
+-
+ import mapnik
+ 
+-from .utilities import run_all
+-
+-
+ def test_style_init():
+     s = mapnik.Style()
+-    eq_(s.filter_mode, mapnik.filter_mode.ALL)
+-    eq_(len(s.rules), 0)
+-    eq_(s.opacity, 1)
+-    eq_(s.comp_op, None)
+-    eq_(s.image_filters, "")
+-    eq_(s.image_filters_inflate, False)
+-
+-if __name__ == "__main__":
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++    assert s.filter_mode ==  mapnik.filter_mode.ALL
++    assert len(s.rules) ==  0
++    assert s.opacity ==  1
++    assert s.comp_op ==  None
++    assert s.image_filters ==  ""
++    assert not s.image_filters_inflate
+--- a/test/python_tests/topojson_plugin_test.py
++++ b/test/python_tests/topojson_plugin_test.py
+@@ -1,70 +1,48 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-from __future__ import absolute_import, print_function
+-
+-import os
+-
+-from nose.tools import assert_almost_equal, eq_
+-
+ import mapnik
++import pytest
++import os
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if 'topojson' in mapnik.DatasourceCache.plugin_names():
+ 
+-    def test_topojson_init():
++    def test_topojson_init(setup):
+         # topojson tests/data/json/escaped.geojson -o tests/data/topojson/escaped.topojson --properties
+         # topojson version 1.4.2
+         ds = mapnik.Datasource(
+             type='topojson',
+             file='../data/topojson/escaped.topojson')
+         e = ds.envelope()
+-        assert_almost_equal(e.minx, -81.705583, places=7)
+-        assert_almost_equal(e.miny, 41.480573, places=6)
+-        assert_almost_equal(e.maxx, -81.705583, places=5)
+-        assert_almost_equal(e.maxy, 41.480573, places=3)
++        assert e.minx == pytest.approx(-81.705583, abs=1e-7)
++        assert e.miny == pytest.approx(41.480573, abs=1e-6)
++        assert e.maxx == pytest.approx(-81.705583, abs=1e-5)
++        assert e.maxy == pytest.approx(41.480573, abs=1e-3)
+ 
+     def test_topojson_properties():
+-        ds = mapnik.Datasource(
+-            type='topojson',
+-            file='../data/topojson/escaped.topojson')
+-        f = list(ds.features_at_point(ds.envelope().center()))[0]
+-        eq_(len(ds.fields()), 11)
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
+-
+-        ds = mapnik.Datasource(
+-            type='topojson',
+-            file='../data/topojson/escaped.topojson')
+-        f = list(ds.all_features())[0]
+-        eq_(len(ds.fields()), 11)
+-
+-        desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+-
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++         ds = mapnik.Datasource(
++             type='topojson',
++             file='../data/topojson/escaped.topojson')
++
++         f = list(ds.features_at_point(ds.envelope().center()))[0]
++         assert len(ds.fields()) ==  11
++         desc = ds.describe()
++         assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
++
++         assert  f['name'] == u'Test'
++         assert  f['int'] ==  1
++         assert  f['description'] == u'Test: \u005C'
++         assert  f['spaces'] ==  u'this has spaces'
++         assert  f['double'] == 1.1
++         assert  f['boolean'] == True
++         assert  f['NOM_FR'] == u'Qu\xe9bec'
++         assert  f['NOM_FR'] == u'Qu?bec'
+ 
+     def test_geojson_from_in_memory_string():
+         ds = mapnik.Datasource(
+@@ -72,42 +50,43 @@ if 'topojson' in mapnik.DatasourceCache.
+             inline=open(
+                 '../data/topojson/escaped.topojson',
+                 'r').read())
+-        f = list(ds.all_features())[0]
+-        eq_(len(ds.fields()), 11)
+-
++        f = list(ds.features_at_point(ds.envelope().center()))[0]
++        assert len(ds.fields()) ==  11
+         desc = ds.describe()
+-        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
++        assert desc['geometry_type'] ==  mapnik.DataGeometryType.Point
+ 
+-        eq_(f['name'], u'Test')
+-        eq_(f['int'], 1)
+-        eq_(f['description'], u'Test: \u005C')
+-        eq_(f['spaces'], u'this has spaces')
+-        eq_(f['double'], 1.1)
+-        eq_(f['boolean'], True)
+-        eq_(f['NOM_FR'], u'Qu\xe9bec')
+-        eq_(f['NOM_FR'], u'Qu?bec')
++        assert  f['name'] == u'Test'
++        assert  f['int'] ==  1
++        assert  f['description'] == u'Test: \u005C'
++        assert  f['spaces'] ==  u'this has spaces'
++        assert  f['double'] == 1.1
++        assert  f['boolean'] == True
++        assert  f['NOM_FR'] == u'Qu\xe9bec'
++        assert  f['NOM_FR'] == u'Qu?bec'
+ 
+-#    @raises(RuntimeError)
++    #@raises(RuntimeError)
+     def test_that_nonexistant_query_field_throws(**kwargs):
++        #with pytest.raises(RuntimeError):
+         ds = mapnik.Datasource(
+             type='topojson',
+             file='../data/topojson/escaped.topojson')
+-        eq_(len(ds.fields()), 11)
++        assert len(ds.fields()) ==  11
+         # TODO - this sorting is messed up
+-        eq_(ds.fields(), ['name', 'int', 'description',
+-                          'spaces', 'double', 'boolean', 'NOM_FR',
+-                          'object', 'array', 'empty_array', 'empty_object'])
+-        eq_(ds.field_types(), ['str', 'int',
+-                               'str', 'str', 'float', 'bool', 'str',
+-                               'str', 'str', 'str', 'str'])
+-# TODO - should topojson plugin throw like others?
+-#        query = mapnik.Query(ds.envelope())
+-#        for fld in ds.fields():
+-#            query.add_property_name(fld)
+-#        # also add an invalid one, triggering throw
+-#        query.add_property_name('bogus')
+-#        fs = ds.features(query)
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
++        assert ds.fields() == ['name', 'int', 'description',
++                               'spaces', 'double', 'boolean', 'NOM_FR',
++                               'object', 'array', 'empty_array', 'empty_object']
++        assert ds.field_types() == ['str', 'int',
++                                    'str', 'str', 'float', 'bool', 'str',
++                                    'str', 'str', 'str', 'str']
++        # TODO - should topojson plugin throw like others?
++        query = mapnik.Query(ds.envelope())
++        for fld in ds.fields():
++            query.add_property_name(fld)
++        # also add an invalid one, triggering throw
++        query.add_property_name('bogus')
++        fs = ds.features(query)
++
++
++#if __name__ == "__main__":
++    #setup()
++#    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
+--- a/test/python_tests/utilities.py
++++ b/test/python_tests/utilities.py
+@@ -4,35 +4,16 @@
+ import os
+ import sys
+ import traceback
+-
+-from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
+-from nose.tools import assert_almost_equal
+-
+ import mapnik
++import pytest
+ 
+-PYTHON3 = sys.version_info[0] == 3
+-READ_FLAGS = 'rb' if PYTHON3 else 'r'
+-if PYTHON3:
+-    xrange = range
+-
++READ_FLAGS = 'rb'
+ HERE = os.path.dirname(__file__)
+ 
+-
+ def execution_path(filename):
+     return os.path.join(os.path.dirname(
+         sys._getframe(1).f_code.co_filename), filename)
+ 
+-
+-class Todo(Exception):
+-    pass
+-
+-
+-class TodoPlugin(ErrorClassPlugin):
+-    name = "todo"
+-
+-    todo = ErrorClass(Todo, label='TODO', isfailure=False)
+-
+-
+ def contains_word(word, bytestring_):
+     """
+     Checks that a bytestring contains a given word. len(bytestring) should be
+@@ -51,7 +32,7 @@ def contains_word(word, bytestring_):
+     """
+     n = len(word)
+     assert len(bytestring_) % n == 0, "len(bytestring_) not multiple of len(word)"
+-    chunks = [bytestring_[i:i + n] for i in xrange(0, len(bytestring_), n)]
++    chunks = [bytestring_[i:i + n] for i in range(0, len(bytestring_), n)]
+     return word in chunks
+ 
+ 
+@@ -77,29 +58,6 @@ def get_unique_colors(im):
+     pixels = sorted(pixels)
+     return list(map(pixel2rgba, pixels))
+ 
+-
+-def run_all(iterable):
+-    failed = 0
+-    for test in iterable:
+-        try:
+-            test()
+-            sys.stderr.write("\x1b[32m? \x1b[m" + test.__name__ + "\x1b[m\n")
+-        except:
+-            exc_type, exc_value, exc_tb = sys.exc_info()
+-            failed += 1
+-            sys.stderr.write("\x1b[31m? \x1b[m" + test.__name__ + "\x1b[m\n")
+-            for mline in traceback.format_exception_only(exc_type, exc_value):
+-                for line in mline.rstrip().split("\n"):
+-                    sys.stderr.write("  \x1b[31m" + line + "\x1b[m\n")
+-            sys.stderr.write("  Traceback:\n")
+-            for mline in traceback.format_tb(exc_tb):
+-                for line in mline.rstrip().split("\n"):
+-                    if not 'utilities.py' in line and not 'trivial.py' in line and not line.strip() == 'test()':
+-                        sys.stderr.write("  " + line + "\n")
+-        sys.stderr.flush()
+-    return failed
+-
+-
+ def side_by_side_image(left_im, right_im):
+     width = left_im.width() + 1 + right_im.width()
+     height = max(left_im.height(), right_im.height())
+@@ -135,7 +93,19 @@ def side_by_side_image(left_im, right_im
+ 
+ def assert_box2d_almost_equal(a, b, msg=None):
+     msg = msg or ("%r != %r" % (a, b))
+-    assert_almost_equal(a.minx, b.minx, msg=msg)
+-    assert_almost_equal(a.maxx, b.maxx, msg=msg)
+-    assert_almost_equal(a.miny, b.miny, msg=msg)
+-    assert_almost_equal(a.maxy, b.maxy, msg=msg)
++    assert a.minx == pytest.approx(b.minx, abs=1e-2), msg
++    assert a.maxx == pytest.approx(b.maxx, abs=1e-2), msg
++    assert a.miny == pytest.approx(b.miny, abs=1e-2), msg
++    assert a.maxy == pytest.approx(b.maxy, abs=1e-2), msg
++
++
++def images_almost_equal(image1, image2, tolerance = 1):
++    def rgba(p):
++        return p & 0xff,(p >> 8) & 0xff,(p >> 16) & 0xff, p >> 24
++    assert image1.width()  == image2.width()
++    assert image1.height() == image2.height()
++    for x in range(image1.width()):
++        for y in range(image1.height()):
++            p1 = image1.get_pixel(x, y)
++            p2 = image2.get_pixel(x, y)
++            assert rgba(p1) == pytest.approx(rgba(p2), abs = tolerance)
+--- a/test/python_tests/webp_encoding_test.py
++++ b/test/python_tests/webp_encoding_test.py
+@@ -1,20 +1,15 @@
+-#!/usr/bin/env python
+-# -*- coding: utf-8 -*-
+-from __future__ import absolute_import, print_function
+-
+-import os
+-
+-from nose.tools import eq_, raises
+-
+ import mapnik
++import os
++import pytest
+ 
+-from .utilities import execution_path, run_all
+-
++from .utilities import execution_path
+ 
++@pytest.fixture(scope="module")
+ def setup():
+     # All of the paths used are relative, if we run the tests
+     # from another directory we need to chdir()
+     os.chdir(execution_path('.'))
++    yield
+ 
+ if mapnik.has_webp():
+     tmp_dir = '/tmp/mapnik-webp/'
+@@ -48,26 +43,28 @@ if mapnik.has_webp():
+         return os.path.join('images/support/encoding-opts',
+                             name + '-' + format.replace(":", "+") + '.webp')
+ 
+-    def test_quality_threshold():
++    def test_quality_threshold(setup):
+         im = mapnik.Image(256, 256)
+         im.tostring('webp:quality=99.99000')
+         im.tostring('webp:quality=0')
+         im.tostring('webp:quality=0.001')
+ 
+-    @raises(RuntimeError)
++
+     def test_quality_threshold_invalid():
+         im = mapnik.Image(256, 256)
+-        im.tostring('webp:quality=101')
++        with pytest.raises(RuntimeError):
++            im.tostring('webp:quality=101')
++
+ 
+-    @raises(RuntimeError)
+     def test_quality_threshold_invalid2():
+         im = mapnik.Image(256, 256)
+-        im.tostring('webp:quality=-1')
++        with pytest.raises(RuntimeError):
++            im.tostring('webp:quality=-1')
+ 
+-    @raises(RuntimeError)
+     def test_quality_threshold_invalid3():
+         im = mapnik.Image(256, 256)
+-        im.tostring('webp:quality=101.1')
++        with pytest.raises(RuntimeError):
++            im.tostring('webp:quality=101.1')
+ 
+     generate = os.environ.get('UPDATE')
+ 
+@@ -139,7 +136,7 @@ if mapnik.has_webp():
+                         '%s (actual) not == to %s (expected)' %
+                         (actual, expected))
+             # disabled to avoid failures on ubuntu when using old webp packages
+-            # eq_(fails,[],'\n'+'\n'.join(fails))
++            # assert fails == [], '\n'+'\n'.join(fails)
+         except RuntimeError as e:
+             print(e)
+ 
+@@ -175,11 +172,6 @@ if mapnik.has_webp():
+                 print(
+                     'warning, cannot open webp expected image (your libwebp is likely too old)')
+                 return
+-            eq_(t0_len, len(expected_bytes))
++            assert t0_len ==  len(expected_bytes)
+         except RuntimeError as e:
+             print(e)
+-
+-
+-if __name__ == "__main__":
+-    setup()
+-    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))


More information about the Pkg-grass-devel mailing list