[mapproxy] 01/09: Imported Upstream version 1.10.0

Bas Couwenberg sebastic at debian.org
Thu May 18 18:12:01 UTC 2017


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository mapproxy.

commit 3dd167968267daeac041875365a54468838f8e86
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Thu May 18 18:36:14 2017 +0200

    Imported Upstream version 1.10.0
---
 .github/ISSUE_TEMPLATE.md                          |  37 ++
 .gitignore                                         |   2 +
 .travis.yml                                        |  13 +-
 AUTHORS.txt                                        |   3 +-
 CHANGES.txt                                        |  48 ++
 README.rst                                         |   8 +-
 doc/auth.rst                                       |  14 +-
 doc/caches.rst                                     | 244 ++++++++-
 doc/conf.py                                        |   4 +-
 doc/configuration.rst                              |  32 +-
 doc/coverages.rst                                  |  93 +++-
 doc/deployment.rst                                 |   6 +-
 doc/install.rst                                    |   2 +-
 doc/install_windows.rst                            |  36 +-
 doc/mapproxy_util.rst                              |  12 +-
 doc/seed.rst                                       |  47 +-
 doc/services.rst                                   |   4 +-
 doc/sources.rst                                    |  44 +-
 doc/yaml/cache_conf.yaml                           |  37 +-
 doc/yaml/grid_conf.yaml                            |   7 +-
 doc/yaml/merged_conf.yaml                          |   7 +-
 doc/yaml/meta_conf.yaml                            |   7 +-
 doc/yaml/simple_conf.yaml                          |   7 +-
 mapproxy/cache/__init__.py                         |  14 +-
 mapproxy/cache/compact.py                          | 302 +++++++++++
 mapproxy/cache/file.py                             | 177 +-----
 mapproxy/cache/geopackage.py                       | 604 +++++++++++++++++++++
 mapproxy/cache/mbtiles.py                          |  83 ++-
 mapproxy/cache/path.py                             | 226 ++++++++
 mapproxy/cache/redis.py                            |  88 +++
 mapproxy/cache/renderd.py                          |   4 +
 mapproxy/cache/s3.py                               | 170 ++++++
 mapproxy/cache/tile.py                             |  97 +++-
 mapproxy/client/arcgis.py                          |  44 ++
 mapproxy/client/wms.py                             |  11 +-
 mapproxy/compat/image.py                           |  46 +-
 mapproxy/config/coverage.py                        |  38 +-
 mapproxy/config/defaults.py                        |   1 +
 mapproxy/config/loader.py                          | 179 +++++-
 mapproxy/config/spec.py                            |  54 +-
 .../config_template/base_config/full_example.yaml  |   8 +-
 mapproxy/config_template/paster/etc/mapproxy.yaml  |  12 +-
 mapproxy/featureinfo.py                            |  41 +-
 mapproxy/grid.py                                   |  15 +-
 mapproxy/image/mask.py                             |  27 +-
 mapproxy/image/merge.py                            |  61 ++-
 mapproxy/image/tile.py                             |  30 +-
 mapproxy/image/transform.py                        |  12 +-
 mapproxy/layer.py                                  |   2 +-
 mapproxy/request/arcgis.py                         | 144 ++++-
 mapproxy/request/wms/__init__.py                   |   2 +
 mapproxy/response.py                               |   2 +-
 mapproxy/script/export.py                          |  22 +-
 mapproxy/script/scales.py                          |   4 +-
 mapproxy/seed/cleanup.py                           | 104 +++-
 mapproxy/seed/script.py                            | 135 ++++-
 mapproxy/seed/seeder.py                            | 151 +++---
 mapproxy/seed/util.py                              |  91 +---
 mapproxy/service/templates/wmts100capabilities.xml |   2 +-
 mapproxy/service/wms.py                            |   4 +-
 mapproxy/source/arcgis.py                          |  19 +-
 mapproxy/test/http.py                              |  92 +++-
 mapproxy/test/system/fixture/arcgis.yaml           |  20 +-
 mapproxy/test/system/fixture/cache.gpkg            | Bin 0 -> 45056 bytes
 mapproxy/test/system/fixture/cache_geopackage.yaml |  56 ++
 mapproxy/test/system/fixture/cache_s3.yaml         |  58 ++
 mapproxy/test/system/fixture/coverage.yaml         |   4 +-
 mapproxy/test/system/fixture/formats.yaml          |   4 +-
 mapproxy/test/system/fixture/inspire.yaml          |   4 +-
 mapproxy/test/system/fixture/inspire_full.yaml     |   4 +-
 mapproxy/test/system/fixture/layer.yaml            |   4 +-
 mapproxy/test/system/fixture/legendgraphic.yaml    |   4 +-
 mapproxy/test/system/fixture/mixed_mode.yaml       |   6 +-
 mapproxy/test/system/fixture/scalehints.yaml       |   4 +-
 mapproxy/test/system/fixture/seedonly.yaml         |   4 +-
 .../test/system/fixture/util-conf-wms-111-cap.xml  |   2 +-
 .../system/fixture/util_wms_capabilities111.xml    |   2 +-
 .../system/fixture/util_wms_capabilities130.xml    |   2 +-
 mapproxy/test/system/fixture/wms_versions.yaml     |   4 +-
 mapproxy/test/system/fixture/wmts.yaml             |   4 +-
 mapproxy/test/system/test_arcgis.py                |  48 +-
 mapproxy/test/system/test_auth.py                  |   2 +-
 mapproxy/test/system/test_cache_geopackage.py      | 128 +++++
 mapproxy/test/system/test_cache_s3.py              | 115 ++++
 mapproxy/test/system/test_kml.py                   |   2 +-
 mapproxy/test/system/test_multi_cache_layers.py    |  10 +-
 mapproxy/test/system/test_tms.py                   |   2 +-
 mapproxy/test/system/test_wms.py                   |  23 +-
 mapproxy/test/test_http_helper.py                  |  24 +-
 mapproxy/test/unit/fixture/cache.gpkg              | Bin 0 -> 45056 bytes
 mapproxy/test/unit/test_async.py                   |   4 +-
 mapproxy/test/unit/test_cache.py                   |  64 +++
 mapproxy/test/unit/test_cache_compact.py           | 127 +++++
 mapproxy/test/unit/test_cache_geopackage.py        | 221 ++++++++
 mapproxy/test/unit/test_cache_redis.py             |  71 +++
 mapproxy/test/unit/test_cache_s3.py                |  85 +++
 mapproxy/test/unit/test_cache_tile.py              | 103 +++-
 mapproxy/test/unit/test_client.py                  |  16 +-
 mapproxy/test/unit/test_client_arcgis.py           |  73 +++
 mapproxy/test/unit/test_conf_loader.py             |  19 +
 mapproxy/test/unit/test_featureinfo.py             |  22 +-
 mapproxy/test/unit/test_geom.py                    | 148 ++++-
 mapproxy/test/unit/test_grid.py                    |  16 +
 mapproxy/test/unit/test_image.py                   |  50 +-
 mapproxy/test/unit/test_image_mask.py              |  45 +-
 mapproxy/test/unit/test_request.py                 |  49 +-
 mapproxy/test/unit/test_seed.py                    |  52 +-
 mapproxy/test/unit/test_wms_layer.py               |   8 +-
 mapproxy/util/async.py                             |   2 +-
 mapproxy/util/coverage.py                          |  89 ++-
 mapproxy/util/ext/serving.py                       |  17 +-
 mapproxy/util/ext/wmsparse/test/test_parse.py      |   2 +-
 .../util/ext/wmsparse/test/wms-omniscale-111.xml   |   2 +-
 mapproxy/util/geom.py                              |  87 ++-
 mapproxy/util/py.py                                |   9 +-
 release.py                                         |   8 +-
 requirements-tests.txt                             |  44 +-
 setup.py                                           |   2 +-
 tox.ini                                            |   2 +-
 119 files changed, 5053 insertions(+), 756 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..6a38d38
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,37 @@
+<!--- Is this a bug? -->
+<!--- This issue tracker is only used for tracking bugs. Please use the mailing
+      list, if you have any question or need help: https://mapproxy.org/support -->
+
+<!--- It is a bug! -->
+<!--- Please provide a general summary of the issue in the Title above -->
+
+## Context
+<!--- Provide a more detailed introduction to the issue itself, and why you consider it to be a bug -->
+
+## Expected Behavior
+<!--- Tell us what should happen -->
+
+## Actual Behavior
+<!--- Tell us what happens instead -->
+
+## Possible Fix
+<!--- Not obligatory, but suggest a fix or reason for the bug -->
+
+## Steps to Reproduce
+<!--- Provide an unambiguous set of steps to reproduce this bug -->
+<!--- Include _minimal_ but _complete_ configurations and test requests. -->
+<!--- Use https://gist.github.com to link to larger configurations. -->
+1.
+2.
+3.
+4.
+
+## Context
+<!--- How has this bug affected you? What were you trying to accomplish? -->
+
+## Your Environment
+<!--- Include as many relevant details about the environment you experienced the bug in -->
+* Version used:
+* Environment name and version (e.g. Python 2.7.5 with mod_wsgi 4.5.9):
+* Server type and version:
+* Operating System and version:
diff --git a/.gitignore b/.gitignore
index 6d74fb2..bf9d20b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,5 @@ nosetests*.xml
 .settings
 .pydevproject
 .tox/
+
+.idea/
diff --git a/.travis.yml b/.travis.yml
index f062e66..9d991c7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,6 @@
 language: python
 
 python:
-  - "2.6"
   - "2.7"
   - "3.3"
   - "3.4"
@@ -10,6 +9,7 @@ python:
 services:
   - couchdb
   - riak
+  - redis-server
 
 addons:
   apt:
@@ -29,7 +29,13 @@ addons:
       - libprotoc-dev
 
 env:
-  - MAPPROXY_TEST_COUCHDB=http://127.0.0.1:5984
+  global:
+    - MAPPROXY_TEST_COUCHDB=http://127.0.0.1:5984
+    - MAPPROXY_TEST_REDIS=127.0.0.1:6379
+
+    # do not load /etc/boto.cfg with Python 3 incompatible plugin
+    # https://github.com/travis-ci/travis-ci/issues/5246#issuecomment-166460882
+    - BOTO_CONFIG=/doesnotexist
 
 cache:
   directories:
@@ -37,7 +43,8 @@ cache:
 
 install:
     # riak packages are not compatible with Python 3
-    - "if [[ $TRAVIS_PYTHON_VERSION = '2.7' ]]; then pip install --use-mirrors protobuf>=2.4.1 riak==2.2 riak_pb>=2.0; export MAPPROXY_TEST_COUCHDB=http://127.0.0.1:5984; export MAPPROXY_TEST_RIAK_PBC=pbc://localhost:8087; fi"
+    - "if [[ $TRAVIS_PYTHON_VERSION = '2.7' ]]; then pip install protobuf>=2.4.1 riak==2.2 riak_pb>=2.0; export MAPPROXY_TEST_RIAK_PBC=pbc://localhost:8087; fi"
     - "pip install -r requirements-tests.txt"
+    - "pip freeze"
 
 script: nosetests mapproxy
diff --git a/AUTHORS.txt b/AUTHORS.txt
index 0ba4e31..bef029e 100644
--- a/AUTHORS.txt
+++ b/AUTHORS.txt
@@ -29,4 +29,5 @@ Patches and Suggestions
 - Richard Duivenvoorde
 - Stephan Holl
 - Steven D. Lander
-- Tom Payne
\ No newline at end of file
+- Tom Payne
+- Joseph Svrcek
diff --git a/CHANGES.txt b/CHANGES.txt
index f25e254..88b1fa3 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,51 @@
+1.10.0 2017-05-18
+~~~~~~~~~~~~~~~~~
+
+Improvements:
+
+- Support for S3 cache.
+- Support for the ArcGIS Compact Cache format version 1.
+- Support for GeoPackage files.
+- Support for Redis cache.
+- Support meta_tiles for tiles sources with bulk_meta_tiles option.
+- mbtiles/sqlite cache: Store multiple tiles in one transaction.
+- mbtiles/sqlite cache: Make timeout and WAL configurable.
+- ArcGIS REST source: Improve handling for ImageServer endpoints.
+- ArcGIS REST source: Support FeatureInfo requests.
+- ArcGIS REST source: Support min_res and max_res.
+- Support merging of RGB images with fixed transparency.
+- Coverages: Clip source requests at coverage boundaries.
+- Coverages: Build the difference, union or intersection of multiple coverages.
+- Coverages: Create coverages from webmercator tile coordinates like 05/182/123
+  with expire tiles files.
+- Coverages: Add native support for GeoJSON (no OGR/GDAL required).
+- mapproxy-seed: Add --duration, -reseed-file and -reseed-interval options.
+
+Fixes:
+
+- Fix level selection for grids with small res_factor.
+- mapproxy-util scales: Fix for Python 3.
+- WMS: Fix FeatureInfo precision for transformed requests.
+- Auth-API: Fix FeatureInfo for layers with limitto.
+- Fixes subpixel transformation deviations with Pillow 3.4 or higher.
+- mapproxy-seed: Reduce log output, especially in --quiet mode.
+- mapproxy-seed: Improve tile counter for tile grids with custom resolutions.
+- mapproxy-seed: Improve saving of the seed progress for --continue.
+- Fix band-merging when not all sources return an image.
+
+Other:
+
+- Python 2.6 is no longer supported.
+
+
+1.9.1 2017-01-18
+~~~~~~~~~~~~~~~~
+
+Fixes:
+
+- serve-develop: fixed reloader for Windows installations made
+  with recent pip version (#279)
+
 1.9.0 2016-07-22
 ~~~~~~~~~~~~~~~~
 
diff --git a/README.rst b/README.rst
index 08f6f0f..56a7bd5 100644
--- a/README.rst
+++ b/README.rst
@@ -1,12 +1,12 @@
 MapProxy is an open source proxy for geospatial data. It caches, accelerates and transforms data from existing map services and serves any desktop or web GIS client.
 
-.. image:: http://mapproxy.org/mapproxy.png
+.. image:: https://mapproxy.org/mapproxy.png
 
 MapProxy is a tile cache, but also offers many new and innovative features like full support for WMS clients.
 
-MapProxy is actively developed and supported by `Omniscale <http://omniscale.com>`_, it is released under the Apache Software License 2.0, runs on Unix/Linux and Windows and is easy to install and to configure.
+MapProxy is actively developed and supported by `Omniscale <https://omniscale.com>`_, it is released under the Apache Software License 2.0, runs on Unix/Linux and Windows and is easy to install and to configure.
 
-Go to http://mapproxy.org/ for more information.
+Go to https://mapproxy.org/ for more information.
 
-The documentation is available at: http://mapproxy.org/docs/latest/
+The documentation is available at: https://mapproxy.org/docs/latest/
 
diff --git a/doc/auth.rst b/doc/auth.rst
index 50acc4e..847aa32 100644
--- a/doc/auth.rst
+++ b/doc/auth.rst
@@ -33,11 +33,11 @@ A simple middleware that authorizes random requests might look like::
       def __init__(self, app, global_conf):
           self.app = app
 
-      def __call__(self, environ, start_reponse):
+      def __call__(self, environ, start_response):
           if random.randint(0, 1) == 1:
-            return self.app(environ, start_reponse)
+            return self.app(environ, start_response)
           else:
-            start_reponse('403 Forbidden',
+            start_response('403 Forbidden',
               [('content-type', 'text/plain')])
             return ['no luck today']
 
@@ -86,10 +86,10 @@ Here is a more elaborate example that denies requests to all layers that start w
           self.app = app
           self.prefix = prefix
 
-      def __call__(self, environ, start_reponse):
+      def __call__(self, environ, start_response):
           # put authorize callback function into environment
           environ['mapproxy.authorize'] = self.authorize
-          return self.app(environ, start_reponse)
+          return self.app(environ, start_response)
 
       def authorize(self, service, layers=[], environ=None, **kw):
           allowed = denied = False
@@ -517,9 +517,9 @@ Example that rejects MapProxy instances where the name starts with ``secure``.
       def __init__(self, app, global_conf):
           self.app = app
 
-      def __call__(self, environ, start_reponse):
+      def __call__(self, environ, start_response):
           environ['mapproxy.authorize'] = self.authorize
-          return self.app(environ, start_reponse)
+          return self.app(environ, start_response)
 
       def authorize(self, service, layers=[]):
           instance_name = environ.get('mapproxy.instance_name', '')
diff --git a/doc/caches.rst b/doc/caches.rst
index 45fd07d..01ff791 100644
--- a/doc/caches.rst
+++ b/doc/caches.rst
@@ -5,6 +5,7 @@ Caches
 
 MapProxy supports multiple backends to store the internal tiles. The default backend is file based and does not require any further configuration.
 
+
 Configuration
 =============
 
@@ -25,6 +26,19 @@ Each backend has a ``type`` and one or more options.
 
 The following backend types are available.
 
+
+- :ref:`cache_file`
+- :ref:`cache_mbtiles`
+- :ref:`cache_sqlite`
+- :ref:`cache_geopackage`
+- :ref:`cache_couchdb`
+- :ref:`cache_riak`
+- :ref:`cache_redis`
+- :ref:`cache_s3`
+- :ref:`cache_compact`
+
+.. _cache_file:
+
 ``file``
 ========
 
@@ -53,6 +67,7 @@ This is the default cache type and it uses a single file for each tile. Availabl
 
   .. versionadded:: 1.6.0
 
+.. _cache_mbtiles:
 
 ``mbtiles``
 ===========
@@ -87,17 +102,21 @@ You can set the ``sources`` to an empty list, if you use an existing MBTiles fil
   The MBTiles format specification does not include any timestamps for each tile and the seeding function is limited therefore. If you include any ``refresh_before`` time in a seed task, all tiles will be recreated regardless of the value. The cleanup process does not support any ``remove_before`` times for MBTiles and it always removes all tiles.
   Use the ``--summary`` option of the ``mapproxy-seed`` tool.
 
+The note about ``bulk_meta_tiles`` for SQLite below applies to MBtiles as well.
+
+.. _cache_sqlite:
+
 ``sqlite``
 ===========
 
 .. versionadded:: 1.6.0
 
-Use SQLite databases to store the tiles, similar to ``mbtiles`` cache. The difference to ``mbtiles`` cache is that the ``sqlite`` cache stores each level into a separate databse. This makes it easy to remove complete levels during mapproxy-seed cleanup processes. The ``sqlite`` cache also stores the timestamp of each tile.
+Use SQLite databases to store the tiles, similar to ``mbtiles`` cache. The difference to ``mbtiles`` cache is that the ``sqlite`` cache stores each level into a separate database. This makes it easy to remove complete levels during mapproxy-seed cleanup processes. The ``sqlite`` cache also stores the timestamp of each tile.
 
 Available options:
 
 ``dirname``:
-  The direcotry where the level databases will be stored.
+  The directory where the level databases will be stored.
 
 ``tile_lock_dir``:
   Directory where MapProxy should write lock files when it creates new tiles for this cache. Defaults to ``cache_data/tile_locks``.
@@ -115,6 +134,25 @@ Available options:
         directory: /path/to/cache
 
 
+.. note::
+
+  .. versionadded:: 1.10.0
+
+  All tiles from a meta tile request are stored in one transaction into the SQLite file to increase performance. You need to activate the :ref:`bulk_meta_tiles <bulk_meta_tiles>` option to get the same benefit when you are using tiled sources.
+
+  ::
+
+    caches:
+      sqlite_cache:
+        sources: [mytilesource]
+        bulk_meta_tiles: true
+        grids: [GLOBAL_MERCATOR]
+        cache:
+          type: sqlite
+          directory: /path/to/cache
+
+.. _cache_couchdb:
+
 ``couchdb``
 ===========
 
@@ -235,6 +273,7 @@ MapProxy will place the JSON document for tile z=3, x=1, y=2 at ``http://localho
 
 The ``_attachments``-part is the internal structure of CouchDB where the tile itself is stored. You can access the tile directly at: ``http://localhost:9999/mywms_tiles/mygrid-3-1-2/tile``.
 
+.. _cache_riak:
 
 ``riak``
 ========
@@ -288,3 +327,204 @@ Example
         default_ports:
             pb: 8087
             http: 8098
+
+.. _cache_redis:
+
+``redis``
+=========
+
+.. versionadded:: 1.10.0
+
+Store tiles in a `Redis <https://redis.io/>`_ in-memory database. This backend is useful for short-term caching. Typical use-case is a small Redis cache that allows you to benefit from meta-tiling.
+
+Your Redis database should be configured with ``maxmemory`` and ``maxmemory-policy`` options to limit the memory usage. For example::
+
+  maxmemory 256mb
+  maxmemory-policy volatile-ttl
+
+
+Requirements
+------------
+
+You will need the `Python Redis client <https://pypi.python.org/pypi/redis>`_. You can install it in the usual way, for example with ``pip install redis``.
+
+Configuration
+-------------
+
+Available options:
+
+``host``:
+    Host name of the Redis server. Defaults to ``127.0.0.1``.
+
+``port``:
+    Port of the Redis server. Defaults to ``6379``.
+
+``db``:
+    Number of the Redis database. Please refer to the Redis documentation. Defaults to `0`.
+
+``prefix``:
+    The prefix added to each tile-key in the Redis cache. Used to distinguish tiles from different caches and grids.  Defaults to ``cache-name_grid-name``.
+
+``default_ttl``:
+    The default Time-To-Live of each tile in the Redis cache in seconds. Defaults to 3600 seconds (1 hour).
+
+
+
+Example
+-------
+
+::
+
+    redis_cache:
+        sources: [mywms]
+        grids: [mygrid]
+        cache:
+          type: redis
+          default_ttl: 600
+
+
+.. _cache_geopackage:
+
+``geopackage``
+==============
+
+.. versionadded:: 1.10.0
+
+Store tiles in a `geopackage <http://www.geopackage.org/>`_ database. MapProxy creates a tile table if one isn't defined and populates the required meta data fields.
+This backend is good for datasets that require portability.
+Available options:
+
+``filename``:
+  The path to the geopackage file. Defaults to ``cachename.gpkg``.
+
+``table_name``:
+  The name of the table where the tiles should be stored (or retrieved if using an existing cache). Defaults to the ``cachename_gridname``.
+
+``levels``:
+  Set this to true to cache to a directory where each level is stored in a separate geopackage. Defaults to ``false``.
+  If set to true, ``filename`` is ignored.
+
+``directory``:
+  If levels is true use this to specify the directory to store geopackage files.
+
+You can set the ``sources`` to an empty list, if you use an existing geopackage file and do not have a source.
+
+::
+
+  caches:
+    geopackage_cache:
+      sources: []
+      grids: [GLOBAL_MERCATOR]
+      cache:
+        type: geopackage
+        filename: /path/to/bluemarble.gpkg
+        table_name: bluemarble_tiles
+
+.. note::
+
+  The geopackage format specification does not include any timestamps for each tile and the seeding function is limited therefore. If you include any ``refresh_before`` time in a seed task, all tiles will be recreated regardless of the value. The cleanup process does not support any ``remove_before`` times for geopackage and it always removes all tiles.
+  Use the ``--summary`` option of the ``mapproxy-seed`` tool.
+
+
+.. _cache_s3:
+
+``s3``
+======
+
+.. versionadded:: 1.10.0
+
+Store tiles in `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_.
+
+
+Requirements
+------------
+
+You will need the Python `boto3 <https://github.com/boto/boto3>`_ package. You can install it in the usual way, for example with ``pip install boto3``.
+
+Configuration
+-------------
+
+Available options:
+
+``bucket_name``:
+  The bucket used for this cache. You can set the default bucket with ``globals.cache.s3.bucket_name``.
+
+``profile_name``:
+  Optional profile name for `shared credentials <http://boto3.readthedocs.io/en/latest/guide/configuration.html>`_ for this cache. Alternative methods of authentification are using the  ``AWS_ACCESS_KEY_ID`` and ``AWS_SECRET_ACCESS_KEY`` environmental variables, or by using an `IAM role <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_ when using an Amazon EC2 instance.
+  You can set the default profile with ``globals.cache.s3.profile_name``.
+
+``directory``:
+  Base directory (path) where all tiles are stored.
+
+``directory_layout``:
+  Defines the directory layout for the tiles (``12/12345/67890.png``, ``L12/R00010932/C00003039.png``, etc.).  See :ref:`cache_file` for available options. Defaults to ``tms`` (e.g. ``12/12345/67890.png``). This cache cache also supports ``reverse_tms`` where tiles are stored as ``y/x/z.format``. See *note* below.
+
+.. note::
+  The hierarchical ``directory_layouts`` can hit limitations of S3 *"if you are routinely processing 100 or more requests per second"*. ``directory_layout: reverse_tms`` can work around this limitation. Please read `S3 Request Rate and Performance Considerations <http://docs.aws.amazon.com/AmazonS3/latest/dev/request-rate-perf-considerations.html>`_ for more information on this issue.
+
+Example
+-------
+
+::
+
+  cache:
+    my_layer_20110501_epsg_4326_cache_out:
+      sources: [my_layer_20110501_cache]
+      cache:
+        type: s3
+        directory: /1.0.0/my_layer/default/20110501/4326/
+        bucket_name: my-s3-tiles-cache
+
+  globals:
+    cache:
+      s3:
+        profile_name: default
+
+
+.. _cache_compact:
+
+
+``compact``
+===========
+
+.. versionadded:: 1.10.0
+
+Store tiles in ArcGIS compatible compact cache files. A single compact cache ``.bundle`` file stores up to about 16,000 tiles. There is one additional ``.bundlx`` index file for each ``.bundle`` data file.
+
+Only version 1 of the compact cache format (ArcGIS 10.0-10.2) is supported. Version 2 (ArcGIS 10.3 or higher) is not supported at the moment.
+
+Available options:
+
+``directory``:
+  Directory where MapProxy should store the level directories. This will not add the cache name or grid name to the path. You can use this option to point MapProxy to an existing compact cache.
+
+``version``:
+  The version of the ArcGIS compact cache format. This option is required.
+
+
+You can set the ``sources`` to an empty list, if you use existing compact cache files and do not have a source.
+
+
+The following configuration will load tiles from ``/path/to/cache/L00/R0000C0000.bundle``, etc.
+
+::
+
+  caches:
+    compact_cache:
+      sources: []
+      grids: [webmercator]
+      cache:
+        type: compact
+        version: 1
+        directory: /path/to/cache
+
+.. note::
+
+  The compact cache format does not include any timestamps for each tile and the seeding function is limited therefore. If you include any ``refresh_before`` time in a seed task, all tiles will be recreated regardless of the value. The cleanup process does not support any ``remove_before`` times for compact caches and it always removes all tiles.
+  Use the ``--summary`` option of the ``mapproxy-seed`` tool.
+
+
+.. note::
+
+  The compact cache format is append-only to allow parallel read and write operations. Removing or refreshing tiles with ``mapproxy-seed`` does not reduce the size of the cache files. Therefore, this format is not suitable for caches that require frequent updates.
+
diff --git a/doc/conf.py b/doc/conf.py
index 89628f0..e09cca4 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -49,9 +49,9 @@ copyright = u'Oliver Tonnhofer, Omniscale'
 # built documents.
 #
 # The short X.Y version.
-version = '1.8'
+version = '1.10'
 # The full version, including alpha/beta/rc tags.
-release = '1.8.2a0'
+release = '1.10.0a0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/doc/configuration.rst b/doc/configuration.rst
index 1e73ebd..5bf27e4 100644
--- a/doc/configuration.rst
+++ b/doc/configuration.rst
@@ -419,7 +419,7 @@ with no data (e.g. water areas, areas with no roads, etc.).
 """"""""""""""""""""""""""
 If set to ``true``, MapProxy will only issue a single request to the source. This option can reduce the request latency for uncached areas (on demand caching).
 
-By default MapProxy requests all uncached meta tiles that intersect the requested bbox. With a typical configuration it is not uncommon that a requests will trigger four requests each larger than 2000x2000 pixel. With the ``minimize_meta_requests`` option enabled, each request will trigger only one request to the source. That request will be aligned to the next tile boundaries and the tiles will be cached.
+By default MapProxy requests all uncached meta-tiles that intersect the requested bbox. With a typical configuration it is not uncommon that a request will trigger four requests each larger than 2000x2000 pixel. With the ``minimize_meta_requests`` option enabled, each request will trigger only one request to the source. That request will be aligned to the next tile boundaries and the tiles will be cached.
 
 .. index:: watermark
 
@@ -470,6 +470,11 @@ requests for geographical SRS will use ``EPSG:4326``.
 
 Change the ``meta_size`` and ``meta_buffer`` of this cache. See :ref:`global cache options <meta_size>` for more details.
 
+``bulk_meta_tiles``
+"""""""""""""""""""
+
+Enables meta-tile handling for tiled sources. See :ref:`global cache options <meta_size>` for more details.
+
 ``image``
 """""""""
 
@@ -608,6 +613,9 @@ Requests with 1500, 1000 or 701m/px resolution will use the first level, request
 """"""""
 
 The extent of your grid. You can use either a list or a string with the lower left and upper right coordinates. You can set the SRS of the coordinates with the ``bbox_srs`` option. If that option is not set the ``srs`` of the grid will be used.
+
+MapProxy always expects your BBOX coordinates order to be east, south, west, north, regardless of your SRS :ref:`axis order <axis_order>`.
+
 ::
 
   bbox: [0, 40, 15, 55]
@@ -633,11 +641,9 @@ The default origin (x=0, y=0) of the tile grid is the lower left corner, similar
 The following values are supported:
 
 ``ll`` or ``sw``:
-
   If the x=0, y=0 tile is in the lower-left/south-west corner of the tile grid. This is the default.
 
 ``ul`` or ``nw``:
-
   If the x=0, y=0 tile is in the upper-left/north-west corner of the tile grid.
 
 
@@ -788,13 +794,24 @@ Here you can define some options that affect the way MapProxy generates image re
 ``cache``
 """""""""
 
+The following options define how tiles are created and stored. Most options can be set individually for each cache as well.
+
 .. versionadded:: 1.6.0 ``tile_lock_dir``
+.. versionadded:: 1.10.0 ``bulk_meta_tiles``
 
 
 .. _meta_size:
 
 ``meta_size``
-  MapProxy does not make a single request for every tile but will request a large meta-tile that consist of multiple tiles. ``meta_size`` defines how large a meta-tile is. A ``meta_size`` of ``[4, 4]`` will request 16 tiles in one pass. With a tile size of 256x256 this will result in 1024x1024 requests to the source WMS.
+  MapProxy does not make a single request for every tile it needs, but it will request a large meta-tile that consist of multiple tiles. ``meta_size`` defines how large a meta-tile is. A ``meta_size`` of ``[4, 4]`` will request 16 tiles in one pass. With a tile size of 256x256 this will result in 1024x1024 requests to the source. Tiled sources are still requested tile by tile, but you can configure MapProxy to load multiple tiles in bulk with `bulk_meta_tiles`.
+
+
+.. _bulk_meta_tiles:
+
+``bulk_meta_tiles``
+  Enables meta-tile handling for caches with tile sources.
+  If set to `true`, MapProxy will request neighboring tiles from the source even if only one tile is requested from the cache. ``meta_size`` defines how many tiles should be requested in one step and ``concurrent_tile_creators`` defines how many requests are made in parallel. This option improves the performance for caches that allow to store multiple tiles with one request, like SQLite/MBTiles but not the ``file`` cache.
+
 
 ``meta_buffer``
   MapProxy will increase the size of each meta-tile request by this number of
@@ -822,10 +839,11 @@ Here you can define some options that affect the way MapProxy generates image re
   can either be absolute (e.g. ``/tmp/lock/mapproxy``) or relative to the
   mapproxy.yaml file. Defaults to ``./cache_data/dir_of_the_cache/tile_locks``.
 
+
 ``concurrent_tile_creators``
-  This limits the number of parallel requests MapProxy will make to a source WMS. This limit is per request and not for all MapProxy requests. To limit the requests MapProxy makes to a single server use the ``concurrent_requests`` option.
+  This limits the number of parallel requests MapProxy will make to a source. This limit is per request for this cache and not for all MapProxy requests. To limit the requests MapProxy makes to a single server use the ``concurrent_requests`` option.
 
-  Example: A request in an uncached region requires MapProxy to fetch four meta-tiles. A ``concurrent_tile_creators`` value of two allows MapProxy to make two requests to the source WMS request in parallel. The splitting of the meta tile and the encoding of the new tiles will happen in parallel to.
+  Example: A request in an uncached region requires MapProxy to fetch four meta-tiles. A ``concurrent_tile_creators`` value of two allows MapProxy to make two requests to the source WMS request in parallel. The splitting of the meta-tile and the encoding of the new tiles will happen in parallel, too.
 
 
 ``link_single_color_images``
@@ -897,7 +915,7 @@ See the `Python SSL documentation <http://docs.python.org/dev/library/ssl.html#s
   http:
     ssl_ca_certs: /etc/ssl/certs/ca-certificates.crt
 
-If you want to use SSL but do not need certificate verification, then you can disable it with the ``ssl_no_cert_checks`` option. You can also disable this check on a source level, see :ref:`WMS source options <wms_source-ssl_no_cert_checks>`.
+If you want to use SSL but do not need certificate verification, then you can disable it with the ``ssl_no_cert_checks`` option. You can also disable this check on a source level, see :ref:`WMS source options <wms_source_ssl_no_cert_checks>`.
 ::
 
   http:
diff --git a/doc/coverages.rst b/doc/coverages.rst
index 3cf124b..72401c4 100644
--- a/doc/coverages.rst
+++ b/doc/coverages.rst
@@ -7,12 +7,17 @@ With coverages you can define areas where data is available or where data you ar
 MapProxy supports coverages for :doc:`sources <sources>` and in the :doc:`mapproxy-seed tool <seed>`. Refer to the corresponding section in the documentation.
 
 
-There are three different ways to describe a coverage.
+There are five different ways to describe a coverage:
 
 - a simple rectangular bounding box,
 - a text file with one or more (multi)polygons in WKT format,
-- (multi)polygons from any data source readable with OGR (e.g. Shapefile, GeoJSON, PostGIS)
+- a GeoJSON file with (multi)polygons features,
+- (multi)polygons from any data source readable with OGR (e.g. Shapefile, GeoJSON, PostGIS),
+- a file with webmercator tile coordinates.
 
+.. versionadded:: 1.10
+
+You can also build intersections, unions and differences between multiple coverages.
 
 Requirements
 ------------
@@ -46,7 +51,7 @@ Bounding box
 For simple box coverages.
 
 ``bbox`` or ``datasource``:
-    A simple BBOX as a list, e.g: `[4, -30, 10, -28]` or as a string `4,-30,10,-28`.
+    A simple BBOX as a list of minx, miny, maxx, maxy, e.g: `[4, -30, 10, -28]` or as a string `4,-30,10,-28`.
 
 Polygon file
 """"""""""""
@@ -57,6 +62,17 @@ You can create your own files or use `one of the files we provide for every coun
 ``datasource``:
  The path to the polygon file. Should be relative to the proxy configuration or absolute.
 
+GeoJSON
+"""""""
+
+.. versionadded:: 1.10
+  Previous versions required OGR/GDAL for reading GeoJSON.
+
+You can use GeoJSON files with Polygon and MultiPolygons geometries. FeatureCollections and Features of these geometries are supported as well. MapProxy uses OGR to read GeoJSON files if you define a ``where`` filter.
+
+``datasource``:
+ The path to the GeoJSON file. Should be relative to the proxy configuration or absolute.
+
 OGR datasource
 """"""""""""""
 
@@ -75,6 +91,60 @@ Any polygon datasource that is supported by OGR (e.g. Shapefile, GeoJSON, PostGI
   option is unset, the first layer from the datasource will be used.
 
 
+Expire tiles
+""""""""""""
+
+.. versionadded:: 1.10
+
+Text file with webmercator tile coordinates. The tiles should be in ``z/x/y`` format (e.g. ``14/1283/6201``),
+with one tile coordinate per line. Only tiles in the webmercator grid are supported (origin is always `nw`).
+
+``expire_tiles``:
+  File or directory with expire tile files. Directories are loaded recursively.
+
+
+Union
+"""""
+
+.. versionadded:: 1.10
+
+A union coverage contains the combined coverage of one or more sub-coverages. This can be used to combine multiple coverages for a single source. Each sub-coverage can be of any supported type and SRS.
+
+``union``:
+  A list of multiple coverages.
+
+Difference
+""""""""""
+
+.. versionadded:: 1.10
+
+A difference coverage subtracts the coverage of other sub-coverages from the first coverage. This can be used to exclude parts from a coverage. Each sub-coverage can be of any supported type and SRS.
+
+``difference``:
+  A list of multiple coverages.
+
+
+Intersection
+""""""""""""
+
+.. versionadded:: 1.10
+
+An intersection coverage contains only areas that are covered by all sub-coverages. This can be used to limit a larger coverage to a smaller area. Each sub-coverage can be of any supported type and SRS.
+
+``intersection``:
+  A list of multiple coverages.
+
+
+Clipping
+--------
+.. versionadded:: 1.10.0
+
+By default MapProxy tries to get and serve the full source image even if a coverage only touches it.
+Clipping by coverage can be enabled by setting ``clip: true``. If enabled, all areas outside the coverage will be converted to transparent pixels.
+
+The ``clip`` option is only active for source coverages and not for seeding coverages.
+
+
 Examples
 --------
 
@@ -96,6 +166,23 @@ Use the ``coverage`` option to define a coverage for a WMS or tile source.
         srs: 'EPSG:4326'
 
 
+Example of an intersection coverage with clipping::
+
+  sources:
+    mywms:
+      type: wms
+      req:
+        url: http://example.com/service?
+        layers: base
+      coverage:
+        clip: true
+        intersection:
+          - bbox: [5, 50, 10, 55]
+            srs: 'EPSG:4326'
+          - datasource: coverage.geojson
+            srs: 'EPSG:4326'
+
+
 mapproxy-seed
 """""""""""""
 
diff --git a/doc/deployment.rst b/doc/deployment.rst
index 84070ca..0063af8 100644
--- a/doc/deployment.rst
+++ b/doc/deployment.rst
@@ -115,8 +115,10 @@ A more complete configuration might look like::
 
   <Directory /path/to/mapproxy/>
     Order deny,allow
-    Require all granted  # for Apache 2.4
-    # Allow from all     # for Apache 2.2
+    # For Apache 2.4:
+    Require all granted
+    # For Apache 2.2:
+    # Allow from all
   </Directory>
 
 
diff --git a/doc/install.rst b/doc/install.rst
index 48d26ef..c27a701 100644
--- a/doc/install.rst
+++ b/doc/install.rst
@@ -94,7 +94,7 @@ You will need Shapely to use the :doc:`coverage feature <coverages>` of MapProxy
 
 GDAL *(optional)*
 ~~~~~~~~~~~~~~~~~
-The :doc:`coverage feature <coverages>` allows you to read geometries from OGR datasources (Shapefiles, PostGIS, etc.). This package is optional and only required for OGR datasource support. OGR is part of GDAL (``libgdal-dev``).
+The :doc:`coverage feature <coverages>` allows you to read geometries from OGR datasources (Shapefiles, PostGIS, etc.). This package is optional and only required for OGR datasource support (BBOX, WKT and GeoJSON coverages are supported natively). OGR is part of GDAL (``libgdal-dev``).
 
 .. _lxml_install:
 
diff --git a/doc/install_windows.rst b/doc/install_windows.rst
index 00acbaa..ad48715 100644
--- a/doc/install_windows.rst
+++ b/doc/install_windows.rst
@@ -1,10 +1,7 @@
 Installation on Windows
 =======================
 
-.. note:: You can also :doc:`install MapProxy inside an existing OSGeo4W installation<install_osgeo4w>`.
-
-At frist you need a working Python installation. You can download Python from: http://www.python.org/download/. MapProxy requires Python 2.7, 3.3 or 3.4. Python 2.6 should still work, but it is no longer officially supported.
-
+At first you need a working Python installation. You can download Python from: https://www.python.org/download/. MapProxy requires Python 2.7, 3.3, 3.4, 3.5 or 3.6. Python 2.6 should still work, but it is no longer officially supported. We would recommend the latest 2.7 version available.
 
 Virtualenv
 ----------
@@ -24,23 +21,22 @@ To create a new virtual environment for your MapProxy installation and to activa
 
 .. note:: Apache mod_wsgi does not work well with virtualenv on Windows. If you want to use mod_wsgi for deployment, then you should skip the creation the virtualenv.
 
-After you activated the new environment, you have access to ``python`` and ``easy_install``.
+After you activated the new environment, you have access to ``python`` and ``pip``.
 To install MapProxy with most dependencies call::
 
-  easy_install MapProxy
+  pip install MapProxy
 
 This might take a minute. You can skip the next step.
 
 
-Setuptools
-----------
+PIP
+---
 
-MapProxy and most dependencies can be installed with the ``easy_install`` command.
-You need to `install the setuptool package <http://pypi.python.org/pypi/setuptools>`_ to get the ``easy_install`` command.
+MapProxy and most dependencies can be installed with the ``pip`` command. ``pip`` is already installed if you are using Python >=2.7.9, or Python >=3.4. `Read the pip documentation for more information <https://pip.pypa.io/en/stable/installing/>`_.
 
 After that you can install MapProxy with::
 
-    c:\Python27\Scripts\easy_install MapProxy
+    c:\Python27\Scripts\pip install MapProxy
 
 This might take a minute.
 
@@ -53,21 +49,22 @@ Read :ref:`dependency_details` for more information about all dependencies.
 Pillow and YAML
 ~~~~~~~~~~~~~~~
 
-Pillow and PyYAML are installed automatically by ``easy_install``.
+Pillow and PyYAML are installed automatically by ``pip``.
 
 PyProj
 ~~~~~~
 
 Since libproj4 is generally not available on a Windows system, you will also need to install the Python package ``pyproj``.
+You need to manually download the ``pyproj`` package for your system. See below for *Platform dependent packages*.
 
 ::
 
-  easy_install pyproj
+  pip install path\to\pyproj-xxx.whl
 
 
 Shapely and GEOS *(optional)*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Shapely can be installed with ``easy_install Shapely``. This will already include the required ``geos.dll``.
+Shapely can be installed with ``pip install Shapely``. This will already include the required ``geos.dll``.
 
 
 GDAL *(optional)*
@@ -87,16 +84,18 @@ You also need to set ``GDAL_DRIVER_PATH`` or ``OGR_DRIVER_PATH`` to the ``gdalpl
     set GDAL_DRIVER_PATH=C:\Program Files (x86)\GDAL\gdalplugins
 
 
+.. _win_platform_packages:
+
 Platform dependent packages
 ---------------------------
 
-All Python packages are downloaded from http://pypi.python.org/, but not all platform combinations might be available as a binary package, especially if you run a 64bit version of Windows.
+``pip`` downloads all packages from https://pypi.python.org/, but not all platform combinations might be available as a binary package, especially if you run a 64bit version of Python.
 
-If you run into troubles during installation, because it is trying to compile something (e.g. complaining about ``vcvarsall.bat``), you should look at Christoph Gohlke's `Unofficial Windows Binaries for Python Extension Packages <http://www.lfd.uci.edu/~gohlke/pythonlibs/>`_.
+If you run into trouble during installation, because it is trying to compile something (e.g. complaining about ``vcvarsall.bat``), you should look at Christoph Gohlke's `Unofficial Windows Binaries for Python Extension Packages <http://www.lfd.uci.edu/~gohlke/pythonlibs/>`_. This is a reliable site for binary packages for Python. You need to download the right package: The ``cpxx`` code refers to the Python version (e.g. ``cp27`` for Python 2.7); ``win32`` for 32bit Python installations  [...]
 
-You can install the ``.exe`` packages with ``easy_install``::
+You can install the ``.whl``, ``.zip`` or ``.exe`` packages with ``pip``::
 
-  easy_install path\to\package-xxx.exe
+  pip install path\to\package-xxx.whl
 
 
 Check installation
@@ -110,4 +109,3 @@ To check if the MapProxy was successfully installed you can call ``mapproxy-util
 
 Now continue with :ref:`Create a configuration <create_configuration>` from the installation documentation.
 
-
diff --git a/doc/mapproxy_util.rst b/doc/mapproxy_util.rst
index 881f38a..2c5a0df 100644
--- a/doc/mapproxy_util.rst
+++ b/doc/mapproxy_util.rst
@@ -504,11 +504,19 @@ Export types
     Export tiles like the internal cache directory structure. This is compatible with TileCache.
 
 ``mbtile``:
-    Exports tiles into a MBTile file.
+    Export tiles into a MBTile file.
+
+``sqlite``:
+    Export tiles into SQLite level files.
+
+``geopackage``:
+    Export tiles into a GeoPackage file.
 
 ``arcgis``:
-    Exports tiles in a ArcGIS exploded cache directory structure.
+    Export tiles in a ArcGIS exploded cache directory structure.
 
+``compact-v1``:
+    Export tiles as ArcGIS compact cache bundle files (version 1).
 
 
 Examples
diff --git a/doc/seed.rst b/doc/seed.rst
index f6b0717..75452b0 100644
--- a/doc/seed.rst
+++ b/doc/seed.rst
@@ -21,7 +21,7 @@ Options
 
 .. option:: -s <seed.yaml>, --seed-conf==<seed.yaml>
 
-  The seed configuration. You can also pass the configration as the last argument to ``mapproxy-seed``
+  The seed configuration. You can also pass the configuration as the last argument to ``mapproxy-seed``
 
 .. option:: -f <mapproxy.yaml>, --proxy-conf=<mapproxy.yaml>
 
@@ -67,6 +67,22 @@ Options
 
   Filename where MapProxy stores the seeding progress for the ``--continue`` option. Defaults to ``.mapproxy_seed_progress`` in the current working directory. MapProxy will remove that file after a successful seed.
 
+.. option:: --duration
+
+  Stop the seeding process after this duration. This option accepts durations in the following format: 120s, 15m, 4h, 0.5d
+  Use this option in combination with ``--continue`` to be able to resume the seeding.
+
+.. option:: --reseed-file
+
+  File created by ``mapproxy-seed`` at the start of a new seeding.
+
+.. option:: --reseed-interval
+
+  Only start seeding if ``--reseed-file`` is older than this duration.
+  This option accepts durations in the following format: 120s, 15m, 4h, 0.5d
+  Use this option in combination with ``--continue`` to be able to resume the seeding.
+  
+
 .. option:: --use-cache-lock
 
   Lock each cache to prevent multiple parallel `mapproxy-seed` calls to work on the same cache.
@@ -82,6 +98,11 @@ Options
 .. versionadded:: 1.7.0
   ``--log-config`` option
 
+.. versionadded:: 1.10.0
+  ``--duration``, ``--reseed-file`` and ``--reseed-interval`` option
+
+
+
 
 Examples
 ~~~~~~~~
@@ -380,6 +401,30 @@ Example
       srs: 'EPSG:4326'
 
 
+.. _background_seeding:
+
+Example: Background seeding
+---------------------------
+
+.. versionadded:: 1.10.0
+
+The ``--duration`` option allows you to run MapProxy seeding for a limited time. In combination with the ``--continue`` option, you can resume the seeding process at a later time.
+You can use this to call ``mapproxy-seed`` with ``cron`` to seed in the off-hours.
+
+However, this will restart the seeding process from the beginning every time the seeding is completed.
+You can prevent this with the ``--reseed-interval`` and ``--reseed-file`` options.
+The following example starts seeding for six hours. It will seed for another six hours, every time you call this command again. Once all seed and cleanup tasks were processed the command will exit immediately every time you call it within 14 days after the first call. After 14 days, the modification time of the ``reseed.time`` file will be updated and the re-seeding process starts again.
+
+::
+
+  mapproxy-seed -f mapproxy.yaml -s seed.yaml  \
+    --reseed-interval 14d --duration 6h --reseed-file reseed.time \ 
+    --continue --progress-file .mapproxy_seed_progress
+  
+You can use the ``--reseed-file`` as a ``refresh_before`` and ``remove_before`` ``mtime``-file.
+
+
+
 .. _seed_old_configuration:
 
 Old Configuration
diff --git a/doc/services.rst b/doc/services.rst
index 6d4c8b5..17ab6ae 100644
--- a/doc/services.rst
+++ b/doc/services.rst
@@ -108,10 +108,12 @@ You can use this to offer global datasets with SRS that are only valid in a loca
 
 A list of image mime types the server should offer.
 
+.. _wms_featureinfo_types:
+
 ``featureinfo_types``
 """""""""""""""""""""
 
-A list of feature info types the server should offer. Available types are ``text``, ``html`` and ``xml``. The types then are advertised in the capabilities with the correct mime type.
+A list of feature info types the server should offer. Available types are ``text``, ``html``, ``xml`` and ``json``. The types are advertised in the capabilities with the correct mime type. Defaults to ``[text, html, xml]``.
 
 ``featureinfo_xslt``
 """"""""""""""""""""
diff --git a/doc/sources.rst b/doc/sources.rst
index 77aaea5..7c825c2 100644
--- a/doc/sources.rst
+++ b/doc/sources.rst
@@ -257,7 +257,9 @@ Full example::
 .. _arcgis_label:
 
 ArcGIS REST API
-"""
+"""""""""""""""
+
+.. versionadded:: 1.9.0
 
 Use the type ``arcgis`` for ArcGIS MapServer and ImageServer REST server endpoints. This
 source is based on :ref:`the WMS source <wms_label>` and most WMS options apply to the
@@ -266,37 +268,49 @@ ArcGIS source too.
 ``req``
 ^^^^^^^
 
-This describes the ArcGIS source. The only required option is ``url``. You need to set ``transparent`` to ``true`` if you want to use this source as an overlay.
-::
+This describes the ArcGIS source. The only required option is ``url``. You need to set ``transparent`` to ``true`` if you want to use this source as an overlay. You can also add ArcGIS specific parameters to ``req``, for example to set the `interpolation method for ImageServers <http://resources.arcgis.com/en/help/rest/apiref/exportimage.html>`_.
 
-  req:
-    url: http://example.org/ArcGIS/rest/services/Imagery/MapService
-    layers: show: 0,1
-    transparent: true
 
-.. _example_configuration:
+``opts``
+^^^^^^^^
+
+.. versionadded:: 1.10.0
+
+This option affects what request MapProxy sends to the source ArcGIS server.
+
+``featureinfo``
+  If this is set to ``true``, MapProxy will mark the layer as queryable and incoming `GetFeatureInfo` requests will be forwarded as ``identify`` requests to the source server. ArcGIS REST servers support only HTML and JSON format. You need to enable support for JSON in :ref:`wms_featureinfo_types`.
+
+``featureinfo_return_geometries``
+  Whether the source should include the feature geometries.
+
+``featureinfo_tolerance``
+  Tolerance in pixels within which the ArcGIS server should identify features.
 
 Example configuration
 ^^^^^^^^^^^^^^^^^^^^^
 
-Minimal example::
+MapServer example::
 
   my_minimal_arcgissource:
     type: arcgis
     req:
+      layers: show: 0,1
       url: http://example.org/ArcGIS/rest/services/Imagery/MapService
+      transparent: true
 
-Full example::
+ImageServer example::
 
   my_arcgissource:
     type: arcgis
     coverage:
        polygons: GM.txt
-       polygons_srs: EPSG:900913
+       srs: EPSG:3857
     req:
-      url: http://example.org/ArcGIS/rest/services/Imagery/MapService
-      layers: show:0,1
-      transparent: true
+      url: http://example.org/ArcGIS/rest/services/World/MODIS/ImageServer
+      interpolation: RSP_CubicConvolution
+      bandIds: 2,0,1
+
 
 .. _tiles_label:
 
@@ -361,7 +375,7 @@ You can configure the following HTTP related options for this source:
 - ``headers``
 - ``client_timeout``
 - ``ssl_ca_certs``
-- ``ssl_no_cert_checks`` (:ref:`see above <wms_source-ssl_no_cert_checks>`)
+- ``ssl_no_cert_checks`` (:ref:`see above <wms_source_ssl_no_cert_checks>`)
 
 See :ref:`HTTP Options <http_ssl>` for detailed documentation.
 
diff --git a/doc/yaml/cache_conf.yaml b/doc/yaml/cache_conf.yaml
index 436a334..2143d69 100644
--- a/doc/yaml/cache_conf.yaml
+++ b/doc/yaml/cache_conf.yaml
@@ -2,26 +2,23 @@ services:
   demo:
   wms:
     md:
-        title: MapProxy WMS Proxy
-        abstract: This is the fantastic MapProxy.
-        online_resource: http://mapproxy.org/
-        contact:
-            person: Your Name Here
-            position: Technical Director
-            organization: 
-            address: Fakestreet 123
-            city: Somewhere
-            postcode: 12345
-            country: Germany
-            phone: +49(0)000-000000-0
-            fax: +49(0)000-000000-0
-            email: info at omniscale.de
-        access_constraints:
-            This service is intended for private and
-            evaluation use only. The data is licensed
-            as Creative Commons Attribution-Share Alike 2.0
-            (http://creativecommons.org/licenses/by-sa/2.0/)
-        fees: 'None'
+      title: MapProxy WMS Proxy
+      abstract: This is the fantastic MapProxy.
+      online_resource: http://mapproxy.org/
+      contact:
+        person: Your Name Here
+        position: Technical Director
+        organization:
+        address: Fakestreet 123
+        city: Somewhere
+        postcode: 12345
+        country: Germany
+        phone: +49(0)000-000000-0
+        fax: +49(0)000-000000-0
+        email: info at omniscale.de
+      access_constraints:
+        Insert license and copyright information for this service.
+      fees: 'None'
 
 sources:
   test_wms:
diff --git a/doc/yaml/grid_conf.yaml b/doc/yaml/grid_conf.yaml
index 24e70b2..2dbb081 100644
--- a/doc/yaml/grid_conf.yaml
+++ b/doc/yaml/grid_conf.yaml
@@ -8,7 +8,7 @@ services:
       contact:
         person: Your Name Here
         position: Technical Director
-        organization: 
+        organization:
         address: Fakestreet 123
         city: Somewhere
         postcode: 12345
@@ -17,10 +17,7 @@ services:
         fax: +49(0)000-000000-0
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and
-        evaluation use only. The data is licensed
-        as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 sources:
diff --git a/doc/yaml/merged_conf.yaml b/doc/yaml/merged_conf.yaml
index 571f2ad..546a2b3 100644
--- a/doc/yaml/merged_conf.yaml
+++ b/doc/yaml/merged_conf.yaml
@@ -8,7 +8,7 @@ services:
       contact:
         person: Your Name Here
         position: Technical Director
-        organization: 
+        organization:
         address: Fakestreet 123
         city: Somewhere
         postcode: 12345
@@ -17,10 +17,7 @@ services:
         fax: +49(0)000-000000-0
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and
-        evaluation use only. The data is licensed
-        as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 sources:
diff --git a/doc/yaml/meta_conf.yaml b/doc/yaml/meta_conf.yaml
index c5605c0..e9e415a 100644
--- a/doc/yaml/meta_conf.yaml
+++ b/doc/yaml/meta_conf.yaml
@@ -8,7 +8,7 @@ services:
       contact:
         person: Your Name Here
         position: Technical Director
-        organization: 
+        organization:
         address: Fakestreet 123
         city: Somewhere
         postcode: 12345
@@ -17,10 +17,7 @@ services:
         fax: +49(0)000-000000-0
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and
-        evaluation use only. The data is licensed
-        as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 sources:
diff --git a/doc/yaml/simple_conf.yaml b/doc/yaml/simple_conf.yaml
index 286cd36..bb6737a 100644
--- a/doc/yaml/simple_conf.yaml
+++ b/doc/yaml/simple_conf.yaml
@@ -8,7 +8,7 @@ services:
       contact:
         person: Your Name Here
         position: Technical Director
-        organization: 
+        organization:
         address: Fakestreet 123
         city: Somewhere
         postcode: 12345
@@ -17,10 +17,7 @@ services:
         fax: +49(0)000-000000-0
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and
-        evaluation use only. The data is licensed
-        as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 sources:
diff --git a/mapproxy/cache/__init__.py b/mapproxy/cache/__init__.py
index 70487bb..8f228c4 100644
--- a/mapproxy/cache/__init__.py
+++ b/mapproxy/cache/__init__.py
@@ -1,12 +1,12 @@
 # This file is part of the MapProxy project.
 # Copyright (C) 2010 Omniscale <http://omniscale.de>
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #    http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,10 +17,10 @@
 Tile caching (creation, caching and retrieval of tiles).
 
 .. digraph:: Schematic Call Graph
-    
+
     ranksep = 0.1;
-    node [shape="box", height="0", width="0"] 
-    
+    node [shape="box", height="0", width="0"]
+
     cl  [label="CacheMapLayer" href="<mapproxy.layer.CacheMapLayer>"]
     tm  [label="TileManager",  href="<mapproxy.cache.tile.TileManager>"];
     fc      [label="FileCache", href="<mapproxy.cache.file.FileCache>"];
@@ -31,6 +31,6 @@ Tile caching (creation, caching and retrieval of tiles).
         tm -> fc [label="load\\nstore\\nis_cached"];
         tm -> s  [label="get_map"]
     }
-    
+
 
 """
diff --git a/mapproxy/cache/compact.py b/mapproxy/cache/compact.py
new file mode 100644
index 0000000..3a82877
--- /dev/null
+++ b/mapproxy/cache/compact.py
@@ -0,0 +1,302 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement
+import errno
+import hashlib
+import os
+import shutil
+import struct
+
+from mapproxy.image import ImageSource
+from mapproxy.cache.base import TileCacheBase, tile_buffer
+from mapproxy.util.fs import ensure_directory, write_atomic
+from mapproxy.util.lock import FileLock
+from mapproxy.compat import BytesIO
+
+import logging
+log = logging.getLogger(__name__)
+
+
+class CompactCacheV1(TileCacheBase):
+    supports_timestamp = False
+
+    def __init__(self, cache_dir):
+        self.lock_cache_id = 'compactcache-' + hashlib.md5(cache_dir.encode('utf-8')).hexdigest()
+        self.cache_dir = cache_dir
+
+    def _get_bundle(self, tile_coord):
+        x, y, z = tile_coord
+
+        level_dir = os.path.join(self.cache_dir, 'L%02d' % z)
+
+        c = x // BUNDLEX_GRID_WIDTH * BUNDLEX_GRID_WIDTH
+        r = y // BUNDLEX_GRID_HEIGHT * BUNDLEX_GRID_HEIGHT
+
+        basename = 'R%04xC%04x' % (r, c)
+        return Bundle(os.path.join(level_dir, basename), offset=(c, r))
+
+    def is_cached(self, tile):
+        if tile.coord is None:
+            return True
+        if tile.source:
+            return True
+
+        return self._get_bundle(tile.coord).is_cached(tile)
+
+    def store_tile(self, tile):
+        if tile.stored:
+            return True
+
+        return self._get_bundle(tile.coord).store_tile(tile)
+
+    def load_tile(self, tile, with_metadata=False):
+        if tile.source or tile.coord is None:
+            return True
+
+        return self._get_bundle(tile.coord).load_tile(tile)
+
+    def remove_tile(self, tile):
+        if tile.coord is None:
+            return True
+
+        return self._get_bundle(tile.coord).remove_tile(tile)
+
+    def load_tile_metadata(self, tile):
+        if self.load_tile(tile):
+            tile.timestamp = -1
+
+    def remove_level_tiles_before(self, level, timestamp):
+        if timestamp == 0:
+            level_dir = os.path.join(self.cache_dir, 'L%02d' % level)
+            shutil.rmtree(level_dir, ignore_errors=True)
+            return True
+        return False
+
+BUNDLE_EXT = '.bundle'
+BUNDLEX_EXT = '.bundlx'
+
+class Bundle(object):
+    def __init__(self, base_filename, offset):
+        self.base_filename = base_filename
+        self.lock_filename = base_filename + '.lck'
+        self.offset = offset
+
+    def _rel_tile_coord(self, tile_coord):
+        return (
+            tile_coord[0] % BUNDLEX_GRID_WIDTH,
+            tile_coord[1] % BUNDLEX_GRID_HEIGHT,
+        )
+
+    def is_cached(self, tile):
+        if tile.source or tile.coord is None:
+            return True
+
+        idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
+        x, y = self._rel_tile_coord(tile.coord)
+        offset = idx.tile_offset(x, y)
+        if offset == 0:
+            return False
+
+        bundle = BundleData(self.base_filename + BUNDLE_EXT, self.offset)
+        size = bundle.read_size(offset)
+        return size != 0
+
+    def store_tile(self, tile):
+        if tile.stored:
+            return True
+
+        with tile_buffer(tile) as buf:
+            data = buf.read()
+
+        with FileLock(self.lock_filename):
+            bundle = BundleData(self.base_filename + BUNDLE_EXT, self.offset)
+            idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
+            x, y = self._rel_tile_coord(tile.coord)
+            offset = idx.tile_offset(x, y)
+            offset, size = bundle.append_tile(data, prev_offset=offset)
+            idx.update_tile_offset(x, y, offset=offset, size=size)
+
+        return True
+
+    def load_tile(self, tile, with_metadata=False):
+        if tile.source or tile.coord is None:
+            return True
+
+        idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
+        x, y = self._rel_tile_coord(tile.coord)
+        offset = idx.tile_offset(x, y)
+        if offset == 0:
+            return False
+
+        bundle = BundleData(self.base_filename + BUNDLE_EXT, self.offset)
+        data = bundle.read_tile(offset)
+        if not data:
+            return False
+        tile.source = ImageSource(BytesIO(data))
+
+        return True
+
+    def remove_tile(self, tile):
+        if tile.coord is None:
+            return True
+
+        with FileLock(self.lock_filename):
+            idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
+            x, y = self._rel_tile_coord(tile.coord)
+            idx.remove_tile_offset(x, y)
+
+        return True
+
+
+# Layout of the .bundlx index file: a fixed 16-byte header and footer
+# around 128 * 128 tile-offset entries of 5 bytes each.
+BUNDLEX_GRID_WIDTH = 128
+BUNDLEX_GRID_HEIGHT = 128
+BUNDLEX_HEADER_SIZE = 16
+BUNDLEX_HEADER = b'\x03\x00\x00\x00\x10\x00\x00\x00\x00\x40\x00\x00\x05\x00\x00\x00'
+BUNDLEX_FOOTER_SIZE = 16
+BUNDLEX_FOOTER = b'\x00\x00\x00\x00\x10\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00'
+
+class BundleIndex(object):
+    # Accessor for a .bundlx index file: 16-byte header, then one 5-byte
+    # little-endian bundle offset per tile (128x128 grid, grouped by
+    # column, see _tile_offset), then a 16-byte footer.
+    def __init__(self, filename):
+        self.filename = filename
+        # defer initialization to update/remove calls to avoid
+        # index creation on is_cached (prevents new files in read-only caches)
+        self._initialized = False
+
+    def _init_index(self):
+        # Create a fresh index file where each entry points at the
+        # corresponding zero-size slot of a freshly initialized bundle.
+        self._initialized = True
+        if os.path.exists(self.filename):
+            return
+        ensure_directory(self.filename)
+        buf = BytesIO()
+        buf.write(BUNDLEX_HEADER)
+        for i in range(BUNDLEX_GRID_WIDTH * BUNDLEX_GRID_HEIGHT):
+            # only the lower 5 bytes of the packed 8-byte value are stored
+            buf.write(struct.pack('<Q', (i*4)+BUNDLE_HEADER_SIZE)[:5])
+        buf.write(BUNDLEX_FOOTER)
+        write_atomic(self.filename, buf.getvalue())
+
+    def _tile_offset(self, x, y):
+        # Byte position of the (x, y) entry within the index file.
+        return BUNDLEX_HEADER_SIZE + (x * BUNDLEX_GRID_HEIGHT + y) * 5
+
+    def tile_offset(self, x, y):
+        # Return the tile's offset into the bundle file; 0 means missing.
+        idx_offset = self._tile_offset(x, y)
+        try:
+            with open(self.filename, 'rb') as f:
+                f.seek(idx_offset)
+                # pad the 5-byte stored value to 8 bytes for unpacking
+                offset = struct.unpack('<Q', f.read(5) + b'\x00\x00\x00')[0]
+            return offset
+        except IOError as ex:
+            if ex.errno == errno.ENOENT:
+                # missing bundle file -> missing tile
+                return 0
+            raise
+
+    def update_tile_offset(self, x, y, offset, size):
+        # Point the (x, y) entry at `offset`. The `size` is stored in the
+        # bundle data file itself, not in the index.
+        self._init_index()
+        idx_offset = self._tile_offset(x, y)
+        offset = struct.pack('<Q', offset)[:5]
+        with open(self.filename, 'r+b') as f:
+            f.seek(idx_offset, os.SEEK_SET)
+            f.write(offset)
+
+    def remove_tile_offset(self, x, y):
+        # Zero the entry to mark the tile as missing.
+        self._init_index()
+        idx_offset = self._tile_offset(x, y)
+        with open(self.filename, 'r+b') as f:
+            f.seek(idx_offset)
+            f.write(b'\x00' * 5)
+
+# The bundle file has a header with 15 little-endian long values (60 bytes).
+# NOTE: the fixed values might be some flags for image options (format, aliasing)
+# all files available for testing had the same values however.
+BUNDLE_HEADER_SIZE = 60
+BUNDLE_HEADER = [
+    3        , # 0,  fixed
+    16384    , # 1,  max. num of tiles 128*128 = 16384
+    16       , # 2,  size of largest tile
+    5        , # 3,  fixed
+    0        , # 4,  num of tiles in bundle (*4)
+    0        , # 5,  fixed
+    60+65536 , # 6,  bundle size: 60 byte header + 4 byte size slot for each of the 128*128 tiles
+    0        , # 7,  fixed
+    40       , # 8   fixed
+    0        , # 9,  fixed
+    16       , # 10, fixed
+    0        , # 11, y0
+    127      , # 12, y1
+    0        , # 13, x0
+    127      , # 14, x1
+]
+# 15 little-endian 4-byte signed longs, matching BUNDLE_HEADER above
+BUNDLE_HEADER_STRUCT_FORMAT = '<lllllllllllllll'
+
+class BundleData(object):
+    # Accessor for a .bundle data file: 60-byte header, a 4-byte size slot
+    # per tile, then appended (size, data) records. Data is only ever
+    # appended; replaced tiles leave dead space behind.
+    def __init__(self, filename, tile_offsets):
+        self.filename = filename
+        # (x0, y0) tile coordinate of this bundle's origin; written into
+        # the header fields 13/11 on initialization
+        self.tile_offsets = tile_offsets
+        if not os.path.exists(self.filename):
+            self._init_bundle()
+
+    def _init_bundle(self):
+        # Write a fresh bundle: header with this bundle's tile extent plus
+        # zeroed 4-byte size entries for all 128*128 tiles.
+        ensure_directory(self.filename)
+        header = list(BUNDLE_HEADER)
+        header[13], header[11] = self.tile_offsets
+        header[14], header[12] = header[13]+127, header[11]+127
+        write_atomic(self.filename,
+            struct.pack(BUNDLE_HEADER_STRUCT_FORMAT, *header) +
+            # zero-size entry for each tile
+            (b'\x00' * (BUNDLEX_GRID_HEIGHT * BUNDLEX_GRID_WIDTH * 4)))
+
+    def read_size(self, offset):
+        # Return the stored tile size at `offset` (0 -> no tile data).
+        with open(self.filename, 'rb') as f:
+            f.seek(offset)
+            return struct.unpack('<L', f.read(4))[0]
+
+    def read_tile(self, offset):
+        # Return the tile bytes stored at `offset`, or False for an
+        # empty (zero-size) entry.
+        with open(self.filename, 'rb') as f:
+            f.seek(offset)
+            size = struct.unpack('<L', f.read(4))[0]
+            if size <= 0:
+                return False
+            return f.read(size)
+
+    def append_tile(self, data, prev_offset):
+        # Append `data` to the end of the bundle and update the header
+        # counters. `prev_offset` is checked to see whether this replaces
+        # an existing tile (then the tile counter is not incremented).
+        # Returns (offset, size) of the stored record.
+        size = len(data)
+        is_new_tile = True
+        with open(self.filename, 'r+b') as f:
+            if prev_offset:
+                f.seek(prev_offset, os.SEEK_SET)
+                if f.tell() == prev_offset:
+                    # a non-zero stored size means a tile already exists here
+                    if struct.unpack('<L', f.read(4))[0] > 0:
+                        is_new_tile = False
+
+            f.seek(0, os.SEEK_END)
+            offset = f.tell()
+            if offset == 0:
+                # NOTE(review): only 16 bytes are written here although the
+                # regular header is 60 bytes; this branch looks unreachable
+                # because _init_bundle always pre-creates the file -- confirm.
+                f.write(b'\x00' * 16) # header
+                offset = 16
+            f.write(struct.pack('<L', size))
+            f.write(data)
+
+            # update header
+            f.seek(0, os.SEEK_SET)
+            header = list(struct.unpack(BUNDLE_HEADER_STRUCT_FORMAT, f.read(60)))
+            header[2] = max(header[2], size)  # size of largest tile
+            header[6] += size + 4             # total bundle size
+            if is_new_tile:
+                header[4] += 4                # tile count, stored * 4
+            f.seek(0, os.SEEK_SET)
+            f.write(struct.pack(BUNDLE_HEADER_STRUCT_FORMAT, *header))
+
+        return offset, size
diff --git a/mapproxy/cache/file.py b/mapproxy/cache/file.py
index 42aa4f5..51fe297 100644
--- a/mapproxy/cache/file.py
+++ b/mapproxy/cache/file.py
@@ -20,8 +20,8 @@ import hashlib
 
 from mapproxy.util.fs import ensure_directory, write_atomic
 from mapproxy.image import ImageSource, is_single_color_image
+from mapproxy.cache import path
 from mapproxy.cache.base import TileCacheBase, tile_buffer
-from mapproxy.compat import string_type
 
 import logging
 log = logging.getLogger('mapproxy.cache.file')
@@ -31,7 +31,7 @@ class FileCache(TileCacheBase):
     This class is responsible to store and load the actual tile data.
     """
     def __init__(self, cache_dir, file_ext, directory_layout='tc',
-                 link_single_color_images=False, lock_timeout=60.0):
+                 link_single_color_images=False):
         """
         :param cache_dir: the path where the tile will be stored
         :param file_ext: the file extension that will be appended to
@@ -42,179 +42,22 @@ class FileCache(TileCacheBase):
         self.cache_dir = cache_dir
         self.file_ext = file_ext
         self.link_single_color_images = link_single_color_images
+        self._tile_location, self._level_location = path.location_funcs(layout=directory_layout)
+        if self._level_location is None:
+            self.level_location = None # disable level based clean-ups
 
-        if directory_layout == 'tc':
-            self.tile_location = self._tile_location_tc
-            self.level_location = self._level_location
-        elif directory_layout == 'mp':
-            self.tile_location = self._tile_location_mp
-            self.level_location = self._level_location
-        elif directory_layout == 'tms':
-            self.tile_location = self._tile_location_tms
-            self.level_location = self._level_location_tms
-        elif directory_layout == 'quadkey':
-            self.tile_location = self._tile_location_quadkey
-            self.level_location = self._level_location
-        elif directory_layout == 'arcgis':
-            self.tile_location = self._tile_location_arcgiscache
-            self.level_location = self._level_location_arcgiscache
-        else:
-            raise ValueError('unknown directory_layout "%s"' % directory_layout)
+    def tile_location(self, tile, create_dir=False):
+        return self._tile_location(tile, self.cache_dir, self.file_ext, create_dir=create_dir)
 
-    def _level_location(self, level):
+    def level_location(self, level):
         """
         Return the path where all tiles for `level` will be stored.
 
         >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png')
-        >>> c._level_location(2)
+        >>> c.level_location(2)
         '/tmp/cache/02'
         """
-        if isinstance(level, string_type):
-            return os.path.join(self.cache_dir, level)
-        else:
-            return os.path.join(self.cache_dir, "%02d" % level)
-
-    def _tile_location_tc(self, tile, create_dir=False):
-        """
-        Return the location of the `tile`. Caches the result as ``location``
-        property of the `tile`.
-
-        :param tile: the tile object
-        :param create_dir: if True, create all necessary directories
-        :return: the full filename of the tile
-
-        >>> from mapproxy.cache.tile import Tile
-        >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png')
-        >>> c.tile_location(Tile((3, 4, 2))).replace('\\\\', '/')
-        '/tmp/cache/02/000/000/003/000/000/004.png'
-        """
-        if tile.location is None:
-            x, y, z = tile.coord
-            parts = (self._level_location(z),
-                     "%03d" % int(x / 1000000),
-                     "%03d" % (int(x / 1000) % 1000),
-                     "%03d" % (int(x) % 1000),
-                     "%03d" % int(y / 1000000),
-                     "%03d" % (int(y / 1000) % 1000),
-                     "%03d.%s" % (int(y) % 1000, self.file_ext))
-            tile.location = os.path.join(*parts)
-        if create_dir:
-            ensure_directory(tile.location)
-        return tile.location
-
-    def _tile_location_mp(self, tile, create_dir=False):
-        """
-        Return the location of the `tile`. Caches the result as ``location``
-        property of the `tile`.
-
-        :param tile: the tile object
-        :param create_dir: if True, create all necessary directories
-        :return: the full filename of the tile
-
-        >>> from mapproxy.cache.tile import Tile
-        >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png', directory_layout='mp')
-        >>> c.tile_location(Tile((3, 4, 2))).replace('\\\\', '/')
-        '/tmp/cache/02/0000/0003/0000/0004.png'
-        >>> c.tile_location(Tile((12345678, 98765432, 22))).replace('\\\\', '/')
-        '/tmp/cache/22/1234/5678/9876/5432.png'
-        """
-        if tile.location is None:
-            x, y, z = tile.coord
-            parts = (self._level_location(z),
-                     "%04d" % int(x / 10000),
-                     "%04d" % (int(x) % 10000),
-                     "%04d" % int(y / 10000),
-                     "%04d.%s" % (int(y) % 10000, self.file_ext))
-            tile.location = os.path.join(*parts)
-        if create_dir:
-            ensure_directory(tile.location)
-        return tile.location
-
-    def _tile_location_tms(self, tile, create_dir=False):
-        """
-        Return the location of the `tile`. Caches the result as ``location``
-        property of the `tile`.
-
-        :param tile: the tile object
-        :param create_dir: if True, create all necessary directories
-        :return: the full filename of the tile
-
-        >>> from mapproxy.cache.tile import Tile
-        >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png', directory_layout='tms')
-        >>> c.tile_location(Tile((3, 4, 2))).replace('\\\\', '/')
-        '/tmp/cache/2/3/4.png'
-        """
-        if tile.location is None:
-            x, y, z = tile.coord
-            tile.location = os.path.join(
-                self.level_location(str(z)),
-                str(x), str(y) + '.' + self.file_ext
-            )
-        if create_dir:
-            ensure_directory(tile.location)
-        return tile.location
-
-    def _level_location_tms(self, z):
-        return self._level_location(str(z))
-
-    def _tile_location_quadkey(self, tile, create_dir=False):
-        """
-        Return the location of the `tile`. Caches the result as ``location``
-        property of the `tile`.
-
-        :param tile: the tile object
-        :param create_dir: if True, create all necessary directories
-        :return: the full filename of the tile
-
-        >>> from mapproxy.cache.tile import Tile
-        >>> from mapproxy.cache.file import FileCache
-        >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png', directory_layout='quadkey')
-        >>> c.tile_location(Tile((3, 4, 2))).replace('\\\\', '/')
-        '/tmp/cache/11.png'
-        """
-        if tile.location is None:
-            x, y, z = tile.coord
-            quadKey = ""
-            for i in range(z,0,-1):
-                digit = 0
-                mask = 1 << (i-1)
-                if (x & mask) != 0:
-                    digit += 1
-                if (y & mask) != 0:
-                    digit += 2
-                quadKey += str(digit)
-            tile.location = os.path.join(
-                self.cache_dir, quadKey + '.' + self.file_ext
-            )
-        if create_dir:
-            ensure_directory(tile.location)
-        return tile.location
-
-    def _tile_location_arcgiscache(self, tile, create_dir=False):
-        """
-        Return the location of the `tile`. Caches the result as ``location``
-        property of the `tile`.
-
-        :param tile: the tile object
-        :param create_dir: if True, create all necessary directories
-        :return: the full filename of the tile
-
-        >>> from mapproxy.cache.tile import Tile
-        >>> from mapproxy.cache.file import FileCache
-        >>> c = FileCache(cache_dir='/tmp/cache/', file_ext='png', directory_layout='arcgis')
-        >>> c.tile_location(Tile((1234567, 87654321, 9))).replace('\\\\', '/')
-        '/tmp/cache/L09/R05397fb1/C0012d687.png'
-        """
-        if tile.location is None:
-            x, y, z = tile.coord
-            parts = (self._level_location_arcgiscache(z), 'R%08x' % y, 'C%08x.%s' % (x, self.file_ext))
-            tile.location = os.path.join(*parts)
-        if create_dir:
-            ensure_directory(tile.location)
-        return tile.location
-
-    def _level_location_arcgiscache(self, z):
-        return self._level_location('L%02d' % z)
+        return self._level_location(level, self.cache_dir)
 
     def _single_color_tile_location(self, color, create_dir=False):
         """
diff --git a/mapproxy/cache/geopackage.py b/mapproxy/cache/geopackage.py
new file mode 100644
index 0000000..4e9fc69
--- /dev/null
+++ b/mapproxy/cache/geopackage.py
@@ -0,0 +1,604 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2011-2013 Omniscale <http://omniscale.de>
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement
+
+import hashlib
+import logging
+import os
+import re
+import sqlite3
+import threading
+
+from mapproxy.cache.base import TileCacheBase, tile_buffer, REMOVE_ON_UNLOCK
+from mapproxy.compat import BytesIO, PY2, itertools
+from mapproxy.image import ImageSource
+from mapproxy.srs import get_epsg_num
+from mapproxy.util.fs import ensure_directory
+from mapproxy.util.lock import FileLock
+
+
+log = logging.getLogger(__name__)
+
+class GeopackageCache(TileCacheBase):
+    supports_timestamp = False
+
+    def __init__(self, geopackage_file, tile_grid, table_name, with_timestamps=False, timeout=30, wal=False):
+        """
+        :param geopackage_file: path of the GeoPackage (SQLite) file
+        :param tile_grid: tile grid the cached tiles are aligned to
+        :param table_name: name of the tile table inside the GeoPackage
+        :param with_timestamps: advertised timestamp support (see XXX below)
+        :param timeout: SQLite connection timeout in seconds
+        :param wal: enable SQLite write-ahead logging on initialization
+        """
+        self.tile_grid = tile_grid
+        self.table_name = self._check_table_name(table_name)
+        # cache-wide lock id, derived from the file path
+        self.lock_cache_id = 'gpkg' + hashlib.md5(geopackage_file.encode('utf-8')).hexdigest()
+        self.geopackage_file = geopackage_file
+        # XXX timestamps not implemented
+        self.supports_timestamp = with_timestamps
+        self.timeout = timeout
+        self.wal = wal
+        self.ensure_gpkg()
+        # one sqlite3 connection per thread (sqlite3 connections must not
+        # be shared across threads)
+        self._db_conn_cache = threading.local()
+
+    @property
+    def db(self):
+        # Lazily open one sqlite3 connection per thread; re-checks the
+        # GeoPackage file in case it was removed since construction.
+        if not getattr(self._db_conn_cache, 'db', None):
+            self.ensure_gpkg()
+            self._db_conn_cache.db = sqlite3.connect(self.geopackage_file, timeout=self.timeout)
+        return self._db_conn_cache.db
+
+    def cleanup(self):
+        """
+        Close the current thread's open connection and remove it from
+        the per-thread cache. Connections opened by other threads are
+        stored in their own thread-local slots and are not reachable here.
+        """
+        if getattr(self._db_conn_cache, 'db', None):
+            self._db_conn_cache.db.close()
+        self._db_conn_cache.db = None
+
+    @staticmethod
+    def _check_table_name(table_name):
+        """
+        Validate that `table_name` contains only safe characters; the name
+        is interpolated into SQL statements elsewhere in this class.
+
+        >>> GeopackageCache._check_table_name("test")
+        'test'
+        >>> GeopackageCache._check_table_name("test_2")
+        'test_2'
+        >>> GeopackageCache._check_table_name("test-2")
+        'test-2'
+        >>> GeopackageCache._check_table_name("test3;")
+        Traceback (most recent call last):
+        ...
+        ValueError: The table_name test3; contains unsupported characters.
+        >>> GeopackageCache._check_table_name("table name")
+        Traceback (most recent call last):
+        ...
+        ValueError: The table_name table name contains unsupported characters.
+
+        @param table_name: A desired name for a geopackage table.
+        @return: The name of the table if it is good, otherwise an exception.
+        """
+        # Regex string indicating table names which will be accepted.
+        regex_str = '^[a-zA-Z0-9_-]+$'
+        if re.match(regex_str, table_name):
+            return table_name
+        else:
+            msg = ("The table name may only contain alphanumeric characters, an underscore, "
+                   "or a dash: {}".format(regex_str))
+            log.info(msg)
+            raise ValueError("The table_name {0} contains unsupported characters.".format(table_name))
+
+    def ensure_gpkg(self):
+        # Create the GeoPackage file and its metadata tables if missing,
+        # or re-initialize it when validation fails.
+        if not os.path.isfile(self.geopackage_file):
+            # guard concurrent creation with a lock file
+            with FileLock(self.geopackage_file + '.init.lck',
+                          remove_on_unlock=REMOVE_ON_UNLOCK):
+                ensure_directory(self.geopackage_file)
+                self._initialize_gpkg()
+        else:
+            if not self.check_gpkg():
+                # NOTE(review): this re-initialization path is not guarded
+                # by the init lock -- confirm concurrent access is safe.
+                ensure_directory(self.geopackage_file)
+                self._initialize_gpkg()
+
+    def check_gpkg(self):
+        # Return True when the existing file matches the configured table
+        # name, contents/SRS metadata and tile matrix; the _verify_*
+        # helpers raise ValueError on hard configuration conflicts.
+        if not self._verify_table():
+            return False
+        if not self._verify_gpkg_contents():
+            return False
+        if not self._verify_tile_size():
+            return False
+        return True
+
+    def _verify_table(self):
+        # Check that the configured tile table exists in the SQLite file.
+        with sqlite3.connect(self.geopackage_file) as db:
+            cur = db.execute("""SELECT name FROM sqlite_master WHERE type='table' AND name=?""",
+                             (self.table_name,))
+            content = cur.fetchone()
+            if not content:
+                # Table doesn't exist _initialize_gpkg will create a new one.
+                return False
+            return True
+
+    def _verify_gpkg_contents(self):
+        # Check the gpkg_contents entry for our table: the data type must
+        # be 'tiles' and the referenced SRS must match the configured grid.
+        with sqlite3.connect(self.geopackage_file) as db:
+            cur = db.execute("""SELECT * FROM gpkg_contents WHERE table_name = ?"""
+                             , (self.table_name,))
+
+        # NOTE: `db` and `cur` stay usable after the with-block; sqlite3's
+        # connection context manager commits but does not close.
+        results = cur.fetchone()
+        if not results:
+            # Table doesn't exist in gpkg_contents _initialize_gpkg will add it.
+            return False
+        gpkg_data_type = results[1]
+        gpkg_srs_id = results[9]
+        cur = db.execute("""SELECT * FROM gpkg_spatial_ref_sys WHERE srs_id = ?"""
+                         , (gpkg_srs_id,))
+
+        # NOTE(review): assumes a matching gpkg_spatial_ref_sys row exists;
+        # fetchone() returning None would raise TypeError here -- confirm.
+        gpkg_coordsys_id = cur.fetchone()[3]
+        if gpkg_data_type.lower() != "tiles":
+            log.info("The geopackage table name already exists for a data type other than tiles.")
+            raise ValueError("table_name is improperly configured.")
+        if gpkg_coordsys_id != get_epsg_num(self.tile_grid.srs.srs_code):
+            log.info(
+                "The geopackage {0} table name {1} already exists and has an SRS of {2}, which does not match the configured" \
+                " Mapproxy SRS of {3}.".format(self.geopackage_file, self.table_name, gpkg_coordsys_id,
+                                              get_epsg_num(self.tile_grid.srs.srs_code)))
+            raise ValueError("srs is improperly configured.")
+        return True
+
+    def _verify_tile_size(self):
+        with sqlite3.connect(self.geopackage_file) as db:
+            cur = db.execute(
+                """SELECT * FROM gpkg_tile_matrix WHERE table_name = ?""",
+                (self.table_name,))
+
+        results = cur.fetchall()
+        results = results[0]
+        tile_size = self.tile_grid.tile_size
+
+        if not results:
+            # There is no tile conflict. Return to allow the creation of new tiles.
+            return True
+
+        gpkg_table_name, gpkg_zoom_level, gpkg_matrix_width, gpkg_matrix_height, gpkg_tile_width, gpkg_tile_height, \
+            gpkg_pixel_x_size, gpkg_pixel_y_size = results
+        resolution = self.tile_grid.resolution(gpkg_zoom_level)
+        if gpkg_tile_width != tile_size[0] or gpkg_tile_height != tile_size[1]:
+            log.info(
+                "The geopackage {0} table name {1} already exists and has tile sizes of ({2},{3})"
+                " which is different than the configure tile sizes of ({4},{5}).".format(self.geopackage_file,
+                                                                                       self.table_name,
+                                                                                       gpkg_tile_width,
+                                                                                       gpkg_tile_height,
+                                                                                       tile_size[0],
+                                                                                       tile_size[1]))
+            log.info("The current mapproxy configuration is invalid for this geopackage.")
+            raise ValueError("tile_size is improperly configured.")
+        if not is_close(gpkg_pixel_x_size, resolution) or not is_close(gpkg_pixel_y_size, resolution):
+            log.info(
+                "The geopackage {0} table name {1} already exists and level {2} a resolution of ({3:.13f},{4:.13f})"
+                " which is different than the configured resolution of ({5:.13f},{6:.13f}).".format(self.geopackage_file,
+                                                                                                  self.table_name,
+                                                                                                  gpkg_zoom_level,
+                                                                                                  gpkg_pixel_x_size,
+                                                                                                  gpkg_pixel_y_size,
+                                                                                                  resolution,
+                                                                                                  resolution))
+            log.info("The current mapproxy configuration is invalid for this geopackage.")
+            raise ValueError("res is improperly configured.")
+        return True
+
+    def _initialize_gpkg(self):
+        log.info('initializing Geopackage file %s', self.geopackage_file)
+        db = sqlite3.connect(self.geopackage_file)
+
+        if self.wal:
+            db.execute('PRAGMA journal_mode=wal')
+
+        proj = get_epsg_num(self.tile_grid.srs.srs_code)
+        stmts = ["""
+                CREATE TABLE IF NOT EXISTS gpkg_contents
+                    (table_name  TEXT     NOT NULL PRIMARY KEY,                                    -- The name of the tiles, or feature table
+                     data_type   TEXT     NOT NULL,                                                -- Type of data stored in the table: "features" per clause Features (http://www.geopackage.org/spec/#features), "tiles" per clause Tiles (http://www.geopackage.org/spec/#tiles), or an implementer-defined value for other data tables per clause in an Extended GeoPackage
+                     identifier  TEXT     UNIQUE,                                                  -- A human-readable identifier (e.g. short name) for the table_name content
+                     description TEXT     DEFAULT '',                                              -- A human-readable description for the table_name content
+                     last_change DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')), -- Timestamp value in ISO 8601 format as defined by the strftime function %Y-%m-%dT%H:%M:%fZ format string applied to the current time
+                     min_x       DOUBLE,                                                           -- Bounding box minimum easting or longitude for all content in table_name
+                     min_y       DOUBLE,                                                           -- Bounding box minimum northing or latitude for all content in table_name
+                     max_x       DOUBLE,                                                           -- Bounding box maximum easting or longitude for all content in table_name
+                     max_y       DOUBLE,                                                           -- Bounding box maximum northing or latitude for all content in table_name
+                     srs_id      INTEGER,                                                          -- Spatial Reference System ID: gpkg_spatial_ref_sys.srs_id; when data_type is features, SHALL also match gpkg_geometry_columns.srs_id; When data_type is tiles, SHALL also match gpkg_tile_matrix_set.srs.id
+                     CONSTRAINT fk_gc_r_srs_id FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys(srs_id))
+                """,
+                 """
+                 CREATE TABLE IF NOT EXISTS gpkg_spatial_ref_sys
+                     (srs_name                 TEXT    NOT NULL,             -- Human readable name of this SRS (Spatial Reference System)
+                      srs_id                   INTEGER NOT NULL PRIMARY KEY, -- Unique identifier for each Spatial Reference System within a GeoPackage
+                      organization             TEXT    NOT NULL,             -- Case-insensitive name of the defining organization e.g. EPSG or epsg
+                      organization_coordsys_id INTEGER NOT NULL,             -- Numeric ID of the Spatial Reference System assigned by the organization
+                      definition               TEXT    NOT NULL,             -- Well-known Text representation of the Spatial Reference System
+                      description              TEXT)
+                  """,
+                 """
+                 CREATE TABLE IF NOT EXISTS gpkg_tile_matrix
+                     (table_name    TEXT    NOT NULL, -- Tile Pyramid User Data Table Name
+                      zoom_level    INTEGER NOT NULL, -- 0 <= zoom_level <= max_level for table_name
+                      matrix_width  INTEGER NOT NULL, -- Number of columns (>= 1) in tile matrix at this zoom level
+                      matrix_height INTEGER NOT NULL, -- Number of rows (>= 1) in tile matrix at this zoom level
+                      tile_width    INTEGER NOT NULL, -- Tile width in pixels (>= 1) for this zoom level
+                      tile_height   INTEGER NOT NULL, -- Tile height in pixels (>= 1) for this zoom level
+                      pixel_x_size  DOUBLE  NOT NULL, -- In t_table_name srid units or default meters for srid 0 (>0)
+                      pixel_y_size  DOUBLE  NOT NULL, -- In t_table_name srid units or default meters for srid 0 (>0)
+                      CONSTRAINT pk_ttm PRIMARY KEY (table_name, zoom_level), CONSTRAINT fk_tmm_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name))
+                  """,
+                 """
+                         CREATE TABLE IF NOT EXISTS gpkg_tile_matrix_set
+                 (table_name TEXT    NOT NULL PRIMARY KEY, -- Tile Pyramid User Data Table Name
+                  srs_id     INTEGER NOT NULL,             -- Spatial Reference System ID: gpkg_spatial_ref_sys.srs_id
+                  min_x      DOUBLE  NOT NULL,             -- Bounding box minimum easting or longitude for all content in table_name
+                  min_y      DOUBLE  NOT NULL,             -- Bounding box minimum northing or latitude for all content in table_name
+                  max_x      DOUBLE  NOT NULL,             -- Bounding box maximum easting or longitude for all content in table_name
+                  max_y      DOUBLE  NOT NULL,             -- Bounding box maximum northing or latitude for all content in table_name
+                  CONSTRAINT fk_gtms_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name), CONSTRAINT fk_gtms_srs FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys (srs_id))
+                  """,
+                 """
+                 CREATE TABLE IF NOT EXISTS [{0}]
+                    (id          INTEGER PRIMARY KEY AUTOINCREMENT, -- Autoincrement primary key
+                     zoom_level  INTEGER NOT NULL,                  -- min(zoom_level) <= zoom_level <= max(zoom_level) for t_table_name
+                     tile_column INTEGER NOT NULL,                  -- 0 to tile_matrix matrix_width - 1
+                     tile_row    INTEGER NOT NULL,                  -- 0 to tile_matrix matrix_height - 1
+                     tile_data   BLOB    NOT NULL,                  -- Of an image MIME type specified in clauses Tile Encoding PNG, Tile Encoding JPEG, Tile Encoding WEBP
+                     UNIQUE (zoom_level, tile_column, tile_row))
+                  """.format(self.table_name)
+                 ]
+
+        for stmt in stmts:
+            db.execute(stmt)
+
+        db.execute("PRAGMA foreign_keys = 1;")
+
+        # List of WKT execute statements and data.("""
+        wkt_statement = """
+                            INSERT OR REPLACE INTO gpkg_spatial_ref_sys (
+                                srs_id,
+                                organization,
+                                organization_coordsys_id,
+                                srs_name,
+                                definition)
+                            VALUES (?, ?, ?, ?, ?)
+                        """
+        wkt_entries = [(3857, 'epsg', 3857, 'WGS 84 / Pseudo-Mercator',
+                        """
+PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,\
+AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],\
+UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","9122"]]AUTHORITY["EPSG","4326"]],\
+PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],\
+PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH]\
+                        """
+                        ),
+                       (4326, 'epsg', 4326, 'WGS 84',
+                        """
+GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],\
+AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,\
+AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]\
+                        """
+                        ),
+                       (-1, 'NONE', -1, ' ', 'undefined'),
+                       (0, 'NONE', 0, ' ', 'undefined')
+                       ]
+
+        if get_epsg_num(self.tile_grid.srs.srs_code) not in [4326, 3857]:
+            wkt_entries.append((proj, 'epsg', proj, 'Not provided', "Added via Mapproxy."))
+        db.commit()
+
+        # Add geopackage version to the header (1.0)
+        db.execute("PRAGMA application_id = 1196437808;")
+        db.commit()
+
+        for wkt_entry in wkt_entries:
+            try:
+                db.execute(wkt_statement, (wkt_entry[0], wkt_entry[1], wkt_entry[2], wkt_entry[3], wkt_entry[4]))
+            except sqlite3.IntegrityError:
+                log.info("srs_id already exists.".format(wkt_entry[0]))
+        db.commit()
+
+        # Ensure that tile table exists here, don't overwrite a valid entry.
+        try:
+            db.execute("""
+                        INSERT INTO gpkg_contents (
+                            table_name,
+                            data_type,
+                            identifier,
+                            description,
+                            min_x,
+                            max_x,
+                            min_y,
+                            max_y,
+                            srs_id)
+                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);
+                        """, (self.table_name,
+                              "tiles",
+                              self.table_name,
+                              "Created with Mapproxy.",
+                              self.tile_grid.bbox[0],
+                              self.tile_grid.bbox[2],
+                              self.tile_grid.bbox[1],
+                              self.tile_grid.bbox[3],
+                              proj))
+        except sqlite3.IntegrityError:
+            pass
+        db.commit()
+
+        # Ensure that tile set exists here, don't overwrite a valid entry.
+        try:
+            db.execute("""
+                INSERT INTO gpkg_tile_matrix_set (table_name, srs_id, min_x, max_x, min_y, max_y)
+                VALUES (?, ?, ?, ?, ?, ?);
+            """, (
+                self.table_name, proj, self.tile_grid.bbox[0], self.tile_grid.bbox[2], self.tile_grid.bbox[1],
+                self.tile_grid.bbox[3]))
+        except sqlite3.IntegrityError:
+            pass
+        db.commit()
+
+        tile_size = self.tile_grid.tile_size
+        for grid, resolution, level in zip(self.tile_grid.grid_sizes,
+                                           self.tile_grid.resolutions, range(20)):
+            db.execute("""INSERT OR REPLACE INTO gpkg_tile_matrix
+                              (table_name, zoom_level, matrix_width, matrix_height, tile_width, tile_height, pixel_x_size, pixel_y_size)
+                              VALUES(?, ?, ?, ?, ?, ?, ?, ?)
+                              """,
+                       (self.table_name, level, grid[0], grid[1], tile_size[0], tile_size[1], resolution, resolution))
+        db.commit()
+        db.close()
+
+    def is_cached(self, tile):
+        if tile.coord is None:
+            return True
+        if tile.source:
+            return True
+
+        return self.load_tile(tile)
+
+
    def store_tile(self, tile):
        """Store a single `tile`; returns True on success or if already stored."""
        if tile.stored:
            return True
        return self._store_bulk([tile])
+
    def store_tiles(self, tiles):
        """Store all not-yet-stored `tiles` in one bulk transaction."""
        tiles = [t for t in tiles if not t.stored]
        return self._store_bulk(tiles)
+
+
    def _store_bulk(self, tiles):
        """
        Encode all `tiles` and insert them with a single executemany call.

        :return: False if the insert failed (e.g. locked database), True otherwise.
        """
        records = []
        # tile_buffer (as_buffer) will encode the tile to the target format
        # we collect all tiles before, to avoid having the db transaction
        # open during this slow encoding
        for tile in tiles:
            with tile_buffer(tile) as buf:
                if PY2:
                    # sqlite3 on Python 2 requires buffer() for BLOB values
                    content = buffer(buf.read())
                else:
                    content = buf.read()
                x, y, level = tile.coord
                records.append((level, x, y, content))

        cursor = self.db.cursor()
        try:
            stmt = "INSERT OR REPLACE INTO [{0}] (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)".format(
                    self.table_name)
            cursor.executemany(stmt, records)
            self.db.commit()
        except sqlite3.OperationalError as ex:
            log.warn('unable to store tile: %s', ex)
            return False
        return True
+
    def load_tile(self, tile, with_metadata=False):
        """
        Load the tile data into ``tile.source``.

        :param with_metadata: ignored, the GeoPackage schema stores no
            per-tile metadata
        :return: True if the tile was loaded (or needed no loading),
            False if it is not in the cache
        """
        if tile.source or tile.coord is None:
            return True

        cur = self.db.cursor()
        cur.execute("""SELECT tile_data FROM [{0}]
                WHERE tile_column = ? AND
                      tile_row = ? AND
                      zoom_level = ?""".format(self.table_name), tile.coord)

        content = cur.fetchone()
        if content:
            tile.source = ImageSource(BytesIO(content[0]))
            return True
        else:
            return False
+
+    def load_tiles(self, tiles, with_metadata=False):
+        # associate the right tiles with the cursor
+        tile_dict = {}
+        coords = []
+        for tile in tiles:
+            if tile.source or tile.coord is None:
+                continue
+            x, y, level = tile.coord
+            coords.append(x)
+            coords.append(y)
+            coords.append(level)
+            tile_dict[(x, y)] = tile
+
+        if not tile_dict:
+            # all tiles loaded or coords are None
+            return True
+
+        stmt_base = "SELECT tile_column, tile_row, tile_data FROM [{0}] WHERE ".format(self.table_name)
+
+        loaded_tiles = 0
+
+        # SQLite is limited to 1000 args -> split into multiple requests if more arguments are needed
+        while coords:
+            cur_coords = coords[:999]
+
+            stmt = stmt_base + ' OR '.join(
+                ['(tile_column = ? AND tile_row = ? AND zoom_level = ?)'] * (len(cur_coords) // 3))
+
+            cursor = self.db.cursor()
+            cursor.execute(stmt, cur_coords)
+
+            for row in cursor:
+                loaded_tiles += 1
+                tile = tile_dict[(row[0], row[1])]
+                data = row[2]
+                tile.size = len(data)
+                tile.source = ImageSource(BytesIO(data))
+            cursor.close()
+
+            coords = coords[999:]
+
+        return loaded_tiles == len(tile_dict)
+
+    def remove_tile(self, tile):
+        cursor = self.db.cursor()
+        cursor.execute(
+            "DELETE FROM [{0}] WHERE (tile_column = ? AND tile_row = ? AND zoom_level = ?)".format(self.table_name),
+            tile.coord)
+        self.db.commit()
+        if cursor.rowcount:
+            return True
+        return False
+
+    def remove_level_tiles_before(self, level, timestamp):
+        if timestamp == 0:
+            cursor = self.db.cursor()
+            cursor.execute(
+                "DELETE FROM [{0}] WHERE (zoom_level = ?)".format(self.table_name), (level,))
+            self.db.commit()
+            log.info("Cursor rowcount = {0}".format(cursor.rowcount))
+            if cursor.rowcount:
+                return True
+            return False
+
    def load_tile_metadata(self, tile):
        # the schema has no extra metadata; loading the tile data is all we can do
        self.load_tile(tile)
+
+
class GeopackageLevelCache(TileCacheBase):
    """
    GeoPackage cache that stores each zoom level in its own ``.gpkg`` file.

    All tile operations are delegated to a per-level `GeopackageCache`,
    which is created lazily and kept open for reuse.
    """

    def __init__(self, geopackage_dir, tile_grid, table_name, timeout=30, wal=False):
        self.lock_cache_id = 'gpkg-' + hashlib.md5(geopackage_dir.encode('utf-8')).hexdigest()
        self.cache_dir = geopackage_dir
        self.tile_grid = tile_grid
        self.table_name = table_name
        self.timeout = timeout
        self.wal = wal
        self._geopackage = {}
        self._geopackage_lock = threading.Lock()

    def _get_level(self, level):
        """Return (and lazily create) the `GeopackageCache` for `level`."""
        # fast path without locking; _geopackage is only ever extended
        if level in self._geopackage:
            return self._geopackage[level]

        with self._geopackage_lock:
            if level not in self._geopackage:
                geopackage_filename = os.path.join(self.cache_dir, '%s.gpkg' % level)
                self._geopackage[level] = GeopackageCache(
                    geopackage_filename,
                    self.tile_grid,
                    self.table_name,
                    with_timestamps=True,
                    timeout=self.timeout,
                    wal=self.wal,
                )

        return self._geopackage[level]

    def cleanup(self):
        """
        Close all open connection and remove them from cache.
        """
        with self._geopackage_lock:
            for gp in self._geopackage.values():
                gp.cleanup()

    def is_cached(self, tile):
        if tile.coord is None:
            return True
        if tile.source:
            return True

        return self._get_level(tile.coord[2]).is_cached(tile)

    def store_tile(self, tile):
        if tile.stored:
            return True

        return self._get_level(tile.coord[2]).store_tile(tile)

    def store_tiles(self, tiles):
        """Store `tiles` grouped by level; True only if every group succeeded."""
        failed = False
        for level, level_tiles in itertools.groupby(tiles, key=lambda t: t.coord[2]):
            level_tiles = [t for t in level_tiles if not t.stored]
            res = self._get_level(level).store_tiles(level_tiles)
            if not res:
                failed = True
        # bug fix: this previously returned `failed`, i.e. False on success
        # and True on failure, inverting the store_tiles contract
        return not failed

    def load_tile(self, tile, with_metadata=False):
        if tile.source or tile.coord is None:
            return True

        return self._get_level(tile.coord[2]).load_tile(tile, with_metadata=with_metadata)

    def load_tiles(self, tiles, with_metadata=False):
        # find the level of the first tile that still needs loading;
        # must compare against None explicitly, zoom level 0 is valid
        level = None
        for tile in tiles:
            if tile.source or tile.coord is None:
                continue
            level = tile.coord[2]
            break

        if level is None:
            return True

        return self._get_level(level).load_tiles(tiles, with_metadata=with_metadata)

    def remove_tile(self, tile):
        if tile.coord is None:
            return True

        return self._get_level(tile.coord[2]).remove_tile(tile)

    def remove_level_tiles_before(self, level, timestamp):
        level_cache = self._get_level(level)
        if timestamp == 0:
            # purge the whole level by deleting its file
            level_cache.cleanup()
            os.unlink(level_cache.geopackage_file)
            return True
        else:
            return level_cache.remove_level_tiles_before(level, timestamp)
+
+
def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
    """
    Backport of ``math.isclose`` (PEP 485) for legacy Python versions.

    >>> is_close(0.0, 0.0)
    True
    >>> is_close(1, 1.0)
    True
    >>> is_close(0.01, 0.001)
    False
    >>> is_close(0.0001001, 0.0001, rel_tol=1e-02)
    True
    >>> is_close(0.0001001, 0.0001)
    False

    @param a: An int or float.
    @param b: An int or float.
    @param rel_tol: Relative tolerance - largest allowed difference relative
        to the larger of a and b.
    @param abs_tol: Absolute tolerance - minimum absolute tolerance.
    @return: True if the values a and b are close.
    """
    tolerance = max(rel_tol * max(abs(a), abs(b)), abs_tol)
    return abs(a - b) <= tolerance
diff --git a/mapproxy/cache/mbtiles.py b/mapproxy/cache/mbtiles.py
index ebc1687..5af49f6 100644
--- a/mapproxy/cache/mbtiles.py
+++ b/mapproxy/cache/mbtiles.py
@@ -21,10 +21,10 @@ import threading
 import time
 
 from mapproxy.image import ImageSource
-from mapproxy.cache.base import TileCacheBase, tile_buffer, CacheBackendError
+from mapproxy.cache.base import TileCacheBase, tile_buffer, REMOVE_ON_UNLOCK
 from mapproxy.util.fs import ensure_directory
 from mapproxy.util.lock import FileLock
-from mapproxy.compat import BytesIO, PY2
+from mapproxy.compat import BytesIO, PY2, itertools
 
 import logging
 log = logging.getLogger(__name__)
@@ -38,10 +38,12 @@ def sqlite_datetime_to_timestamp(datetime):
 class MBTilesCache(TileCacheBase):
     supports_timestamp = False
 
-    def __init__(self, mbtile_file, with_timestamps=False):
+    def __init__(self, mbtile_file, with_timestamps=False, timeout=30, wal=False):
         self.lock_cache_id = 'mbtiles-' + hashlib.md5(mbtile_file.encode('utf-8')).hexdigest()
         self.mbtile_file = mbtile_file
         self.supports_timestamp = with_timestamps
+        self.timeout = timeout
+        self.wal = wal
         self.ensure_mbtile()
         self._db_conn_cache = threading.local()
 
@@ -49,7 +51,7 @@ class MBTilesCache(TileCacheBase):
     def db(self):
         if not getattr(self._db_conn_cache, 'db', None):
             self.ensure_mbtile()
-            self._db_conn_cache.db = sqlite3.connect(self.mbtile_file)
+            self._db_conn_cache.db = sqlite3.connect(self.mbtile_file, self.timeout)
         return self._db_conn_cache.db
 
     def cleanup(self):
@@ -62,8 +64,8 @@ class MBTilesCache(TileCacheBase):
 
     def ensure_mbtile(self):
         if not os.path.exists(self.mbtile_file):
-            with FileLock(os.path.join(os.path.dirname(self.mbtile_file), 'init.lck'),
-                remove_on_unlock=True):
+            with FileLock(self.mbtile_file + '.init.lck',
+                remove_on_unlock=REMOVE_ON_UNLOCK):
                 if not os.path.exists(self.mbtile_file):
                     ensure_directory(self.mbtile_file)
                     self._initialize_mbtile()
@@ -71,6 +73,10 @@ class MBTilesCache(TileCacheBase):
     def _initialize_mbtile(self):
         log.info('initializing MBTile file %s', self.mbtile_file)
         db  = sqlite3.connect(self.mbtile_file)
+
+        if self.wal:
+            db.execute('PRAGMA journal_mode=wal')
+
         stmt = """
             CREATE TABLE tiles (
                 zoom_level integer,
@@ -135,25 +141,42 @@ class MBTilesCache(TileCacheBase):
     def store_tile(self, tile):
         if tile.stored:
             return True
-        with tile_buffer(tile) as buf:
-            if PY2:
-                content = buffer(buf.read())
-            else:
-                content = buf.read()
-            x, y, level = tile.coord
-            cursor = self.db.cursor()
-            try:
+        return self._store_bulk([tile])
+
+    def store_tiles(self, tiles):
+        tiles = [t for t in tiles if not t.stored]
+        return self._store_bulk(tiles)
+
+    def _store_bulk(self, tiles):
+        records = []
+        # tile_buffer (as_buffer) will encode the tile to the target format
+        # we collect all tiles before, to avoid having the db transaction
+        # open during this slow encoding
+        for tile in tiles:
+            with tile_buffer(tile) as buf:
+                if PY2:
+                    content = buffer(buf.read())
+                else:
+                    content = buf.read()
+                x, y, level = tile.coord
                 if self.supports_timestamp:
-                    stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
-                    cursor.execute(stmt, (level, x, y, content, time.time()))
+                    records.append((level, x, y, content, time.time()))
                 else:
-                    stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
-                    cursor.execute(stmt, (level, x, y, content))
-                self.db.commit()
-            except sqlite3.OperationalError as ex:
-                log.warn('unable to store tile: %s', ex)
-                return False
-            return True
+                    records.append((level, x, y, content))
+
+        cursor = self.db.cursor()
+        try:
+            if self.supports_timestamp:
+                stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
+                cursor.executemany(stmt, records)
+            else:
+                stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
+                cursor.executemany(stmt, records)
+            self.db.commit()
+        except sqlite3.OperationalError as ex:
+            log.warn('unable to store tile: %s', ex)
+            return False
+        return True
 
     def load_tile(self, tile, with_metadata=False):
         if tile.source or tile.coord is None:
@@ -271,10 +294,12 @@ class MBTilesCache(TileCacheBase):
 class MBTilesLevelCache(TileCacheBase):
     supports_timestamp = True
 
-    def __init__(self, mbtiles_dir):
+    def __init__(self, mbtiles_dir, timeout=30, wal=False):
         self.lock_cache_id = 'sqlite-' + hashlib.md5(mbtiles_dir.encode('utf-8')).hexdigest()
         self.cache_dir = mbtiles_dir
         self._mbtiles = {}
+        self.timeout = timeout
+        self.wal = wal
         self._mbtiles_lock = threading.Lock()
 
     def _get_level(self, level):
@@ -287,6 +312,8 @@ class MBTilesLevelCache(TileCacheBase):
                 self._mbtiles[level] = MBTilesCache(
                     mbtile_filename,
                     with_timestamps=True,
+                    timeout=self.timeout,
+                    wal=self.wal,
                 )
 
         return self._mbtiles[level]
@@ -313,6 +340,14 @@ class MBTilesLevelCache(TileCacheBase):
 
         return self._get_level(tile.coord[2]).store_tile(tile)
 
+    def store_tiles(self, tiles):
+        failed = False
+        for level, tiles in itertools.groupby(tiles, key=lambda t: t.coord[2]):
+            tiles = [t for t in tiles if not t.stored]
+            res = self._get_level(level).store_tiles(tiles)
+            if not res: failed = True
+        return failed
+
     def load_tile(self, tile, with_metadata=False):
         if tile.source or tile.coord is None:
             return True
diff --git a/mapproxy/cache/path.py b/mapproxy/cache/path.py
new file mode 100644
index 0000000..540551b
--- /dev/null
+++ b/mapproxy/cache/path.py
@@ -0,0 +1,226 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2010-2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from mapproxy.compat import string_type
+from mapproxy.util.fs import ensure_directory
+
+
def location_funcs(layout):
    """
    Return the ``(tile_location, level_location)`` function pair for `layout`.

    :raises ValueError: for an unknown `layout`
    """
    if layout == 'tc':
        return tile_location_tc, level_location
    elif layout == 'mp':
        return tile_location_mp, level_location
    elif layout == 'tms':
        # tile_location_tms stores levels in unpadded directories ('2'),
        # so the level function must not zero-pad to '02' as level_location
        # does; use the matching level_location_tms (defined below)
        return tile_location_tms, level_location_tms
    elif layout == 'reverse_tms':
        return tile_location_reverse_tms, None
    elif layout == 'quadkey':
        return tile_location_quadkey, no_level_location
    elif layout == 'arcgis':
        return tile_location_arcgiscache, level_location_arcgiscache
    else:
        raise ValueError('unknown directory_layout "%s"' % layout)
+
def level_location(level, cache_dir):
    """
    Return the path where all tiles for `level` will be stored.

    Integer levels are zero-padded to two digits; string levels are
    used as-is.

    >>> level_location(2, '/tmp/cache')
    '/tmp/cache/02'
    """
    if isinstance(level, string_type):
        return os.path.join(cache_dir, level)
    else:
        return os.path.join(cache_dir, "%02d" % level)
+
+
def level_part(level):
    """
    Return the directory name for `level` (zero-padded for ints,
    unchanged for strings).

    >>> level_part(2)
    '02'
    >>> level_part('2')
    '2'
    """
    if isinstance(level, string_type):
        return level
    else:
        return "%02d" % level
+
+
def tile_location_tc(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` in TileCache layout. Caches the result
    as ``location`` property of the `tile`.

    Each of x and y is split into three zero-padded groups of three digits,
    which keeps the number of entries per directory small.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_tc(Tile((3, 4, 2)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/02/000/000/003/000/000/004.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        # NOTE(review): int(x / 1000000) uses float division on Python 3;
        # exact for realistic tile coords, presumably never near 2**53 -- confirm
        parts = (cache_dir,
                level_part(z),
                 "%03d" % int(x / 1000000),
                 "%03d" % (int(x / 1000) % 1000),
                 "%03d" % (int(x) % 1000),
                 "%03d" % int(y / 1000000),
                 "%03d" % (int(y / 1000) % 1000),
                 "%03d.%s" % (int(y) % 1000, file_ext))
        tile.location = os.path.join(*parts)
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def tile_location_mp(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` in MapProxy layout. Caches the result
    as ``location`` property of the `tile`.

    Each of x and y is split into two zero-padded groups of four digits.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_mp(Tile((3, 4, 2)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/02/0000/0003/0000/0004.png'
    >>> tile_location_mp(Tile((12345678, 98765432, 22)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/22/1234/5678/9876/5432.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        parts = (cache_dir,
                level_part(z),
                 "%04d" % int(x / 10000),
                 "%04d" % (int(x) % 10000),
                 "%04d" % int(y / 10000),
                 "%04d.%s" % (int(y) % 10000, file_ext))
        tile.location = os.path.join(*parts)
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def tile_location_tms(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` in TMS layout (``z/x/y.ext`` with
    unpadded level numbers). Caches the result as ``location`` property
    of the `tile`.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_tms(Tile((3, 4, 2)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/2/3/4.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        # str(z) keeps the level directory unpadded ('2', not '02')
        filename = '%s.%s' % (y, file_ext)
        tile.location = os.path.join(cache_dir, level_part(str(z)), str(x), filename)
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def tile_location_reverse_tms(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` in reversed TMS layout (``y/x/z.ext``).
    Caches the result as ``location`` property of the `tile`.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_reverse_tms(Tile((3, 4, 2)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/4/3/2.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        filename = '%s.%s' % (z, file_ext)
        tile.location = os.path.join(cache_dir, str(y), str(x), filename)
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def level_location_tms(level, cache_dir):
    # TMS layouts use the unpadded level number as directory name ('2', not '02')
    return level_location(str(level), cache_dir=cache_dir)
+
def tile_location_quadkey(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` as a quadkey filename (all tiles in
    a single directory). Caches the result as ``location`` property of
    the `tile`.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_quadkey(Tile((3, 4, 2)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/11.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        # build one base-4 digit per zoom level, most significant bit first:
        # bit of x contributes 1, bit of y contributes 2
        digits = []
        for bit in range(z, 0, -1):
            mask = 1 << (bit - 1)
            digit = 0
            if x & mask:
                digit += 1
            if y & mask:
                digit += 2
            digits.append(str(digit))
        tile.location = os.path.join(cache_dir, ''.join(digits) + '.' + file_ext)
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def no_level_location(level, cache_dir):
    # dummy for quadkey cache which stores all tiles in one directory;
    # callers that need per-level directories must not use this layout
    raise NotImplementedError('cache does not have any level location')
+
def tile_location_arcgiscache(tile, cache_dir, file_ext, create_dir=False):
    """
    Return the location of the `tile` in ArcGIS cache layout
    (``Lzz/Ryyyyyyyy/Cxxxxxxxx.ext`` with hexadecimal row/column).
    Caches the result as ``location`` property of the `tile`.

    :param tile: the tile object
    :param create_dir: if True, create all necessary directories
    :return: the full filename of the tile

    >>> from mapproxy.cache.tile import Tile
    >>> tile_location_arcgiscache(Tile((1234567, 87654321, 9)), '/tmp/cache', 'png').replace('\\\\', '/')
    '/tmp/cache/L09/R05397fb1/C0012d687.png'
    """
    if tile.location is None:
        x, y, z = tile.coord
        tile.location = os.path.join(
            cache_dir,
            'L%02d' % z,
            'R%08x' % y,
            'C%08x.%s' % (x, file_ext),
        )
    if create_dir:
        ensure_directory(tile.location)
    return tile.location
+
def level_location_arcgiscache(z, cache_dir):
    # ArcGIS caches name level directories 'L00', 'L01', ...
    return level_location('L%02d' % z, cache_dir=cache_dir)
\ No newline at end of file
diff --git a/mapproxy/cache/redis.py b/mapproxy/cache/redis.py
new file mode 100644
index 0000000..abb9ce1
--- /dev/null
+++ b/mapproxy/cache/redis.py
@@ -0,0 +1,88 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2017 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement, absolute_import
+
+import hashlib
+
+from mapproxy.image import ImageSource
+from mapproxy.cache.base import (
+    TileCacheBase,
+    tile_buffer,
+)
+from mapproxy.compat import BytesIO
+
+try:
+    import redis
+except ImportError:
+    redis = None
+
+
+import logging
+log = logging.getLogger(__name__)
+
+
class RedisCache(TileCacheBase):
    """Tile cache backend that stores each tile as a single Redis value."""

    def __init__(self, host, port, prefix, ttl=0, db=0):
        if redis is None:
            raise ImportError("Redis backend requires 'redis' package.")

        self.prefix = prefix
        self.lock_cache_id = 'redis-' + hashlib.md5((host + str(port) + prefix + str(db)).encode('utf-8')).hexdigest()
        self.ttl = ttl
        self.r = redis.StrictRedis(host=host, port=port, db=db)

    def _key(self, tile):
        # one key per tile: <prefix>-z-x-y
        x, y, z = tile.coord
        return self.prefix + '-%d-%d-%d' % (z, x, y)

    def is_cached(self, tile):
        if tile.coord is None or tile.source:
            return True
        return self.r.exists(self._key(tile))

    def store_tile(self, tile):
        if tile.stored:
            return True

        key = self._key(tile)
        with tile_buffer(tile) as buf:
            data = buf.read()

        result = self.r.set(key, data)
        if self.ttl:
            # use ms expire times for unit-tests
            self.r.pexpire(key, int(self.ttl * 1000))
        return result

    def load_tile(self, tile, with_metadata=False):
        if tile.source or tile.coord is None:
            return True

        tile_data = self.r.get(self._key(tile))
        if not tile_data:
            return False
        tile.source = ImageSource(BytesIO(tile_data))
        return True

    def remove_tile(self, tile):
        if tile.coord is None:
            return True

        self.r.delete(self._key(tile))
        return True
diff --git a/mapproxy/cache/renderd.py b/mapproxy/cache/renderd.py
index 155ac3c..16dd7f6 100644
--- a/mapproxy/cache/renderd.py
+++ b/mapproxy/cache/renderd.py
@@ -30,6 +30,7 @@ except ImportError:
 from mapproxy.client.log import log_request
 from mapproxy.cache.tile import TileCreator, Tile
 from mapproxy.source import SourceError
+from mapproxy.util.lock import LockTimeout
 
 def has_renderd_support():
     if not json or not requests:
@@ -71,6 +72,9 @@ class RenderdTileCreator(TileCreator):
         if result['status'] == 'error':
             log_request(address, 500, None, duration=duration, method='RENDERD')
             raise SourceError("Error from renderd: %s" % result.get('error_message', 'unknown error from renderd'))
+        elif result['status'] == 'lock':
+            log_request(address, 503, None, duration=duration, method='RENDERD')
+            raise LockTimeout("Lock timeout from renderd: %s" % result.get('error_message', 'unknown lock timeout error from renderd'))
 
         log_request(address, 200, None, duration=duration, method='RENDERD')
 
diff --git a/mapproxy/cache/s3.py b/mapproxy/cache/s3.py
new file mode 100644
index 0000000..1bbd1d8
--- /dev/null
+++ b/mapproxy/cache/s3.py
@@ -0,0 +1,170 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement
+
+import hashlib
+import sys
+import threading
+
+from mapproxy.image import ImageSource
+from mapproxy.cache import path
+from mapproxy.cache.base import tile_buffer, TileCacheBase
+from mapproxy.util import async
+from mapproxy.util.py import reraise_exception
+
+try:
+    import boto3
+    import botocore
+except ImportError:
+    boto3 = None
+
+
+import logging
+log = logging.getLogger('mapproxy.cache.s3')
+
+
+_s3_sessions_cache = threading.local()
+def s3_session(profile_name=None):
+    if not hasattr(_s3_sessions_cache, 'sessions'):
+        _s3_sessions_cache.sessions = {}
+    if profile_name not in _s3_sessions_cache.sessions:
+        _s3_sessions_cache.sessions[profile_name] = boto3.session.Session(profile_name=profile_name)
+    return _s3_sessions_cache.sessions[profile_name]
+
+class S3ConnectionError(Exception):
+    pass
+
+class S3Cache(TileCacheBase):
+
+    def __init__(self, base_path, file_ext, directory_layout='tms',
+                 bucket_name='mapproxy', profile_name=None,
+                 _concurrent_writer=4):
+        super(S3Cache, self).__init__()
+        self.lock_cache_id = hashlib.md5(base_path.encode('utf-8') + bucket_name.encode('utf-8')).hexdigest()
+        self.bucket_name = bucket_name
+        try:
+            self.bucket = self.conn().head_bucket(Bucket=bucket_name)
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error']['Code'] == '404':
+                raise S3ConnectionError('No such bucket: %s' % bucket_name)
+            elif e.response['Error']['Code'] == '403':
+                raise S3ConnectionError('Access denied. Check your credentials')
+            else:
+                reraise_exception(
+                    S3ConnectionError('Unknown error: %s' % e),
+                    sys.exc_info(),
+                )
+
+        self.base_path = base_path
+        self.file_ext = file_ext
+        self._concurrent_writer = _concurrent_writer
+
+        self._tile_location, _ = path.location_funcs(layout=directory_layout)
+
+    def tile_key(self, tile):
+        return self._tile_location(tile, self.base_path, self.file_ext).lstrip('/')
+
+    def conn(self):
+        if boto3 is None:
+            raise ImportError("S3 Cache requires 'boto3' package.")
+
+        try:
+            return s3_session().client("s3")
+        except Exception as e:
+            raise S3ConnectionError('Error during connection %s' % e)
+
+    def load_tile_metadata(self, tile):
+        if tile.timestamp:
+            return
+        self.is_cached(tile)
+
+    def _set_metadata(self, response, tile):
+        if 'LastModified' in response:
+            tile.timestamp = float(response['LastModified'].strftime('%s'))
+        if 'ContentLength' in response:
+            tile.size = response['ContentLength']
+
+    def is_cached(self, tile):
+        if tile.is_missing():
+            key = self.tile_key(tile)
+            try:
+                r = self.conn().head_object(Bucket=self.bucket_name, Key=key)
+                self._set_metadata(r, tile)
+            except botocore.exceptions.ClientError as e:
+                if e.response['Error']['Code'] in ('404', 'NoSuchKey'):
+                    return False
+                raise
+
+        return True
+
+    def load_tiles(self, tiles, with_metadata=True):
+        p = async.Pool(min(4, len(tiles)))
+        return all(p.map(self.load_tile, tiles))
+
+    def load_tile(self, tile, with_metadata=True):
+        if not tile.is_missing():
+            return True
+
+        key = self.tile_key(tile)
+        log.debug('S3:load_tile, key: %s' % key)
+
+        try:
+            r  = self.conn().get_object(Bucket=self.bucket_name, Key=key)
+            self._set_metadata(r, tile)
+            tile.source = ImageSource(r['Body'])
+        except botocore.exceptions.ClientError as e:
+            error = e.response.get('Errors', e.response)['Error'] # moto get_object can return Error wrapped in Errors...
+            if error['Code'] in ('404', 'NoSuchKey'):
+                return False
+            raise
+
+        return True
+
+    def remove_tile(self, tile):
+        key = self.tile_key(tile)
+        log.debug('remove_tile, key: %s' % key)
+        self.conn().delete_object(Bucket=self.bucket_name, Key=key)
+
+    def store_tiles(self, tiles):
+        p = async.Pool(min(self._concurrent_writer, len(tiles)))
+        p.map(self.store_tile, tiles)
+
+    def store_tile(self, tile):
+        if tile.stored:
+            return
+
+        key = self.tile_key(tile)
+        log.debug('S3: store_tile, key: %s' % key)
+
+        extra_args = {}
+        if self.file_ext in ('jpeg', 'png'):
+            extra_args['ContentType'] = 'image/' + self.file_ext
+        with tile_buffer(tile) as buf:
+            self.conn().upload_fileobj(
+                NopCloser(buf), # upload_fileobj closes buf, wrap in NopCloser
+                self.bucket_name,
+                key,
+                ExtraArgs=extra_args)
+
+class NopCloser(object):
+    def __init__(self, wrapped):
+        self.wrapped = wrapped
+
+    def close(self):
+        pass
+
+    def __getattr__(self, name):
+        return getattr(self.wrapped, name)
diff --git a/mapproxy/cache/tile.py b/mapproxy/cache/tile.py
index 95c55bc..0362848 100644
--- a/mapproxy/cache/tile.py
+++ b/mapproxy/cache/tile.py
@@ -37,12 +37,14 @@ Tile caching (creation, caching and retrieval of tiles).
 
 from __future__ import with_statement
 
+from functools import partial
 from contextlib import contextmanager
 from mapproxy.grid import MetaGrid
 from mapproxy.image.merge import merge_images
 from mapproxy.image.tile import TileSplitter
 from mapproxy.layer import MapQuery, BlankImage
 from mapproxy.util import async
+from mapproxy.util.py import reraise
 
 class TileManager(object):
     """
@@ -56,7 +58,9 @@ class TileManager(object):
     """
     def __init__(self, grid, cache, sources, format, locker, image_opts=None, request_format=None,
         meta_buffer=None, meta_size=None, minimize_meta_requests=False, identifier=None,
-        pre_store_filter=None, concurrent_tile_creators=1, tile_creator_class=None):
+        pre_store_filter=None, concurrent_tile_creators=1, tile_creator_class=None,
+        bulk_meta_tiles=False,
+        ):
         self.grid = grid
         self.cache = cache
         self.locker = locker
@@ -78,6 +82,11 @@ class TileManager(object):
                 self.meta_grid = MetaGrid(grid, meta_size=meta_size, meta_buffer=meta_buffer)
             elif any(source.supports_meta_tiles for source in sources):
                 raise ValueError('meta tiling configured but not supported by all sources')
+            elif meta_size and not meta_size == [1, 1] and bulk_meta_tiles:
+                # meta tiles configured but all sources are tiled
+                # use bulk_meta_tile mode, which downloads tiles in parallel
+                self.meta_grid = MetaGrid(grid, meta_size=meta_size, meta_buffer=0)
+                self.tile_creator_class = partial(self.tile_creator_class, bulk_meta_tiles=True)
 
     @contextmanager
     def session(self):
@@ -196,11 +205,12 @@ class TileManager(object):
         return tile
 
 class TileCreator(object):
-    def __init__(self, tile_mgr, dimensions=None, image_merger=None):
+    def __init__(self, tile_mgr, dimensions=None, image_merger=None, bulk_meta_tiles=False):
         self.cache = tile_mgr.cache
         self.sources = tile_mgr.sources
         self.grid = tile_mgr.grid
         self.meta_grid = tile_mgr.meta_grid
+        self.bulk_meta_tiles = bulk_meta_tiles
         self.tile_mgr = tile_mgr
         self.dimensions = dimensions
         self.image_merger = image_merger
@@ -283,21 +293,25 @@ class TileCreator(object):
             try:
                 img = source.get_map(query)
             except BlankImage:
-                return None
+                return None, None
             else:
-                return img
+                return (img, source.coverage)
 
-        imgs = []
-        for img in async.imap(get_map_from_source, self.sources):
-            if img is not None:
-                imgs.append(img)
+        layers = []
+        for layer in async.imap(get_map_from_source, self.sources):
+            if layer[0] is not None:
+                layers.append(layer)
 
-        merger = self.image_merger
-        if not merger:
-            merger = merge_images
-        return merger(imgs, size=query.size, image_opts=self.tile_mgr.image_opts)
+        return merge_images(layers, size=query.size, bbox=query.bbox, bbox_srs=query.srs,
+                            image_opts=self.tile_mgr.image_opts, merger=self.image_merger)
 
     def _create_meta_tiles(self, meta_tiles):
+        if self.bulk_meta_tiles:
+            created_tiles = []
+            for meta_tile in meta_tiles:
+                    created_tiles.extend(self._create_bulk_meta_tile(meta_tile))
+            return created_tiles
+
         if self.tile_mgr.concurrent_tile_creators > 1 and len(meta_tiles) > 1:
             return self._create_threaded(self._create_meta_tile, meta_tiles)
 
@@ -307,6 +321,10 @@ class TileCreator(object):
         return created_tiles
 
     def _create_meta_tile(self, meta_tile):
+        """
+        _create_meta_tile queries a single meta tile and splits it into
+        tiles.
+        """
         tile_size = self.grid.tile_size
         query = MapQuery(meta_tile.bbox, meta_tile.size, self.grid.srs, self.tile_mgr.request_format,
             dimensions=self.dimensions)
@@ -321,11 +339,64 @@ class TileCreator(object):
                 if meta_tile_image.cacheable:
                     self.cache.store_tiles(splitted_tiles)
                 return splitted_tiles
-        # else
+            # else
         tiles = [Tile(coord) for coord in meta_tile.tiles]
         self.cache.load_tiles(tiles)
         return tiles
 
+    def _create_bulk_meta_tile(self, meta_tile):
+        """
+        _create_bulk_meta_tile queries each tile of the meta tile in parallel
+        (using concurrent_tile_creators).
+        """
+        tile_size = self.grid.tile_size
+        main_tile = Tile(meta_tile.main_tile_coord)
+        with self.tile_mgr.lock(main_tile):
+            if not all(self.is_cached(t) for t in meta_tile.tiles if t is not None):
+                async_pool = async.Pool(self.tile_mgr.concurrent_tile_creators)
+                def query_tile(coord):
+                    try:
+                        query = MapQuery(self.grid.tile_bbox(coord), tile_size, self.grid.srs, self.tile_mgr.request_format,
+                            dimensions=self.dimensions)
+                        tile_image = self._query_sources(query)
+                        if tile_image is None:
+                            return None
+
+                        if self.tile_mgr.image_opts != tile_image.image_opts:
+                            # call as_buffer to force conversion into cache format
+                            tile_image.as_buffer(self.tile_mgr.image_opts)
+
+                        tile = Tile(coord, cacheable=tile_image.cacheable)
+                        tile.source = tile_image
+                        tile = self.tile_mgr.apply_tile_filter(tile)
+                    except BlankImage:
+                        return None
+                    else:
+                        return tile
+
+                tiles = []
+                for tile_task in async_pool.imap(query_tile,
+                    [t for t in meta_tile.tiles if t is not None],
+                    use_result_objects=True,
+                ):
+                    if tile_task.exception is None:
+                        tile = tile_task.result
+                        if tile is not None:
+                            tiles.append(tile)
+                    else:
+                        ex = tile_task.exception
+                        async_pool.shutdown(True)
+                        reraise(ex)
+
+                self.cache.store_tiles([t for t in tiles if t.cacheable])
+                return tiles
+
+            # else
+        tiles = [Tile(coord) for coord in meta_tile.tiles]
+        self.cache.load_tiles(tiles)
+        return tiles
+
+
 class Tile(object):
     """
     Internal data object for all tiles. Stores the tile-``coord`` and the tile data.
diff --git a/mapproxy/client/arcgis.py b/mapproxy/client/arcgis.py
index fc72200..2b362b1 100644
--- a/mapproxy/client/arcgis.py
+++ b/mapproxy/client/arcgis.py
@@ -13,6 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from mapproxy.client.http import HTTPClient
+from mapproxy.client.wms import WMSInfoClient
+from mapproxy.srs import SRS
+from mapproxy.featureinfo import create_featureinfo_doc
+
 class ArcGISClient(object):
     def __init__(self, request_template, http_client=None):
         self.request_template = request_template
@@ -33,3 +38,42 @@ class ArcGISClient(object):
         req.params.transparent = query.transparent
 
         return req.complete_url
+
+    def combined_client(self, other, query):
+        return
+
+class ArcGISInfoClient(WMSInfoClient):
+    def __init__(self, request_template, supported_srs=None, http_client=None,
+            return_geometries=False,
+            tolerance=5,
+        ):
+        self.request_template = request_template
+        self.http_client = http_client or HTTPClient()
+        if not supported_srs and self.request_template.params.srs is not None:
+            supported_srs = [SRS(self.request_template.params.srs)]
+        self.supported_srs = supported_srs or []
+        self.return_geometries = return_geometries
+        self.tolerance = tolerance
+
+    def get_info(self, query):
+        if self.supported_srs and query.srs not in self.supported_srs:
+            query = self._get_transformed_query(query)
+        resp = self._retrieve(query)
+        # always use query.info_format and not the content-type from the response (even the esri example server always returns text/plain)
+        return create_featureinfo_doc(resp.read(), query.info_format)
+
+    def _query_url(self, query):
+        req = self.request_template.copy()
+        req.params.bbox = query.bbox
+        req.params.size = query.size
+        req.params.pos = query.pos
+        req.params.srs = query.srs.srs_code
+        if query.info_format.startswith('text/html'):
+            req.params['f'] =  'html'
+        else:
+            req.params['f'] =  'json'
+
+        req.params['tolerance'] = self.tolerance
+        req.params['returnGeometry'] = str(self.return_geometries).lower()
+
+        return req.complete_url
diff --git a/mapproxy/client/wms.py b/mapproxy/client/wms.py
index 5d096a9..caa462e 100644
--- a/mapproxy/client/wms.py
+++ b/mapproxy/client/wms.py
@@ -134,18 +134,21 @@ class WMSInfoClient(object):
         """
         req_srs = query.srs
         req_bbox = query.bbox
+        req_coord = make_lin_transf((0, 0, query.size[0], query.size[1]), req_bbox)(query.pos)
+
         info_srs = self._best_supported_srs(req_srs)
         info_bbox = req_srs.transform_bbox_to(info_srs, req_bbox)
-
-        req_coord = make_lin_transf((0, query.size[1], query.size[0], 0), req_bbox)(query.pos)
+        # calculate new info_size to keep square pixels after transform_bbox_to
+        info_aratio = (info_bbox[3] - info_bbox[1])/(info_bbox[2] - info_bbox[0])
+        info_size = query.size[0], int(info_aratio*query.size[0])
 
         info_coord = req_srs.transform_to(info_srs, req_coord)
-        info_pos = make_lin_transf((info_bbox), (0, query.size[1], query.size[0], 0))(info_coord)
+        info_pos = make_lin_transf((info_bbox), (0, 0, info_size[0], info_size[1]))(info_coord)
         info_pos = int(round(info_pos[0])), int(round(info_pos[1]))
 
         info_query = InfoQuery(
             bbox=info_bbox,
-            size=query.size,
+            size=info_size,
             srs=info_srs,
             pos=info_pos,
             info_format=query.info_format,
diff --git a/mapproxy/compat/image.py b/mapproxy/compat/image.py
index 8b7165f..f258dae 100644
--- a/mapproxy/compat/image.py
+++ b/mapproxy/compat/image.py
@@ -19,35 +19,39 @@ __all__ = ['Image', 'ImageColor', 'ImageDraw', 'ImageFont', 'ImagePalette',
            'ImageChops', 'quantize']
 
 try:
+    import PIL
     from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePalette, ImageChops, ImageMath
     # prevent pyflakes warnings
     Image, ImageColor, ImageDraw, ImageFont, ImagePalette, ImageChops, ImageMath
 except ImportError:
-    try:
-        import Image, ImageColor, ImageDraw, ImageFont, ImagePalette, ImageChops, ImageMath
-        # prevent pyflakes warnings
-        Image, ImageColor, ImageDraw, ImageFont, ImagePalette, ImageChops, ImageMath
-    except ImportError:
-        # allow MapProxy to start without PIL (for tilecache only).
-        # issue warning and raise ImportError on first use of
-        # a function that requires PIL
-        warnings.warn('PIL is not available')
-        class NoPIL(object):
-            def __getattr__(self, name):
-                if name.startswith('__'):
-                    raise AttributeError()
-                raise ImportError('PIL is not available')
-        ImageDraw = ImageFont = ImagePalette = ImageChops = NoPIL()
-        # add some dummy stuff required on import/load time
-        Image = NoPIL()
-        Image.NEAREST = Image.BILINEAR = Image.BICUBIC = 1
-        Image.Image = NoPIL
-        ImageColor = NoPIL()
-        ImageColor.getrgb = lambda x: x
+    # allow MapProxy to start without PIL (for tilecache only).
+    # issue warning and raise ImportError on first use of
+    # a function that requires PIL
+    warnings.warn('PIL is not available')
+    class NoPIL(object):
+        def __getattr__(self, name):
+            if name.startswith('__'):
+                raise AttributeError()
+            raise ImportError('PIL is not available')
+    ImageDraw = ImageFont = ImagePalette = ImageChops = NoPIL()
+    # add some dummy stuff required on import/load time
+    Image = NoPIL()
+    Image.NEAREST = Image.BILINEAR = Image.BICUBIC = 1
+    Image.Image = NoPIL
+    ImageColor = NoPIL()
+    ImageColor.getrgb = lambda x: x
 
 def has_alpha_composite_support():
     return hasattr(Image, 'alpha_composite')
 
+def transform_uses_center():
+    # transformation behavior changed with Pillow 3.4
+    # https://github.com/python-pillow/Pillow/commit/5232361718bae0f0ccda76bfd5b390ebf9179b18
+    if hasattr(PIL, 'PILLOW_VERSION'):
+        if not PIL.PILLOW_VERSION.startswith(('1.', '2.', '3.0', '3.1', '3.2', '3.3')):
+            return True
+    return False
+
 def quantize_pil(img, colors=256, alpha=False, defaults=None):
     if hasattr(Image, 'FASTOCTREE'):
         if not alpha:
diff --git a/mapproxy/config/coverage.py b/mapproxy/config/coverage.py
index 7f8985d..8e7a65d 100644
--- a/mapproxy/config/coverage.py
+++ b/mapproxy/config/coverage.py
@@ -21,16 +21,41 @@ from mapproxy.util.geom import (
     load_datasource,
     load_ogr_datasource,
     load_polygons,
+    load_expire_tiles,
     require_geom_support,
     build_multipolygon,
 )
-from mapproxy.util.coverage import coverage
+from mapproxy.util.coverage import (
+    coverage,
+    diff_coverage,
+    union_coverage,
+    intersection_coverage,
+)
 from mapproxy.compat import string_type
 
 bbox_string_re = re.compile(r'[-+]?\d*.?\d+,[-+]?\d*.?\d+,[-+]?\d*.?\d+,[-+]?\d*.?\d+')
 
 def load_coverage(conf, base_path=None):
-    if 'ogr_datasource' in conf:
+    clip = False
+    if 'clip' in conf:
+        clip = conf['clip']
+
+    if 'union' in conf:
+        parts = []
+        for cov in conf['union']:
+            parts.append(load_coverage(cov))
+        return union_coverage(parts, clip=clip)
+    elif 'intersection' in conf:
+        parts = []
+        for cov in conf['intersection']:
+            parts.append(load_coverage(cov))
+        return intersection_coverage(parts, clip=clip)
+    elif 'difference' in conf:
+        parts = []
+        for cov in conf['difference']:
+            parts.append(load_coverage(cov))
+        return diff_coverage(parts, clip=clip)
+    elif 'ogr_datasource' in conf:
         require_geom_support()
         srs = conf['ogr_srs']
         datasource = conf['ogr_datasource']
@@ -70,6 +95,13 @@ def load_coverage(conf, base_path=None):
             where = conf.get('where', None)
             geom = load_datasource(datasource, where)
             bbox, geom = build_multipolygon(geom, simplify=True)
+    elif 'expire_tiles' in conf:
+        require_geom_support()
+        filename = abspath(conf['expire_tiles'])
+        geom = load_expire_tiles(filename)
+        _, geom = build_multipolygon(geom, simplify=False)
+        return coverage(geom, SRS(3857))
     else:
         return None
-    return coverage(geom or bbox, SRS(srs))
+
+    return coverage(geom or bbox, SRS(srs), clip=clip)
diff --git a/mapproxy/config/defaults.py b/mapproxy/config/defaults.py
index badbed8..809532f 100644
--- a/mapproxy/config/defaults.py
+++ b/mapproxy/config/defaults.py
@@ -63,6 +63,7 @@ cache = dict(
     meta_buffer = 80,
     minimize_meta_requests = False,
     link_single_color_images = False,
+    sqlite_timeout = 30,
 )
 
 grid = dict(
diff --git a/mapproxy/config/loader.py b/mapproxy/config/loader.py
index 4a7fb77..033853e 100644
--- a/mapproxy/config/loader.py
+++ b/mapproxy/config/loader.py
@@ -621,14 +621,47 @@ class ArcGISSourceConfiguration(SourceConfiguration):
         request = create_request(self.conf["req"], params)
         http_client, request.url = self.http_client(request.url)
         coverage = self.coverage()
+        res_range = resolution_range(self.conf)
 
         client = ArcGISClient(request, http_client)
         image_opts = self.image_opts(format=params.get('format'))
         return ArcGISSource(client, image_opts=image_opts, coverage=coverage,
+                            res_range=res_range,
                             supported_srs=supported_srs,
                             supported_formats=supported_formats or None)
 
 
+    def fi_source(self, params=None):
+        from mapproxy.client.arcgis import ArcGISInfoClient
+        from mapproxy.request.arcgis import create_identify_request
+        from mapproxy.source.arcgis import ArcGISInfoSource
+        from mapproxy.srs import SRS
+
+        if params is None: params = {}
+        request_format = self.conf['req'].get('format')
+        if request_format:
+            params['format'] = request_format
+        supported_srs = [SRS(code) for code in self.conf.get('supported_srs', [])]
+        fi_source = None
+        if self.conf.get('opts', {}).get('featureinfo', False):
+            opts = self.conf['opts']
+            tolerance = opts.get('featureinfo_tolerance', 5)
+            return_geometries = opts.get('featureinfo_return_geometries', False)
+
+            fi_request = create_identify_request(self.conf['req'], params)
+
+
+            http_client, fi_request.url = self.http_client(fi_request.url)
+            fi_client = ArcGISInfoClient(fi_request,
+                supported_srs=supported_srs,
+                http_client=http_client,
+                tolerance=tolerance,
+                return_geometries=return_geometries,
+            )
+            fi_source = ArcGISInfoSource(fi_client)
+        return fi_source
+
+
 class WMSSourceConfiguration(SourceConfiguration):
     source_type = ('wms',)
 
@@ -953,6 +986,10 @@ class CacheConfiguration(ConfigurationBase):
         return self.context.globals.get_path('cache_dir', self.conf,
             global_key='cache.base_dir')
 
+    @memoize
+    def has_multiple_grids(self):
+        return len(self.grid_confs()) > 1
+
     def lock_dir(self):
         lock_dir = self.context.globals.get_path('cache.tile_lock_dir', self.conf)
         if not lock_dir:
@@ -965,6 +1002,11 @@ class CacheConfiguration(ConfigurationBase):
         cache_dir = self.cache_dir()
         directory_layout = self.conf.get('cache', {}).get('directory_layout', 'tc')
         if self.conf.get('cache', {}).get('directory'):
+            if self.has_multiple_grids():
+                raise ConfigurationError(
+                    "using single directory for cache with multiple grids in %s" %
+                    (self.conf['name']),
+                )
             pass
         elif self.conf.get('cache', {}).get('use_grid_names'):
             cache_dir = os.path.join(cache_dir, self.conf['name'], grid_conf.tile_grid().name)
@@ -978,13 +1020,10 @@ class CacheConfiguration(ConfigurationBase):
             log.warn('link_single_color_images not supported on windows')
             link_single_color_images = False
 
-        lock_timeout = self.context.globals.get_value('http.client_timeout', {})
-
         return FileCache(
             cache_dir,
             file_ext=file_ext,
             directory_layout=directory_layout,
-            lock_timeout=lock_timeout,
             link_single_color_images=link_single_color_images,
         )
 
@@ -1000,8 +1039,77 @@ class CacheConfiguration(ConfigurationBase):
         else:
             mbfile_path = os.path.join(self.cache_dir(), filename)
 
+        sqlite_timeout = self.context.globals.get_value('cache.sqlite_timeout', self.conf)
+        wal = self.context.globals.get_value('cache.sqlite_wal', self.conf)
+
         return MBTilesCache(
             mbfile_path,
+            timeout=sqlite_timeout,
+            wal=wal,
+        )
+
+    def _geopackage_cache(self, grid_conf, file_ext):
+        from mapproxy.cache.geopackage import GeopackageCache, GeopackageLevelCache
+
+        filename = self.conf['cache'].get('filename')
+        table_name = self.conf['cache'].get('table_name') or \
+                     "{}_{}".format(self.conf['name'], grid_conf.tile_grid().name)
+        levels = self.conf['cache'].get('levels')
+
+        if not filename:
+            filename = self.conf['name'] + '.gpkg'
+        if filename.startswith('.' + os.sep):
+            gpkg_file_path = self.context.globals.abspath(filename)
+        else:
+            gpkg_file_path = os.path.join(self.cache_dir(), filename)
+
+        cache_dir = self.conf['cache'].get('directory')
+        if cache_dir:
+            cache_dir = os.path.join(
+                self.context.globals.abspath(cache_dir),
+                grid_conf.tile_grid().name
+            )
+        else:
+            cache_dir = self.cache_dir()
+            cache_dir = os.path.join(
+                cache_dir,
+                self.conf['name'],
+                grid_conf.tile_grid().name
+            )
+
+        if levels:
+            return GeopackageLevelCache(
+                cache_dir, grid_conf.tile_grid(), table_name
+            )
+        else:
+            return GeopackageCache(
+                gpkg_file_path, grid_conf.tile_grid(), table_name
+            )
+
+    def _s3_cache(self, grid_conf, file_ext):
+        from mapproxy.cache.s3 import S3Cache
+
+        bucket_name = self.context.globals.get_value('cache.bucket_name', self.conf,
+            global_key='cache.s3.bucket_name')
+
+        if not bucket_name:
+            raise ConfigurationError("no bucket_name configured for s3 cache %s" % self.conf['name'])
+
+        profile_name = self.context.globals.get_value('cache.profile_name', self.conf,
+            global_key='cache.s3.profile_name')
+
+        directory_layout = self.conf['cache'].get('directory_layout', 'tms')
+
+        base_path = self.conf['cache'].get('directory', None)
+        if base_path is None:
+            base_path = os.path.join(self.conf['name'], grid_conf.tile_grid().name)
+
+        return S3Cache(
+            base_path=base_path,
+            file_ext=file_ext,
+            directory_layout=directory_layout,
+            bucket_name=bucket_name,
+            profile_name=profile_name,
         )
 
     def _sqlite_cache(self, grid_conf, file_ext):
@@ -1021,8 +1129,13 @@ class CacheConfiguration(ConfigurationBase):
                 grid_conf.tile_grid().name
             )
 
+        sqlite_timeout = self.context.globals.get_value('cache.sqlite_timeout', self.conf)
+        wal = self.context.globals.get_value('cache.sqlite_wal', self.conf)
+
         return MBTilesLevelCache(
             cache_dir,
+            timeout=sqlite_timeout,
+            wal=wal,
         )
 
     def _couchdb_cache(self, grid_conf, file_ext):
@@ -1074,6 +1187,46 @@ class CacheConfiguration(ConfigurationBase):
             use_secondary_index=use_secondary_index,
         )
 
+    def _redis_cache(self, grid_conf, file_ext):
+        from mapproxy.cache.redis import RedisCache
+
+        host = self.conf['cache'].get('host', '127.0.0.1')
+        port = self.conf['cache'].get('port', 6379)
+        db = self.conf['cache'].get('db', 0)
+        ttl = self.conf['cache'].get('default_ttl', 3600)
+
+        prefix = self.conf['cache'].get('prefix')
+        if not prefix:
+            prefix = self.conf['name'] + '_' + grid_conf.tile_grid().name
+
+        return RedisCache(
+            host=host,
+            port=port,
+            db=db,
+            prefix=prefix,
+            ttl=ttl,
+        )
+
+    def _compact_cache(self, grid_conf, file_ext):
+        from mapproxy.cache.compact import CompactCacheV1
+
+        cache_dir = self.cache_dir()
+        if self.conf.get('cache', {}).get('directory'):
+            if self.has_multiple_grids():
+                raise ConfigurationError(
+                    "using single directory for cache with multiple grids in %s" %
+                    (self.conf['name']),
+                )
+            pass
+        else:
+            cache_dir = os.path.join(cache_dir, self.conf['name'], grid_conf.tile_grid().name)
+
+        if self.conf['cache']['version'] != 1:
+            raise ConfigurationError("compact cache only supports version 1")
+        return CompactCacheV1(
+            cache_dir=cache_dir,
+        )
+
     def _tile_cache(self, grid_conf, file_ext):
         if self.conf.get('disable_storage', False):
             from mapproxy.cache.dummy import DummyCache
@@ -1228,7 +1381,7 @@ class CacheConfiguration(ConfigurationBase):
                     factor=source.get('factor', 1.0),
                 )
 
-        return band_merger.merge, sources, source_image_opts
+        return band_merger, sources, source_image_opts
 
     @memoize
     def caches(self):
@@ -1253,6 +1406,8 @@ class CacheConfiguration(ConfigurationBase):
             global_key='cache.meta_buffer')
         meta_size = self.context.globals.get_value('meta_size', self.conf,
             global_key='cache.meta_size')
+        bulk_meta_tiles = self.context.globals.get_value('bulk_meta_tiles', self.conf,
+            global_key='cache.bulk_meta_tiles')
         minimize_meta_requests = self.context.globals.get_value('minimize_meta_requests', self.conf,
             global_key='cache.minimize_meta_requests')
         concurrent_tile_creators = self.context.globals.get_value('concurrent_tile_creators', self.conf,
@@ -1336,7 +1491,9 @@ class CacheConfiguration(ConfigurationBase):
                 minimize_meta_requests=minimize_meta_requests,
                 concurrent_tile_creators=concurrent_tile_creators,
                 pre_store_filter=tile_filter,
-                tile_creator_class=tile_creator_class)
+                tile_creator_class=tile_creator_class,
+                bulk_meta_tiles=bulk_meta_tiles,
+            )
             extent = merge_layer_extents(sources)
             if extent.is_default:
                 extent = map_extent_from_grid(tile_grid)
@@ -1493,7 +1650,7 @@ class LayerConfiguration(ConfigurationBase):
         return dimensions
 
     @memoize
-    def tile_layers(self):
+    def tile_layers(self, grid_name_as_path=False):
         from mapproxy.service.tile import TileLayer
         from mapproxy.cache.dummy import DummyCache
 
@@ -1524,7 +1681,6 @@ class LayerConfiguration(ConfigurationBase):
         tile_layers = []
         for cache_name in sources:
             for grid, extent, cache_source in self.context.caches[cache_name].caches():
-
                 if dimensions and not isinstance(cache_source.cache, DummyCache):
                     # caching of dimension layers is not supported yet
                     raise ConfigurationError(
@@ -1535,8 +1691,11 @@ class LayerConfiguration(ConfigurationBase):
                 md = {}
                 md['title'] = self.conf['title']
                 md['name'] = self.conf['name']
-                md['name_path'] = (self.conf['name'], grid.srs.srs_code.replace(':', '').upper())
                 md['grid_name'] = grid.name
+                if grid_name_as_path:
+                    md['name_path'] = (md['name'], md['grid_name'])
+                else:
+                    md['name_path'] = (self.conf['name'], grid.srs.srs_code.replace(':', '').upper())
                 md['name_internal'] = md['name_path'][0] + '_' + md['name_path'][1]
                 md['format'] = self.context.caches[cache_name].image_opts().format
                 md['cache_name'] = cache_name
@@ -1612,11 +1771,9 @@ class ServiceConfiguration(ConfigurationBase):
     def tile_layers(self, conf, use_grid_names=False):
         layers = odict()
         for layer_name, layer_conf in iteritems(self.context.layers):
-            for tile_layer in layer_conf.tile_layers():
+            for tile_layer in layer_conf.tile_layers(grid_name_as_path=use_grid_names):
                 if not tile_layer: continue
                 if use_grid_names:
-                    # new style layer names are tuples
-                    tile_layer.md['name_path'] = (tile_layer.md['name'], tile_layer.md['grid_name'])
                     layers[tile_layer.md['name_path']] = tile_layer
                 else:
                     layers[tile_layer.md['name_internal']] = tile_layer
diff --git a/mapproxy/config/spec.py b/mapproxy/config/spec.py
index 9d1a214..2695241 100644
--- a/mapproxy/config/spec.py
+++ b/mapproxy/config/spec.py
@@ -36,7 +36,7 @@ def validate_options(conf_dict):
     else:
         return [], True
 
-coverage = {
+coverage = recursive({
     'polygons': str(),
     'polygons_srs': str(),
     'bbox': one_of(str(), [number()]),
@@ -47,7 +47,13 @@ coverage = {
     'datasource': one_of(str(), [number()]),
     'where': str(),
     'srs': str(),
-}
+    'expire_tiles': str(),
+    'union': [recursive()],
+    'difference': [recursive()],
+    'intersection': [recursive()],
+    'clip': bool(),
+})
+
 image_opts = {
     'mode': str(),
     'colors': number(),
@@ -106,11 +112,22 @@ cache_types = {
     },
     'sqlite': {
         'directory': str(),
+        'sqlite_timeout': number(),
+        'sqlite_wal': bool(),
         'tile_lock_dir': str(),
     },
     'mbtiles': {
         'filename': str(),
+        'sqlite_timeout': number(),
+        'sqlite_wal': bool(),
+        'tile_lock_dir': str(),
+    },
+    'geopackage': {
+        'filename': str(),
+        'directory': str(),
         'tile_lock_dir': str(),
+        'table_name': str(),
+        'levels': bool(),
     },
     'couchdb': {
         'url': str(),
@@ -121,6 +138,13 @@ cache_types = {
         'tile_id': str(),
         'tile_lock_dir': str(),
     },
+    's3': {
+        'bucket_name': str(),
+        'directory_layout': str(),
+        'directory': str(),
+        'profile_name': str(),
+        'tile_lock_dir': str(),
+     },
     'riak': {
         'nodes': [riak_node],
         'protocol': one_of('pbc', 'http', 'https'),
@@ -130,7 +154,20 @@ cache_types = {
             'http': number(),
         },
         'secondary_index': bool(),
-    }
+        'tile_lock_dir': str(),
+    },
+    'redis': {
+        'host': str(),
+        'port': int(),
+        'db': int(),
+        'prefix': str(),
+        'default_ttl': int(),
+    },
+    'compact': {
+        'directory': str(),
+        required('version'): number(),
+        'tile_lock_dir': str(),
+    },
 }
 
 on_error = {
@@ -324,10 +361,15 @@ mapproxy_yaml_spec = {
             'tile_lock_dir': str(),
             'meta_size': [number()],
             'meta_buffer': number(),
+            'bulk_meta_tiles': bool(),
             'max_tile_limit': number(),
             'minimize_meta_requests': bool(),
             'concurrent_tile_creators': int(),
             'link_single_color_images': bool(),
+            's3': {
+                'bucket_name': str(),
+                'profile_name': str(),
+            },
         },
         'grid': {
             'tile_size': [int()],
@@ -356,6 +398,7 @@ mapproxy_yaml_spec = {
             'cache_dir': str(),
             'meta_size': [number()],
             'meta_buffer': number(),
+            'bulk_meta_tiles': bool(),
             'minimize_meta_requests': bool(),
             'concurrent_tile_creators': int(),
             'disable_storage': bool(),
@@ -486,6 +529,11 @@ mapproxy_yaml_spec = {
                     'transparent': bool(),
                     'time': str()
                 },
+                'opts': {
+                    'featureinfo': bool(),
+                    'featureinfo_tolerance': number(),
+                    'featureinfo_return_geometries': bool(),
+                },
                 'supported_srs': [str()],
                 'http': http_opts
             }),
diff --git a/mapproxy/config_template/base_config/full_example.yaml b/mapproxy/config_template/base_config/full_example.yaml
index 60c029b..14bdd62 100644
--- a/mapproxy/config_template/base_config/full_example.yaml
+++ b/mapproxy/config_template/base_config/full_example.yaml
@@ -50,9 +50,7 @@ services:
         email: info at omniscale.de
       # multiline strings are possible with the right indention
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
   wms:
@@ -106,9 +104,7 @@ services:
         email: info at omniscale.de
       # multiline strings are possible with the right indention
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 layers:
diff --git a/mapproxy/config_template/paster/etc/mapproxy.yaml b/mapproxy/config_template/paster/etc/mapproxy.yaml
index 59445d3..7dca8c2 100644
--- a/mapproxy/config_template/paster/etc/mapproxy.yaml
+++ b/mapproxy/config_template/paster/etc/mapproxy.yaml
@@ -14,7 +14,7 @@ services:
       contact:
         person: Your Name Here
         position: Technical Director
-        organization: 
+        organization:
         address: Fakestreet 123
         city: Somewhere
         postcode: 12345
@@ -23,9 +23,7 @@ services:
         fax: +49(0)000-000000-0
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Insert license and copyright information for this service.
       fees: 'None'
 
 layers:
@@ -35,12 +33,12 @@ layers:
   # - name: osm_full_example
   #   title: Omniscale OSM WMS - osm.omniscale.net
   #   sources: [osm_cache_full_example]
-    
+
 caches:
   osm_cache:
     grids: [GLOBAL_MERCATOR, global_geodetic_sqrt2]
     sources: [osm_wms]
-  
+
   # osm_cache_full_example:
   #   meta_buffer: 20
   #   meta_size: [5, 5]
@@ -77,7 +75,7 @@ sources:
   #     # # always request in this format
   #     # format: image/png
   #     map: /home/map/mapserver.map
-    
+
 
 grids:
   global_geodetic_sqrt2:
diff --git a/mapproxy/featureinfo.py b/mapproxy/featureinfo.py
index e78d7ef..cfac32a 100644
--- a/mapproxy/featureinfo.py
+++ b/mapproxy/featureinfo.py
@@ -14,8 +14,12 @@
 # limitations under the License.
 
 import copy
+import json
+
+from functools import reduce
 from io import StringIO
-from mapproxy.compat import string_type, PY2, BytesIO
+
+from mapproxy.compat import string_type, PY2, BytesIO, iteritems
 
 try:
     from lxml import etree, html
@@ -120,12 +124,47 @@ class HTMLFeatureInfoDoc(XMLFeatureInfoDoc):
 
         return cls(result_tree)
 
+
+class JSONFeatureInfoDoc(FeatureInfoDoc):
+    info_type = 'json'
+
+    def __init__(self, content):
+        self.content = content
+
+    def as_string(self):
+        return self.content
+
+    @classmethod
+    def combine(cls, docs):
+        contents = [json.loads(d.content) for d in docs]
+        combined = reduce(lambda a, b: merge_dict(a, b), contents)
+        return cls(json.dumps(combined))
+
+
+def merge_dict(base, other):
+    """
+    Return `base` dict with values from `other` merged in.
+    """
+    for k, v in iteritems(other):
+        if k not in base:
+            base[k] = v
+        else:
+            if isinstance(base[k], dict):
+                merge_dict(base[k], v)
+            elif isinstance(base[k], list):
+                base[k].extend(v)
+            else:
+                base[k] = v
+    return base
+
 def create_featureinfo_doc(content, info_format):
     info_format = info_format.split(';', 1)[0].strip() # remove mime options like charset
     if info_format in ('text/xml', 'application/vnd.ogc.gml'):
         return XMLFeatureInfoDoc(content)
     if info_format == 'text/html':
         return HTMLFeatureInfoDoc(content)
+    if info_format == 'application/json':
+        return JSONFeatureInfoDoc(content)
 
     return TextFeatureInfoDoc(content)
 
diff --git a/mapproxy/grid.py b/mapproxy/grid.py
index c905949..bea4920 100644
--- a/mapproxy/grid.py
+++ b/mapproxy/grid.py
@@ -409,9 +409,16 @@ class TileGrid(object):
                 threshold = thresholds.pop() if thresholds else None
 
             if threshold_result is not None:
-                return threshold_result
+                # Use previous level that was within stretch_factor,
+                # but only if this level res is smaller than res.
+                # This fixes selection for resolutions that are closer together than stretch_factor.
+                #
+                if l_res < res:
+                    return threshold_result
 
             if l_res <= res*self.stretch_factor:
+                # l_res within stretch_factor
+                # remember this level, check for thresholds or better res in next loop
                 threshold_result = level
             prev_l_res = l_res
         return level
@@ -1060,12 +1067,12 @@ def bbox_contains(one, two):
 def deg_to_m(deg):
     return deg * (6378137 * 2 * math.pi) / 360
 
-OGC_PIXLE_SIZE = 0.00028 #m/px
+OGC_PIXEL_SIZE = 0.00028 #m/px
 
 def ogc_scale_to_res(scale):
-    return scale * OGC_PIXLE_SIZE
+    return scale * OGC_PIXEL_SIZE
 def res_to_ogc_scale(res):
-    return res / OGC_PIXLE_SIZE
+    return res / OGC_PIXEL_SIZE
 
 def resolution_range(min_res=None, max_res=None, max_scale=None, min_scale=None):
     if min_scale == max_scale == min_res == max_res == None:
diff --git a/mapproxy/image/mask.py b/mapproxy/image/mask.py
index 5d48ffe..a98e1dc 100644
--- a/mapproxy/image/mask.py
+++ b/mapproxy/image/mask.py
@@ -31,7 +31,7 @@ def mask_image_source_from_coverage(img_source, bbox, bbox_srs, coverage,
 
 def mask_image(img, bbox, bbox_srs, coverage):
     geom = mask_polygons(bbox, SRS(bbox_srs), coverage)
-    mask = image_mask_from_geom(img, bbox, geom)
+    mask = image_mask_from_geom(img.size, bbox, geom)
     img = img.convert('RGBA')
     img.paste((255, 255, 255, 0), (0, 0), mask)
     return img
@@ -41,15 +41,32 @@ def mask_polygons(bbox, bbox_srs, coverage):
     coverage = coverage.intersection(bbox, bbox_srs)
     return flatten_to_polygons(coverage.geom)
 
-def image_mask_from_geom(img, bbox, polygons):
-    transf = make_lin_transf(bbox, (0, 0) + img.size)
+def image_mask_from_geom(size, bbox, polygons):
+    mask = Image.new('L', size, 255)
+    if len(polygons) == 0:
+        return mask
+
+    transf = make_lin_transf(bbox, (0, 0) + size)
+
+    # use negative ~.1 pixel buffer
+    buffer = -0.1 * min((bbox[2] - bbox[0]) / size[0], (bbox[3] - bbox[1]) / size[1])
 
-    mask = Image.new('L', img.size, 255)
     draw = ImageDraw.Draw(mask)
 
-    for p in polygons:
+    def draw_polygon(p):
         draw.polygon([transf(coord) for coord in p.exterior.coords], fill=0)
         for ring in p.interiors:
             draw.polygon([transf(coord) for coord in ring.coords], fill=255)
 
+    for p in polygons:
+        # little bit smaller polygon does not include touched pixels outside coverage
+        buffered = p.buffer(buffer, resolution=1, join_style=2)
+
+        if buffered.type == 'MultiPolygon':
+            # negative buffer can turn polygon into multipolygon
+            for p in buffered:
+                draw_polygon(p)
+        else:
+            draw_polygon(buffered)
+
     return mask
diff --git a/mapproxy/image/merge.py b/mapproxy/image/merge.py
index 7b40a2c..cf35ebe 100644
--- a/mapproxy/image/merge.py
+++ b/mapproxy/image/merge.py
@@ -36,12 +36,15 @@ class LayerMerger(object):
         self.layers = []
         self.cacheable = True
 
-    def add(self, layer_img, layer=None):
+    def add(self, img, coverage=None):
         """
         Add one layer image to merge. Bottom-layers first.
         """
-        if layer_img is not None:
-            self.layers.append((layer_img, layer))
+        if img is not None:
+            self.layers.append((img, coverage))
+
+
+class LayerMerger(LayerMerger):
 
     def merge(self, image_opts, size=None, bbox=None, bbox_srs=None, coverage=None):
         """
@@ -54,11 +57,11 @@ class LayerMerger(object):
         if not self.layers:
             return BlankImageSource(size=size, image_opts=image_opts, cacheable=True)
         if len(self.layers) == 1:
-            layer_img, layer = self.layers[0]
+            layer_img, layer_coverage = self.layers[0]
             layer_opts = layer_img.image_opts
             if (((layer_opts and not layer_opts.transparent) or image_opts.transparent)
                 and (not size or size == layer_img.size)
-                and (not layer or not layer.coverage or not layer.coverage.clip)
+                and (not layer_coverage or not layer_coverage.clip)
                 and not coverage):
                 # layer is opaque, no need to make transparent or add bgcolor
                 return layer_img
@@ -68,7 +71,7 @@ class LayerMerger(object):
 
         cacheable = self.cacheable
         result = create_image(size, image_opts)
-        for layer_img, layer in self.layers:
+        for layer_img, layer_coverage in self.layers:
             if not layer_img.cacheable:
                 cacheable = False
             img = layer_img.as_image()
@@ -78,14 +81,19 @@ class LayerMerger(object):
             else:
                 opacity = layer_image_opts.opacity
 
-            if layer and layer.coverage and layer.coverage.clip:
-                img = mask_image(img, bbox, bbox_srs, layer.coverage)
+            if layer_coverage and layer_coverage.clip:
+                img = mask_image(img, bbox, bbox_srs, layer_coverage)
 
             if result.mode != 'RGBA':
                 merge_composite = False
             else:
                 merge_composite = has_alpha_composite_support()
 
+            if 'transparency' in img.info:
+                # non-paletted PNGs can have a fixed transparency value
+                # convert to RGBA to have full alpha
+                img = img.convert('RGBA')
+
             if merge_composite:
                 if opacity is not None and opacity < 1.0:
                     # fade-out img to add opacity value
@@ -96,18 +104,18 @@ class LayerMerger(object):
                         ImageChops.constant(alpha, int(255 * opacity))
                     )
                     img.putalpha(alpha)
-                if img.mode == 'RGB':
-                    result.paste(img, (0, 0))
-                else:
+                if img.mode in ('RGBA', 'P'):
                     # assume paletted images have transparency
                     if img.mode == 'P':
                         img = img.convert('RGBA')
                     result = Image.alpha_composite(result, img)
+                else:
+                    result.paste(img, (0, 0))
             else:
                 if opacity is not None and opacity < 1.0:
                     img = img.convert(result.mode)
                     result = Image.blend(result, img, layer_image_opts.opacity)
-                elif img.mode == 'RGBA' or img.mode == 'P':
+                elif img.mode in ('RGBA', 'P'):
                     # assume paletted images have transparency
                     if img.mode == 'P':
                         img = img.convert('RGBA')
@@ -149,6 +157,7 @@ class BandMerger(object):
         self.cacheable = True
         self.mode = mode
         self.max_band = {}
+        self.max_src_images = 0
 
     def add_ops(self, dst_band, src_img, src_band, factor=1.0):
         self.ops.append(band_ops(
@@ -159,9 +168,10 @@ class BandMerger(object):
          ))
         # store highest requested band index for each source
         self.max_band[src_img] = max(self.max_band.get(src_img, 0), src_band)
+        self.max_src_images = max(src_img+1, self.max_src_images)
 
     def merge(self, sources, image_opts, size=None, bbox=None, bbox_srs=None, coverage=None):
-        if not sources:
+        if len(sources) < self.max_src_images:
             return BlankImageSource(size=size, image_opts=image_opts, cacheable=True)
 
         if size is None:
@@ -219,7 +229,7 @@ class BandMerger(object):
         return ImageSource(result, size=size, image_opts=image_opts)
 
 
-def merge_images(images, image_opts, size=None):
+def merge_images(layers, image_opts, size=None, bbox=None, bbox_srs=None, merger=None):
     """
     Merge multiple images into one.
 
@@ -227,12 +237,27 @@ def merge_images(images, image_opts, size=None):
     :param format: the format of the output `ImageSource`
     :param size: size of the merged image, if ``None`` the size
                  of the first image is used
+    :param bbox: Bounding box
+    :param bbox_srs: Bounding box SRS
+    :param merger: Image merger
     :rtype: `ImageSource`
     """
-    merger = LayerMerger()
-    for img in images:
-        merger.add(img)
-    return merger.merge(image_opts=image_opts, size=size)
+    if merger is None:
+        merger = LayerMerger()
+
+    # BandMerger does not have coverage support, passing only images
+    if isinstance(merger, BandMerger):
+        sources = [l[0] if isinstance(l, tuple) else l for l in layers]
+        return merger.merge(sources, image_opts=image_opts, size=size, bbox=bbox, bbox_srs=bbox_srs)
+
+    for layer in layers:
+        if isinstance(layer, tuple):
+            merger.add(layer[0], layer[1])
+        else:
+            merger.add(layer)
+
+    return merger.merge(image_opts=image_opts, size=size, bbox=bbox, bbox_srs=bbox_srs)
+
 
 def concat_legends(legends, format='png', size=None, bgcolor='#ffffff', transparent=True):
     """
diff --git a/mapproxy/image/tile.py b/mapproxy/image/tile.py
index 8612b4e..42d2746 100644
--- a/mapproxy/image/tile.py
+++ b/mapproxy/image/tile.py
@@ -1,12 +1,12 @@
 # This file is part of the MapProxy project.
 # Copyright (C) 2010 Omniscale <http://omniscale.de>
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #    http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -33,11 +33,11 @@ class TileMerger(object):
         """
         self.tile_grid = tile_grid
         self.tile_size = tile_size
-    
+
     def merge(self, ordered_tiles, image_opts):
         """
         Merge all tiles into one image.
-        
+
         :param ordered_tiles: list of tiles, sorted row-wise (top to bottom)
         :rtype: `ImageSource`
         """
@@ -47,7 +47,7 @@ class TileMerger(object):
                 tile = ordered_tiles.pop()
                 return tile
         src_size = self._src_size()
-        
+
         result = create_image(src_size, image_opts)
 
         cacheable = True
@@ -73,12 +73,12 @@ class TileMerger(object):
                 else:
                     raise
         return ImageSource(result, size=src_size, image_opts=image_opts, cacheable=cacheable)
-    
+
     def _src_size(self):
         width = self.tile_grid[0]*self.tile_size[0]
         height = self.tile_grid[1]*self.tile_size[1]
         return width, height
-    
+
     def _tile_offset(self, i):
         """
         Return the image offset (upper-left coord) of the i-th tile,
@@ -86,7 +86,7 @@ class TileMerger(object):
         """
         return (i%self.tile_grid[0]*self.tile_size[0],
                 i//self.tile_grid[0]*self.tile_size[1])
-    
+
 
 class TileSplitter(object):
     """
@@ -106,7 +106,7 @@ class TileSplitter(object):
         minx, miny = crop_coord
         maxx = minx + tile_size[0]
         maxy = miny + tile_size[1]
-        
+
         if (minx < 0 or miny < 0 or maxx > self.meta_img.size[0]
             or maxy > self.meta_img.size[1]):
 
@@ -121,7 +121,7 @@ class TileSplitter(object):
         else:
             crop = self.meta_img.crop((minx, miny, maxx, maxy))
         return ImageSource(crop, size=tile_size, image_opts=self.image_opts)
-    
+
 
 class TiledImage(object):
     """
@@ -142,20 +142,20 @@ class TiledImage(object):
         self.tile_size = tile_size
         self.src_bbox = src_bbox
         self.src_srs = src_srs
-    
+
     def image(self, image_opts):
         """
         Return the tiles as one merged image.
-        
+
         :rtype: `ImageSource`
         """
         tm = TileMerger(self.tile_grid, self.tile_size)
         return tm.merge(self.tiles, image_opts=image_opts)
-    
+
     def transform(self, req_bbox, req_srs, out_size, image_opts):
         """
         Return the the tiles as one merged and transformed image.
-        
+
         :param req_bbox: the bbox of the output image
         :param req_srs: the srs of the req_bbox
         :param out_size: the size in pixel of the output image
diff --git a/mapproxy/image/transform.py b/mapproxy/image/transform.py
index 9b15998..1a956ad 100644
--- a/mapproxy/image/transform.py
+++ b/mapproxy/image/transform.py
@@ -15,7 +15,7 @@
 
 from __future__ import division
 
-from mapproxy.compat.image import Image
+from mapproxy.compat.image import Image, transform_uses_center
 from mapproxy.image import ImageSource, image_filter
 from mapproxy.srs import make_lin_transf, bbox_equals
 
@@ -137,11 +137,19 @@ class ImageTransformer(object):
         to_src_px = make_lin_transf(src_bbox, src_quad)
         to_dst_w = make_lin_transf(dst_quad, dst_bbox)
         meshes = []
+
+        # more recent versions of Pillow use center coordinates for
+        # transformations, we manually need to add half a pixel otherwise
+        if transform_uses_center():
+            px_offset = 0.0
+        else:
+            px_offset = 0.5
+
         def dst_quad_to_src(quad):
             src_quad = []
             for dst_px in [(quad[0], quad[1]), (quad[0], quad[3]),
                            (quad[2], quad[3]), (quad[2], quad[1])]:
-                dst_w = to_dst_w((dst_px[0]+0.5, dst_px[1]+0.5))
+                dst_w = to_dst_w((dst_px[0]+px_offset, dst_px[1]+px_offset))
                 src_w = self.dst_srs.transform_to(self.src_srs, dst_w)
                 src_px = to_src_px(src_w)
                 src_quad.extend(src_px)
diff --git a/mapproxy/layer.py b/mapproxy/layer.py
index d404955..e0c7246 100644
--- a/mapproxy/layer.py
+++ b/mapproxy/layer.py
@@ -152,7 +152,7 @@ class InfoQuery(object):
 
     @property
     def coord(self):
-        return make_lin_transf((0, self.size[1], self.size[0], 0), self.bbox)(self.pos)
+        return make_lin_transf((0, 0, self.size[0], self.size[1]), self.bbox)(self.pos)
 
 class LegendQuery(object):
     def __init__(self, format, scale):
diff --git a/mapproxy/request/arcgis.py b/mapproxy/request/arcgis.py
index 6acae96..96c0bd5 100644
--- a/mapproxy/request/arcgis.py
+++ b/mapproxy/request/arcgis.py
@@ -14,9 +14,10 @@
 # limitations under the License.
 
 from functools import partial as fp
-from mapproxy.request.base import RequestParams, BaseRequest
 from mapproxy.compat import string_type
-
+from mapproxy.compat.modules import urlparse
+from mapproxy.request.base import RequestParams, BaseRequest
+from mapproxy.srs import make_lin_transf
 
 class ArcGISExportRequestParams(RequestParams):
     """
@@ -86,6 +87,80 @@ class ArcGISExportRequestParams(RequestParams):
     del _set_srs
 
 
+
+class ArcGISIdentifyRequestParams(ArcGISExportRequestParams):
+    def _get_format(self):
+        """
+        The requested format as string (w/o any 'image/', 'text/', etc. prefixes)
+        """
+        return self["format"]
+    def _set_format(self, format):
+        self["format"] = format.rsplit("/")[-1]
+    format = property(_get_format, _set_format)
+    del _get_format
+    del _set_format
+
+    def _get_bbox(self):
+        """
+        ``bbox`` as a tuple (minx, miny, maxx, maxy).
+        """
+        if 'mapExtent' not in self.params or self.params['mapExtent'] is None:
+            return None
+        points = [float(val) for val in self.params['mapExtent'].split(',')]
+        return tuple(points[:4])
+    def _set_bbox(self, value):
+        if value is not None and not isinstance(value, string_type):
+            value = ','.join(str(x) for x in value)
+        self['mapExtent'] = value
+    bbox = property(_get_bbox, _set_bbox)
+    del _get_bbox
+    del _set_bbox
+
+    def _get_size(self):
+        """
+        Size of the request in pixel as a tuple (width, height),
+        or None if one is missing.
+        """
+        if 'imageDisplay' not in self.params or self.params['imageDisplay'] is None:
+            return None
+        dim = [float(val) for val in self.params['imageDisplay'].split(',')]
+        return tuple(dim[:2])
+    def _set_size(self, value):
+        if value is not None and not isinstance(value, string_type):
+            value = ','.join(str(x) for x in value) + ',96'
+        self['imageDisplay'] = value
+    size = property(_get_size, _set_size)
+    del _get_size
+    del _set_size
+
+    def _get_pos(self):
+        size = self.size
+        vals = self['geometry'].split(',')
+        x, y = float(vals[0]), float(vals[1])
+        return make_lin_transf(self.bbox, (0, 0, size[0], size[1]))((x, y))
+
+    def _set_pos(self, value):
+        size = self.size
+        req_coord = make_lin_transf((0, 0, size[0], size[1]), self.bbox)(value)
+        self['geometry'] = '%f,%f' % req_coord
+    pos = property(_get_pos, _set_pos)
+    del _get_pos
+    del _set_pos
+
+    @property
+    def srs(self):
+        srs = self.params.get('sr', None)
+        if srs:
+            return 'EPSG:%s' % srs
+
+    @srs.setter
+    def srs(self, srs):
+        if hasattr(srs, 'srs_code'):
+            code = srs.srs_code
+        else:
+            code = srs
+        self.params['sr'] = code.rsplit(':', 1)[-1]
+
 class ArcGISRequest(BaseRequest):
     request_params = ArcGISExportRequestParams
     fixed_params = {"f": "image"}
@@ -93,9 +168,7 @@ class ArcGISRequest(BaseRequest):
     def __init__(self, param=None, url='', validate=False, http=None):
         BaseRequest.__init__(self, param, url, validate, http)
 
-        self.url = self.url.rstrip("/")
-        if not self.url.endswith("export"):
-            self.url += "/export"
+        self.url = rest_endpoint(url)
 
     def copy(self):
         return self.__class__(param=self.params.copy(), url=self.url)
@@ -108,6 +181,35 @@ class ArcGISRequest(BaseRequest):
         return params.query_string
 
 
+class ArcGISIdentifyRequest(BaseRequest):
+    request_params = ArcGISIdentifyRequestParams
+    fixed_params = {'geometryType': 'esriGeometryPoint'}
+    def __init__(self, param=None, url='', validate=False, http=None):
+        BaseRequest.__init__(self, param, url, validate, http)
+
+        self.url = rest_identify_endpoint(url)
+
+    def copy(self):
+        return self.__class__(param=self.params.copy(), url=self.url)
+
+    @property
+    def query_string(self):
+        params = self.params.copy()
+        for key, value in self.fixed_params.items():
+            params[key] = value
+        return params.query_string
+
+
+
+def create_identify_request(req_data, param):
+    req_data = req_data.copy()
+
+    # Pop the URL off the request data.
+    url = req_data['url']
+    del req_data['url']
+
+    return ArcGISIdentifyRequest(url=url, param=req_data)
+
 def create_request(req_data, param):
     req_data = req_data.copy()
 
@@ -123,3 +225,35 @@ def create_request(req_data, param):
         req_data['transparent'] = str(req_data['transparent'])
 
     return ArcGISRequest(url=url, param=req_data)
+
+
+def rest_endpoint(url):
+    parts = urlparse.urlsplit(url)
+    path = parts.path.rstrip('/').split('/')
+
+    if path[-1] in ('export', 'exportImage'):
+        if path[-2] == 'MapServer':
+            path[-1] = 'export'
+        elif path[-2] == 'ImageServer':
+            path[-1] = 'exportImage'
+    elif path[-1] == 'MapServer':
+        path.append('export')
+    elif path[-1] == 'ImageServer':
+        path.append('exportImage')
+
+    parts = parts[0], parts[1], '/'.join(path), parts[3], parts[4]
+    return urlparse.urlunsplit(parts)
+
+
+def rest_identify_endpoint(url):
+    parts = urlparse.urlsplit(url)
+    path = parts.path.rstrip('/').split('/')
+
+    if path[-1] in ('export', 'exportImage'):
+        path[-1] = 'identify'
+    elif path[-1] in ('MapServer', 'ImageServer'):
+        path.append('identify')
+
+    parts = parts[0], parts[1], '/'.join(path), parts[3], parts[4]
+    return urlparse.urlunsplit(parts)
+
diff --git a/mapproxy/request/wms/__init__.py b/mapproxy/request/wms/__init__.py
index 47e8489..548be1e 100644
--- a/mapproxy/request/wms/__init__.py
+++ b/mapproxy/request/wms/__init__.py
@@ -734,10 +734,12 @@ info_formats = {
     Version('1.3.0'): (('text', 'text/plain'),
                        ('html', 'text/html'),
                        ('xml', 'text/xml'),
+                       ('json', 'application/json'),
                       ),
     None: (('text', 'text/plain'),
            ('html', 'text/html'),
            ('xml', 'application/vnd.ogc.gml'),
+           ('json', 'application/json'),
           )
 }
 
diff --git a/mapproxy/response.py b/mapproxy/response.py
index 098aa93..d537892 100644
--- a/mapproxy/response.py
+++ b/mapproxy/response.py
@@ -91,7 +91,7 @@ class Response(object):
 
         self.last_modified = timestamp
         if (timestamp or etag_data) and max_age is not None:
-            self.headers['Cache-control'] = 'max-age=%d public' % max_age
+            self.headers['Cache-control'] = 'public, max-age=%d, s-maxage=%d' % (max_age, max_age)
 
     def make_conditional(self, req):
         """
diff --git a/mapproxy/script/export.py b/mapproxy/script/export.py
index 32c47d1..4a35207 100644
--- a/mapproxy/script/export.py
+++ b/mapproxy/script/export.py
@@ -97,6 +97,10 @@ def export_command(args=None):
     parser.add_option("-f", "--mapproxy-conf", dest="mapproxy_conf",
         help="MapProxy configuration")
 
+    parser.add_option("-q", "--quiet",
+                      action="count", dest="quiet", default=0,
+                      help="reduce number of messages to stdout, repeat to disable progress output")
+
     parser.add_option("--source", dest="source",
         help="source to export (source or cache)")
 
@@ -204,6 +208,22 @@ def export_command(args=None):
             'type': 'mbtiles',
             'filename': options.dest,
         }
+    elif options.type == 'sqlite':
+        cache_conf['cache'] = {
+            'type': 'sqlite',
+            'directory': options.dest,
+        }
+    elif options.type == 'geopackage':
+        cache_conf['cache'] = {
+            'type': 'geopackage',
+            'filename': options.dest,
+        }
+    elif options.type == 'compact-v1':
+        cache_conf['cache'] = {
+            'type': 'compact',
+            'version': 1,
+            'directory': options.dest,
+        }
     elif options.type in ('tc', 'mapproxy'):
         cache_conf['cache'] = {
             'type': 'file',
@@ -257,7 +277,7 @@ def export_command(args=None):
 
     print(format_export_task(task, custom_grid=custom_grid))
 
-    logger = ProgressLog(verbose=True, silent=False)
+    logger = ProgressLog(verbose=options.quiet==0, silent=options.quiet>=2)
     try:
         seed_task(task, progress_logger=logger, dry_run=options.dry_run,
              concurrency=options.concurrency)
diff --git a/mapproxy/script/scales.py b/mapproxy/script/scales.py
index 854a379..b283367 100644
--- a/mapproxy/script/scales.py
+++ b/mapproxy/script/scales.py
@@ -91,9 +91,9 @@ def scales_command(args=None):
     if args[0] == '-':
         values = values_from_stdin()
     elif options.eval:
-        values = map(eval, args)
+        values = [eval(a) for a in args]
     else:
-        values = map(float, args)
+        values = [float(a) for a in args]
 
     values.sort(reverse=True)
 
diff --git a/mapproxy/seed/cleanup.py b/mapproxy/seed/cleanup.py
index 058a338..81c7eea 100644
--- a/mapproxy/seed/cleanup.py
+++ b/mapproxy/seed/cleanup.py
@@ -16,9 +16,14 @@
 from __future__ import print_function
 
 import os
+from mapproxy.compat.itertools import izip_longest
 from mapproxy.seed.util import format_cleanup_task
 from mapproxy.util.fs import cleanup_directory
-from mapproxy.seed.seeder import TileWorkerPool, TileWalker, TileCleanupWorker
+from mapproxy.seed.seeder import (
+    TileWorkerPool, TileWalker, TileCleanupWorker,
+    SeedProgress,
+)
+from mapproxy.seed.util import ProgressLog
 
 def cleanup(tasks, concurrency=2, dry_run=False, skip_geoms_for_last_levels=0,
                verbose=True, progress_logger=None):
@@ -28,22 +33,37 @@ def cleanup(tasks, concurrency=2, dry_run=False, skip_geoms_for_last_levels=0,
         if task.coverage is False:
             continue
 
+        # seed_progress for tilewalker cleanup
+        seed_progress = None
+        # cleanup_progress for os.walk based cleanup
+        cleanup_progress = None
+        if progress_logger and progress_logger.progress_store:
+            progress_logger.current_task_id = task.id
+            start_progress = progress_logger.progress_store.get(task.id)
+            seed_progress = SeedProgress(old_progress_identifier=start_progress)
+            cleanup_progress = DirectoryCleanupProgress(old_dir=start_progress)
+
         if task.complete_extent:
-            if hasattr(task.tile_manager.cache, 'level_location'):
-                simple_cleanup(task, dry_run=dry_run, progress_logger=progress_logger)
+            if callable(getattr(task.tile_manager.cache, 'level_location', None)):
+                simple_cleanup(task, dry_run=dry_run, progress_logger=progress_logger,
+                    cleanup_progress=cleanup_progress)
                 continue
-            elif hasattr(task.tile_manager.cache, 'remove_level_tiles_before'):
+            elif callable(getattr(task.tile_manager.cache, 'remove_level_tiles_before', None)):
                 cache_cleanup(task, dry_run=dry_run, progress_logger=progress_logger)
                 continue
 
         tilewalker_cleanup(task, dry_run=dry_run, concurrency=concurrency,
                          skip_geoms_for_last_levels=skip_geoms_for_last_levels,
-                         progress_logger=progress_logger)
+                         progress_logger=progress_logger,
+                         seed_progress=seed_progress,
+        )
+
 
-def simple_cleanup(task, dry_run, progress_logger=None):
+def simple_cleanup(task, dry_run, progress_logger=None, cleanup_progress=None):
     """
     Cleanup cache level on file system level.
     """
+
     for level in task.levels:
         level_dir = task.tile_manager.cache.level_location(level)
         if dry_run:
@@ -53,6 +73,16 @@ def simple_cleanup(task, dry_run, progress_logger=None):
             file_handler = None
         if progress_logger:
             progress_logger.log_message('removing old tiles in ' + normpath(level_dir))
+            if progress_logger.progress_store:
+                cleanup_progress.step_dir(level_dir)
+                if cleanup_progress.already_processed():
+                    continue
+                progress_logger.progress_store.add(
+                    task.id,
+                    cleanup_progress.current_progress_identifier(),
+                )
+                progress_logger.progress_store.write()
+
         cleanup_directory(level_dir, task.remove_timestamp,
             file_handler=file_handler, remove_empty_dirs=True)
 
@@ -78,7 +108,7 @@ def normpath(path):
     return path
 
 def tilewalker_cleanup(task, dry_run, concurrency, skip_geoms_for_last_levels,
-    progress_logger=None):
+    progress_logger=None, seed_progress=None):
     """
     Cleanup tiles with tile traversal.
     """
@@ -88,7 +118,8 @@ def tilewalker_cleanup(task, dry_run, concurrency, skip_geoms_for_last_levels,
                                       dry_run=dry_run, size=concurrency)
     tile_walker = TileWalker(task, tile_worker_pool, handle_stale=True,
                              work_on_metatiles=False, progress_logger=progress_logger,
-                             skip_geoms_for_last_levels=skip_geoms_for_last_levels)
+                             skip_geoms_for_last_levels=skip_geoms_for_last_levels,
+                             seed_progress=seed_progress)
     try:
         tile_walker.walk()
     except KeyboardInterrupt:
@@ -96,3 +127,60 @@ def tilewalker_cleanup(task, dry_run, concurrency, skip_geoms_for_last_levels,
         raise
     finally:
         tile_worker_pool.stop()
+
+
+class DirectoryCleanupProgress(object):
+    def __init__(self, old_dir=None):
+        self.old_dir = old_dir
+        self.current_dir = None
+
+    def step_dir(self, dir):
+        self.current_dir = dir
+
+    def already_processed(self):
+        return self.can_skip(self.old_dir, self.current_dir)
+
+    def current_progress_identifier(self):
+        if self.already_processed() or self.current_dir is None:
+            return self.old_dir
+        return self.current_dir
+
+    @staticmethod
+    def can_skip(old_dir, current_dir):
+        """
+        Return True if `current_dir` sorts before `old_dir` when the two
+        paths are compared lexicographically.
+
+        >>> DirectoryCleanupProgress.can_skip(None, '/00')
+        False
+        >>> DirectoryCleanupProgress.can_skip(None, '/00/000/000')
+        False
+
+        >>> DirectoryCleanupProgress.can_skip('/01/000/001', '/00')
+        True
+        >>> DirectoryCleanupProgress.can_skip('/01/000/001', '/01/000/000')
+        True
+        >>> DirectoryCleanupProgress.can_skip('/01/000/001', '/01/000/000/000')
+        True
+        >>> DirectoryCleanupProgress.can_skip('/01/000/001', '/01/000/001')
+        False
+        >>> DirectoryCleanupProgress.can_skip('/01/000/001', '/01/000/001/000')
+        False
+        """
+        if old_dir is None:
+            return False
+        if current_dir is None:
+            return False
+        for old, current in izip_longest(old_dir.split(os.path.sep), current_dir.split(os.path.sep), fillvalue=None):
+            if old is None:
+                return False
+            if current is None:
+                return False
+            if old < current:
+                return False
+            if old > current:
+                return True
+        return False
+
+    def running(self):
+        return True
diff --git a/mapproxy/seed/script.py b/mapproxy/seed/script.py
index d01369e..64579be 100644
--- a/mapproxy/seed/script.py
+++ b/mapproxy/seed/script.py
@@ -15,11 +15,17 @@
 
 from __future__ import print_function
 
+import errno
+import os
+import re
+import signal
 import sys
+import time
 import logging
 from logging.config import fileConfig
 
-from optparse import OptionParser
+from subprocess import Popen
+from optparse import OptionParser, OptionValueError
 
 from mapproxy.config.loader import load_configuration, ConfigurationError
 from mapproxy.seed.config import load_seed_tasks_conf
@@ -29,6 +35,9 @@ from mapproxy.seed.util import (format_seed_task, format_cleanup_task,
     ProgressLog, ProgressStore)
 from mapproxy.seed.cachelock import CacheLocker
 
+SECONDS_PER_DAY = 60 * 60 * 24
+SECONDS_PER_MINUTE = 60
+
 def setup_logging(logging_conf=None):
     if logging_conf is not None:
         fileConfig(logging_conf, {'here': './'})
@@ -43,6 +52,35 @@ def setup_logging(logging_conf=None):
     ch.setFormatter(formatter)
     mapproxy_log.addHandler(ch)
 
+
+def check_duration(option, opt, value, parser):
+    try:
+        setattr(parser.values, option.dest, parse_duration(value))
+    except ValueError:
+        raise OptionValueError(
+            "option %s: invalid duration value: %r, expected (10s, 15m, 0.5h, 3d, etc)"
+            % (opt, value),
+        )
+
+
+def parse_duration(string):
+    match = re.match(r'^(\d*.?\d+)(s|m|h|d)', string)
+    if not match:
+        raise ValueError('invalid duration, not in format: 10s, 0.5h, etc.')
+    duration = float(match.group(1))
+    unit = match.group(2)
+    if unit == 's':
+        return duration
+    duration *= 60
+    if unit == 'm':
+        return duration
+    duration *= 60
+    if unit == 'h':
+        return duration
+    duration *= 24
+    return duration
+
+
 class SeedScript(object):
     usage = "usage: %prog [options] seed_conf"
     parser = OptionParser(usage)
@@ -97,6 +135,19 @@ class SeedScript(object):
                       default=None,
                       help="filename for storing the seed progress (for --continue option)")
 
+    parser.add_option("--duration", dest="duration",
+                      help="stop seeding after (120s, 15m, 4h, 0.5d, etc)",
+                      type=str, action="callback", callback=check_duration)
+
+    parser.add_option("--reseed-file", dest="reseed_file",
+                      help="start of last re-seed", metavar="FILE",
+                      default=None)
+    parser.add_option("--reseed-interval", dest="reseed_interval",
+                      help="only start seeding if --reseed-file is older than --reseed-interval",
+                      metavar="DURATION",
+                      type=str, action="callback", callback=check_duration,
+                      default=None)
+
     parser.add_option("--log-config", dest='logging_conf', default=None,
                       help="logging configuration")
 
@@ -118,6 +169,10 @@ class SeedScript(object):
 
         setup_logging(options.logging_conf)
 
+        if options.duration:
+            # calls with --duration are handled in call_with_duration
+            sys.exit(self.call_with_duration(options, args))
+
         try:
             mapproxy_conf = load_configuration(options.conf_file, seed=True)
         except ConfigurationError as ex:
@@ -133,6 +188,29 @@ class SeedScript(object):
             # disable verbose output for non-ttys
             options.quiet = 1
 
+        progress = None
+        if options.continue_seed or options.progress_file:
+            if not options.progress_file:
+                options.progress_file = '.mapproxy_seed_progress'
+            progress = ProgressStore(options.progress_file,
+                                     continue_seed=options.continue_seed)
+
+        if options.reseed_file:
+            if not os.path.exists(options.reseed_file):
+                # create --reseed-file if missing
+                with open(options.reseed_file, 'w'):
+                    pass
+            else:
+                if progress and not os.path.exists(options.progress_file):
+                    # we have an existing --reseed-file but no --progress-file
+                    # meaning the last seed call was completed
+                    if options.reseed_interval and (
+                        os.path.getmtime(options.reseed_file) > (time.time() - options.reseed_interval)
+                    ):
+                        print("no need for re-seeding")
+                        sys.exit(1)
+                    os.utime(options.reseed_file, (time.time(), time.time()))
+
         with mapproxy_conf:
             try:
                 seed_conf = load_seed_tasks_conf(options.seed_file, mapproxy_conf)
@@ -152,15 +230,6 @@ class SeedScript(object):
                     print(format_cleanup_task(task))
                 return 0
 
-            progress = None
-            if options.continue_seed or options.progress_file:
-                if options.progress_file:
-                    progress_file = options.progress_file
-                else:
-                    progress_file = '.mapproxy_seed_progress'
-                progress = ProgressStore(progress_file,
-                    continue_seed=options.continue_seed)
-
             try:
                 if options.interactive:
                     seed_tasks, cleanup_tasks = self.interactive(seed_tasks, cleanup_tasks)
@@ -178,7 +247,8 @@ class SeedScript(object):
                     print('========== Cleanup tasks ==========')
                     print('Start cleanup process (%d task%s)' % (
                         len(cleanup_tasks), 's' if len(cleanup_tasks) > 1 else ''))
-                    logger = ProgressLog(verbose=options.quiet==0, silent=options.quiet>=2)
+                    logger = ProgressLog(verbose=options.quiet==0, silent=options.quiet>=2,
+                        progress_store=progress)
                     cleanup(cleanup_tasks, verbose=options.quiet==0, dry_run=options.dry_run,
                             concurrency=options.concurrency, progress_logger=logger,
                             skip_geoms_for_last_levels=options.geom_levels)
@@ -225,6 +295,48 @@ class SeedScript(object):
 
         return seed_names, cleanup_names
 
+    def call_with_duration(self, options, args):
+        # --duration is implemented by calling mapproxy-seed again in a separate
+        # process (but without --duration) and terminating that process
+        # after --duration
+
+        argv = sys.argv[:]
+        for i, arg in enumerate(sys.argv):
+            if arg == '--duration':
+                argv = sys.argv[:i] + sys.argv[i+2:]
+                break
+            elif arg.startswith('--duration='):
+                argv = sys.argv[:i] + sys.argv[i+1:]
+                break
+
+        # call mapproxy-seed again, poll status, terminate after --duration
+        cmd = Popen(args=argv)
+        start = time.time()
+        while True:
+            if (time.time() - start) > options.duration:
+                try:
+                    cmd.send_signal(signal.SIGINT)
+                    # try to stop with sigint
+                    # send sigterm after 10 seconds
+                    for _ in range(10):
+                        time.sleep(1)
+                        if cmd.poll() is not None:
+                            break
+                    else:
+                        cmd.terminate()
+                except OSError as ex:
+                    if ex.errno != errno.ESRCH:  # no such process
+                        raise
+                return 0
+            if cmd.poll() is not None:
+                return cmd.returncode
+            try:
+                time.sleep(1)
+            except KeyboardInterrupt:
+                # force termination
+                start = 0
+
+
     def interactive(self, seed_tasks, cleanup_tasks):
         selected_seed_tasks = []
         print('========== Select seeding tasks ==========')
@@ -264,5 +376,6 @@ def split_comma_seperated_option(option):
             result.extend(args.split(','))
     return result
 
+
 if __name__ == '__main__':
     main()
diff --git a/mapproxy/seed/seeder.py b/mapproxy/seed/seeder.py
index 7769221..320fc6e 100644
--- a/mapproxy/seed/seeder.py
+++ b/mapproxy/seed/seeder.py
@@ -16,6 +16,7 @@
 from __future__ import print_function, division
 
 import sys
+from collections import deque
 from contextlib import contextmanager
 import time
 try:
@@ -32,7 +33,7 @@ from mapproxy.util.lock import LockTimeout
 from mapproxy.seed.util import format_seed_task, timestamp
 from mapproxy.seed.cachelock import DummyCacheLocker, CacheLockedError
 
-from mapproxy.seed.util import (exp_backoff, ETA, limit_sub_bbox,
+from mapproxy.seed.util import (exp_backoff, limit_sub_bbox,
     status_symbol, BackoffError)
 
 import logging
@@ -54,40 +55,11 @@ else:
     queue_class = multiprocessing.Queue
 
 
-class TileProcessor(object):
-    def __init__(self, dry_run=False):
-        self._lastlog = time.time()
-        self.dry_run = dry_run
-
-    def log_progress(self, progress):
-        if (self._lastlog + .1) < time.time():
-            # log progress at most every 100ms
-            print('[%s] %6.2f%% %s \tETA: %s\r' % (
-                timestamp(), progress[1]*100, progress[0],
-                progress[2]
-            ), end=' ')
-            sys.stdout.flush()
-            self._lastlog = time.time()
-
-    def process(self, tiles, progress):
-        if not self.dry_run:
-            self.process_tiles(tiles)
-
-        self.log_progress(progress)
-
-    def stop(self):
-        raise NotImplementedError()
-
-    def process_tiles(self, tiles):
-        raise NotImplementedError()
-
-
-class TileWorkerPool(TileProcessor):
+class TileWorkerPool(object):
     """
     Manages multiple TileWorker.
     """
     def __init__(self, task, worker_class, size=2, dry_run=False, progress_logger=None):
-        TileProcessor.__init__(self, dry_run=dry_run)
         self.tiles_queue = queue_class(size)
         self.task = task
         self.dry_run = dry_run
@@ -193,17 +165,14 @@ class TileCleanupWorker(TileWorker):
 class SeedProgress(object):
     def __init__(self, old_progress_identifier=None):
         self.progress = 0.0
-        self.eta = ETA()
         self.level_progress_percentages = [1.0]
-        self.level_progresses = []
+        self.level_progresses = None
+        self.level_progresses_level = 0
         self.progress_str_parts = []
-        self.old_level_progresses = None
-        if old_progress_identifier is not None:
-            self.old_level_progresses = old_progress_identifier
+        self.old_level_progresses = old_progress_identifier
 
     def step_forward(self, subtiles=1):
         self.progress += self.level_progress_percentages[-1] / subtiles
-        self.eta.update(self.progress)
 
     @property
     def progress_str(self):
@@ -211,53 +180,79 @@ class SeedProgress(object):
 
     @contextmanager
     def step_down(self, i, subtiles):
+        if self.level_progresses is None:
+            self.level_progresses = []
+        self.level_progresses = self.level_progresses[:self.level_progresses_level]
         self.level_progresses.append((i, subtiles))
+        self.level_progresses_level += 1
         self.progress_str_parts.append(status_symbol(i, subtiles))
         self.level_progress_percentages.append(self.level_progress_percentages[-1] / subtiles)
+
         yield
+
         self.level_progress_percentages.pop()
         self.progress_str_parts.pop()
-        self.level_progresses.pop()
 
-    def already_processed(self):
-        if self.old_level_progresses == []:
-            return True
-
-        if self.old_level_progresses is None:
-            return False
+        self.level_progresses_level -= 1
+        if self.level_progresses_level == 0:
+            self.level_progresses = []
 
-        if self.progress_is_behind(self.old_level_progresses, self.level_progresses):
-            return True
-        else:
-            return False
+    def already_processed(self):
+        return self.can_skip(self.old_level_progresses, self.level_progresses)
 
     def current_progress_identifier(self):
-        return self.level_progresses
+        if self.already_processed() or self.level_progresses is None:
+            return self.old_level_progresses
+        return self.level_progresses[:]
 
     @staticmethod
-    def progress_is_behind(old_progress, current_progress):
+    def can_skip(old_progress, current_progress):
         """
         Return True if the `current_progress` is behind the `old_progress` -
         when it isn't as far as the old progress.
 
-        >>> SeedProgress.progress_is_behind([], [(0, 1)])
-        True
-        >>> SeedProgress.progress_is_behind([(0, 1), (1, 4)], [(0, 1)])
+        >>> SeedProgress.can_skip(None, [(0, 4)])
         False
-        >>> SeedProgress.progress_is_behind([(0, 1), (1, 4)], [(0, 1), (0, 4)])
+        >>> SeedProgress.can_skip([], [(0, 4)])
         True
-        >>> SeedProgress.progress_is_behind([(0, 1), (1, 4)], [(0, 1), (1, 4)])
+        >>> SeedProgress.can_skip([(0, 4)], None)
+        False
+        >>> SeedProgress.can_skip([(0, 4)], [(0, 4)])
+        False
+        >>> SeedProgress.can_skip([(1, 4)], [(0, 4)])
         True
-        >>> SeedProgress.progress_is_behind([(0, 1), (1, 4)], [(0, 1), (3, 4)])
+        >>> SeedProgress.can_skip([(0, 4)], [(0, 4), (0, 4)])
         False
 
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (0, 4)])
+        False
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (0, 4), (1, 4)])
+        True
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (0, 4), (2, 4)])
+        False
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (0, 4), (3, 4)])
+        False
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (1, 4)])
+        False
+        >>> SeedProgress.can_skip([(0, 4), (0, 4), (2, 4)], [(0, 4), (1, 4), (0, 4)])
+        False
         """
-        for old, current in izip_longest(old_progress, current_progress, fillvalue=(9e15, 9e15)):
+        if current_progress is None:
+            return False
+        if old_progress is None:
+            return False
+        if old_progress == []:
+            return True
+        for old, current in izip_longest(old_progress, current_progress, fillvalue=None):
+            if old is None:
+                return False
+            if current is None:
+                return False
             if old < current:
                 return False
             if old > current:
                 return True
-        return True
+        return False
 
     def running(self):
         return True
@@ -270,6 +265,12 @@ class SeedInterrupted(Exception):
 
 
 class TileWalker(object):
+    """
+    TileWalker traverses through all tiles in a tile grid and calls worker_pool.process
+    for each (meta) tile. It traverses the tile grid (pyramid) depth-first.
+    Intersections with coverages are checked before handling subtiles in the next level,
+    allowing to determine if all subtiles should be seeded or skipped.
+    """
     def __init__(self, task, worker_pool, handle_stale=False, handle_uncached=False,
                  work_on_metatiles=True, skip_geoms_for_last_levels=0, progress_logger=None,
                  seed_progress=None):
@@ -283,13 +284,33 @@ class TileWalker(object):
         self.progress_logger = progress_logger
 
         num_seed_levels = len(task.levels)
-        self.report_till_level = task.levels[int(num_seed_levels * 0.8)]
+        if num_seed_levels >= 4:
+            self.report_till_level = task.levels[num_seed_levels-2]
+        else:
+            self.report_till_level = task.levels[num_seed_levels-1]
         meta_size = self.tile_mgr.meta_grid.meta_size if self.tile_mgr.meta_grid else (1, 1)
         self.tiles_per_metatile = meta_size[0] * meta_size[1]
         self.grid = MetaGrid(self.tile_mgr.grid, meta_size=meta_size, meta_buffer=0)
         self.count = 0
         self.seed_progress = seed_progress or SeedProgress()
 
+        # It is possible that we 'walk' through the same tile multiple times
+        # when seeding irregular tile grids[0]. limit_sub_bbox prevents us from
+        # recursing into the same area multiple times, but it is still possible
+        # that a tile is processed multiple times. Locking prevents a tile from
+        # being seeded multiple times, but it is possible that we count the same tile
+        # multiple times (in dry-mode, or while the tile is in the process queue).
+
+        # Tile counts can be off by 280% with sqrt2 grids.
+        # We keep a small cache of already processed tiles to skip most duplicates.
+        # A simple cache of 64 tile coordinates for each level already brings the
+        # difference down to ~8%, which is good enough and faster than a more
+        # sophisticated FIFO cache with O(1) lookup, or even caching all tiles.
+
+        # [0] irregular tile grids: where one tile does not have exactly 4 subtiles
+        # Typically when you use res_factor, or a custom res list.
+        self.seeded_tiles = {l: deque(maxlen=64) for l in task.levels}
+
     def walk(self):
         assert self.handle_stale or self.handle_uncached
         bbox = self.task.coverage.extent.bbox_for(self.tile_mgr.grid.srs)
@@ -330,7 +351,6 @@ class TileWalker(object):
         if current_level in levels:
             levels = levels[1:]
             process = True
-        current_level += 1
 
         for i, (subtile, sub_bbox, intersection) in enumerate(subtiles):
             if subtile is None: # no intersection
@@ -347,12 +367,17 @@ class TileWalker(object):
                     if self.seed_progress.already_processed():
                         self.seed_progress.step_forward()
                     else:
-                        self._walk(sub_bbox, levels, current_level=current_level,
+                        self._walk(sub_bbox, levels, current_level=current_level+1,
                             all_subtiles=all_subtiles)
 
             if not process:
                 continue
 
+            # check if subtile was already processed. see comment in __init__
+            if subtile in self.seeded_tiles[current_level]:
+                continue
+            self.seeded_tiles[current_level].appendleft(subtile)
+
             if not self.work_on_metatiles:
                 # collect actual tiles
                 handle_tiles = self.grid.tile_list(subtile)
@@ -436,6 +461,10 @@ class CleanupTask(object):
         self.coverage = coverage
         self.complete_extent = complete_extent
 
+    @property
+    def id(self):
+        return 'cleanup', self.md['name'], self.md['cache_name'], self.md['grid_name']
+
     def intersects(self, bbox):
         if self.coverage.contains(bbox, self.grid.srs): return CONTAINS
         if self.coverage.intersects(bbox, self.grid.srs): return INTERSECTS
diff --git a/mapproxy/seed/util.py b/mapproxy/seed/util.py
index c375c8e..d33249f 100644
--- a/mapproxy/seed/util.py
+++ b/mapproxy/seed/util.py
@@ -42,56 +42,6 @@ class bidict(dict):
             dict.__setitem__(self, key, val)
             dict.__setitem__(self, val, key)
 
-class ETA(object):
-    def __init__(self):
-        self.avgs = []
-        self.last_tick_start = time.time()
-        self.progress = 0.0
-        self.ticks = 10000
-        self.tick_duration_sums = 0.0
-        self.tick_duration_divisor = 0.0
-        self.tick_count = 0
-
-    def update(self, progress):
-        self.progress = progress
-        missing_ticks = (self.progress * self.ticks) - self.tick_count
-        if missing_ticks:
-            tick_duration = (time.time() - self.last_tick_start) / missing_ticks
-
-            while missing_ticks > 0:
-
-                # reduce the influence of older messurements
-                self.tick_duration_sums *= 0.999
-                self.tick_duration_divisor *= 0.999
-
-                self.tick_count += 1
-
-                self.tick_duration_sums += tick_duration
-                self.tick_duration_divisor += 1
-
-                missing_ticks -= 1
-
-            self.last_tick_start = time.time()
-
-    def eta_string(self):
-        timestamp = self.eta()
-        if timestamp is None:
-            return 'N/A'
-        try:
-            return time.strftime('%Y-%m-%d-%H:%M:%S', time.localtime(timestamp))
-        except (ValueError, OSError): # OSError since Py 3.3
-            # raised when time is out of range (e.g. year >2038)
-            return 'N/A'
-
-    def eta(self):
-        if not self.tick_count: return
-        return (self.last_tick_start +
-                ((self.tick_duration_sums/self.tick_duration_divisor)
-                 * (self.ticks - self.tick_count)))
-
-    def __str__(self):
-        return self.eta_string()
-
 class ProgressStore(object):
     """
     Reads and stores seed progresses to a file.
@@ -142,7 +92,9 @@ class ProgressLog(object):
         if not out:
             out = sys.stdout
         self.out = out
-        self.lastlog = time.time()
+        self._laststep = time.time()
+        self._lastprogress = 0
+
         self.verbose = verbose
         self.silent = silent
         self.current_task_id = None
@@ -157,27 +109,38 @@ class ProgressLog(object):
     def log_step(self, progress):
         if not self.verbose:
             return
-        if (self.lastlog + .1) < time.time():
-            # log progress at most every 100ms
-            self.out.write('[%s] %6.2f%%\t%-20s ETA: %s\r' % (
+        if (self._laststep + .5) < time.time():
+            # log progress at most every 500ms
+            self.out.write('[%s] %6.2f%%\t%-20s \r' % (
                 timestamp(), progress.progress*100, progress.progress_str,
-                progress.eta
             ))
             self.out.flush()
-            self.lastlog = time.time()
+            self._laststep = time.time()
 
     def log_progress(self, progress, level, bbox, tiles):
-        if self.progress_store and self.current_task_id:
-            self.progress_store.add(self.current_task_id,
-                progress.current_progress_identifier())
-            self.progress_store.write()
+        progress_interval = 1
+        if not self.verbose:
+            progress_interval = 30
+
+        log_progress = False
+        if progress.progress == 1.0 or (self._lastprogress + progress_interval) < time.time():
+            self._lastprogress = time.time()
+            log_progress = True
+
+        if log_progress:
+            if self.progress_store and self.current_task_id:
+                self.progress_store.add(self.current_task_id,
+                    progress.current_progress_identifier())
+                self.progress_store.write()
 
         if self.silent:
             return
-        self.out.write('[%s] %2s %6.2f%% %s (%d tiles) ETA: %s\n' % (
-            timestamp(), level, progress.progress*100,
-            format_bbox(bbox), tiles, progress.eta))
-        self.out.flush()
+
+        if log_progress:
+            self.out.write('[%s] %2s %6.2f%% %s (%d tiles)\n' % (
+                timestamp(), level, progress.progress*100,
+                format_bbox(bbox), tiles))
+            self.out.flush()
 
 
 def limit_sub_bbox(bbox, sub_bbox):
diff --git a/mapproxy/service/templates/wmts100capabilities.xml b/mapproxy/service/templates/wmts100capabilities.xml
index d0e0d0c..a3a7acc 100644
--- a/mapproxy/service/templates/wmts100capabilities.xml
+++ b/mapproxy/service/templates/wmts100capabilities.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0"?>
-<Capabilities xmlns="http://www.opengis.net/wmts/1.0" xmlns:ows="http://www.opengis.net/ows/1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gml="http://www.opengis.net/gml" xsi:schemaLocation="http://www.opengis.net/wmts/1.0   ../wmtsGetCapabilities_response.xsd" version="1.0.0">
+<Capabilities xmlns="http://www.opengis.net/wmts/1.0" xmlns:ows="http://www.opengis.net/ows/1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gml="http://www.opengis.net/gml" xsi:schemaLocation="http://www.opengis.net/wmts/1.0 http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd" version="1.0.0">
   <ows:ServiceIdentification>
     <ows:Title>{{service.title}}</ows:Title>
     <ows:Abstract>{{service.abstract}}</ows:Abstract>
diff --git a/mapproxy/service/wms.py b/mapproxy/service/wms.py
index 95e2e67..bc9c2df 100644
--- a/mapproxy/service/wms.py
+++ b/mapproxy/service/wms.py
@@ -548,7 +548,7 @@ class LayerRenderer(object):
                 if layer_task.exception is None:
                     layer, layer_img = layer_task.result
                     if layer_img is not None:
-                        layer_merger.add(layer_img, layer=layer)
+                        layer_merger.add(layer_img, layer.coverage)
                 else:
                     ex = layer_task.exception
                     async_pool.shutdown(True)
@@ -566,7 +566,7 @@ class LayerRenderer(object):
             if layer_task.exception is None:
                 layer, layer_img = layer_task.result
                 if layer_img is not None:
-                    layer_merger.add(layer_img, layer=layer)
+                    layer_merger.add(layer_img, layer.coverage)
                 rendered += 1
             else:
                 layer_merger.cacheable = False
diff --git a/mapproxy/source/arcgis.py b/mapproxy/source/arcgis.py
index 18015d0..c87fecc 100644
--- a/mapproxy/source/arcgis.py
+++ b/mapproxy/source/arcgis.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from mapproxy.source.wms import WMSSource
+from mapproxy.source.wms import WMSSource, WMSInfoSource
 
 import logging
 log = logging.getLogger('mapproxy.source.arcgis')
@@ -21,6 +21,17 @@ log = logging.getLogger('mapproxy.source.arcgis')
 
 class ArcGISSource(WMSSource):
     def __init__(self, client, image_opts=None, coverage=None,
-                 supported_srs=None, supported_formats=None):
-        WMSSource.__init__(self, client, image_opts=image_opts, coverage=coverage,
-                           supported_srs=supported_srs, supported_formats=supported_formats)
+                 res_range=None, supported_srs=None, supported_formats=None):
+        WMSSource.__init__(self, client, image_opts=image_opts,
+                           coverage=coverage, res_range=res_range,
+                           supported_srs=supported_srs,
+                           supported_formats=supported_formats)
+
+
+class ArcGISInfoSource(WMSInfoSource):
+    def __init__(self, client):
+        self.client = client
+
+    def get_info(self, query):
+        doc = self.client.get_info(query)
+        return doc
\ No newline at end of file
diff --git a/mapproxy/test/http.py b/mapproxy/test/http.py
index 5295f43..7df49e3 100644
--- a/mapproxy/test/http.py
+++ b/mapproxy/test/http.py
@@ -15,6 +15,7 @@
 
 from __future__ import print_function
 
+import re
 import threading
 import sys
 import cgi
@@ -36,7 +37,7 @@ if PY2:
 else:
     from http.server import HTTPServer as HTTPServer_, BaseHTTPRequestHandler
 
-class RequestsMissmatchError(AssertionError):
+class RequestsMismatchError(AssertionError):
     def __init__(self, assertions):
         self.assertions = assertions
 
@@ -44,7 +45,7 @@ class RequestsMissmatchError(AssertionError):
         assertions = []
         for assertion in self.assertions:
             assertions.append(text_indent(str(assertion), '    ', ' -  '))
-        return 'requests missmatch:\n' + '\n'.join(assertions)
+        return 'requests mismatch:\n' + '\n'.join(assertions)
 
 class RequestError(str):
     pass
@@ -56,14 +57,14 @@ def text_indent(text, indent, first_indent=None):
     text = first_indent + text
     return text.replace('\n', '\n' + indent)
 
-class RequestMissmatch(object):
+class RequestMismatch(object):
     def __init__(self, msg, expected, actual):
         self.msg = msg
         self.expected = expected
         self.actual = actual
 
     def __str__(self):
-        return ('requests missmatch, expected:\n' +
+        return ('requests mismatch, expected:\n' +
             text_indent(str(self.expected), '    ') +
             '\n  got:\n' + text_indent(str(self.actual), '    '))
 
@@ -162,7 +163,7 @@ def mock_http_handler(requests_responses, unordered=False, query_comparator=None
             if 'method' in req:
                 if req['method'] != method:
                     self.server.assertions.append(
-                        RequestMissmatch('unexpected method', req['method'], method)
+                        RequestMismatch('unexpected method', req['method'], method)
                     )
                     self.server.shutdown = True
             if req.get('require_basic_auth', False):
@@ -177,20 +178,20 @@ def mock_http_handler(requests_responses, unordered=False, query_comparator=None
                 for k, v in req['headers'].items():
                     if k not in self.headers:
                         self.server.assertions.append(
-                            RequestMissmatch('missing header', k, self.headers)
+                            RequestMismatch('missing header', k, self.headers)
                         )
                     elif self.headers[k] != v:
                         self.server.assertions.append(
-                            RequestMissmatch('header missmatch', '%s: %s' % (k, v), self.headers)
+                            RequestMismatch('header mismatch', '%s: %s' % (k, v), self.headers)
                         )
             if not query_comparator(req['path'], self.query_data):
                 self.server.assertions.append(
-                    RequestMissmatch('requests differ', req['path'], self.query_data)
+                    RequestMismatch('requests differ', req['path'], self.query_data)
                 )
                 query_actual = set(query_to_dict(self.query_data).items())
                 query_expected = set(query_to_dict(req['path']).items())
                 self.server.assertions.append(
-                    RequestMissmatch('requests params differ', query_expected - query_actual, query_actual - query_expected)
+                    RequestMismatch('requests params differ', query_expected - query_actual, query_actual - query_expected)
                 )
                 self.server.shutdown = True
             if 'req_assert_function' in req:
@@ -271,11 +272,11 @@ class MockServ(object):
 
         if not self._thread.sucess and value:
             print('requests to mock httpd did not '
-            'match expectations:\n %s' % RequestsMissmatchError(self._thread.assertions))
+            'match expectations:\n %s' % RequestsMismatchError(self._thread.assertions))
         if value:
             raise reraise((type, value, traceback))
         if not self._thread.sucess:
-            raise RequestsMissmatchError(self._thread.assertions)
+            raise RequestsMismatchError(self._thread.assertions)
 
 def wms_query_eq(expected, actual):
     """
@@ -312,6 +313,8 @@ def wms_query_eq(expected, actual):
 
     return True
 
+numbers_only = re.compile('^-?\d+\.\d+(,-?\d+\.\d+)*$')
+
 def query_eq(expected, actual):
     """
     >>> query_eq('bAR=baz&foo=bizz', 'foO=bizz&bar=baz')
@@ -322,11 +325,58 @@ def query_eq(expected, actual):
     True
     >>> query_eq('/1/2/3.png', '/1/2/0.png')
     False
+    >>> query_eq('/map?point=2.9999999999,1.00000000001', '/map?point=3.0,1.0')
+    True
+    """
+
+    if path_from_query(expected) != path_from_query(actual):
+        return False
+
+    expected = query_to_dict(expected)
+    actual = query_to_dict(actual)
+
+    if set(expected.keys()) != set(actual.keys()):
+        return False
+
+    for ke, ve in expected.items():
+        if numbers_only.match(ve):
+            if not float_string_almost_eq(ve, actual[ke]):
+                return False
+        else:
+            if ve != actual[ke]:
+                return False
+
+    return True
+
+def float_string_almost_eq(expected, actual):
+    """
+    Compares if two strings with comma-separated floats are almost equal.
+    Strings must contain floats.
+
+    >>> float_string_almost_eq('12345678900', '12345678901')
+    False
+    >>> float_string_almost_eq('12345678900.0', '12345678901.0')
+    True
+
+    >>> float_string_almost_eq('12345678900.0,-3.0', '12345678901.0,-2.9999999999')
+    True
     """
-    return (query_to_dict(expected) == query_to_dict(actual) and
-            path_from_query(expected) == path_from_query(actual))
+    if not numbers_only.match(expected) or not numbers_only.match(actual):
+        return False
+
+    expected_nums = [float(x) for x in expected.split(',')]
+    actual_nums = [float(x) for x in actual.split(',')]
 
-def assert_query_eq(expected, actual):
+    if len(expected_nums) != len(actual_nums):
+        return False
+
+    for e, a in zip(expected_nums, actual_nums):
+        if abs(e - a) > abs((e+a)/2)/10e9:
+            return False
+
+    return True
+
+def assert_query_eq(expected, actual, fuzzy_number_compare=False):
     path_actual = path_from_query(actual)
     path_expected = path_from_query(expected)
     assert path_expected == path_actual, path_expected + '!=' + path_actual
@@ -334,7 +384,11 @@ def assert_query_eq(expected, actual):
     query_actual = set(query_to_dict(actual).items())
     query_expected = set(query_to_dict(expected).items())
 
-    assert query_expected == query_actual, '%s != %s\t%s|%s' % (
+    if fuzzy_number_compare:
+        equal = query_eq(expected, actual)
+    else:
+        equal = query_expected == query_actual
+    assert equal, '%s != %s\t%s|%s' % (
         expected, actual, query_expected - query_actual, query_actual - query_expected)
 
 def path_from_query(query):
@@ -391,13 +445,13 @@ def mock_httpd(address, requests_responses, unordered=False, bbox_aware_query_co
         yield
     except:
         if not t.sucess:
-            print(str(RequestsMissmatchError(t.assertions)))
+            print(str(RequestsMismatchError(t.assertions)))
         raise
     finally:
         t.shutdown = True
         t.join(1)
     if not t.sucess:
-        raise RequestsMissmatchError(t.assertions)
+        raise RequestsMismatchError(t.assertions)
 
 @contextmanager
 def mock_single_req_httpd(address, request_handler):
@@ -407,13 +461,13 @@ def mock_single_req_httpd(address, request_handler):
         yield
     except:
         if not t.sucess:
-            print(str(RequestsMissmatchError(t.assertions)))
+            print(str(RequestsMismatchError(t.assertions)))
         raise
     finally:
         t.shutdown = True
         t.join(1)
     if not t.sucess:
-        raise RequestsMissmatchError(t.assertions)
+        raise RequestsMismatchError(t.assertions)
 
 
 def make_wsgi_env(query_string, extra_environ={}):
diff --git a/mapproxy/test/system/fixture/arcgis.yaml b/mapproxy/test/system/fixture/arcgis.yaml
index ac0e590..589d720 100644
--- a/mapproxy/test/system/fixture/arcgis.yaml
+++ b/mapproxy/test/system/fixture/arcgis.yaml
@@ -1,5 +1,7 @@
 services:
   tms:
+  wms:
+    featureinfo_types: ['json']
 
 layers:
   - name: app2_layer
@@ -8,6 +10,9 @@ layers:
   - name: app2_with_layers_layer
     title: ArcGIS Cache Layer
     sources: [app2_with_layers_cache]
+  - name: app2_with_layers_fi_layer
+    title: ArcGIS Cache Layer
+    sources: [app2_with_layers_fi_cache]
   - name: app2_wrong_url_layer
     title: ArcGIS Cache Layer
     sources: [app2_wrong_url_cache]
@@ -19,6 +24,9 @@ caches:
   app2_with_layers_cache:
     grids: [GLOBAL_MERCATOR]
     sources: [app2_with_layers_source]
+  app2_with_layers_fi_cache:
+    grids: [GLOBAL_MERCATOR]
+    sources: [app2_with_layers_fi_source]
   app2_wrong_url_cache:
     grids: [GLOBAL_MERCATOR]
     sources: [app2_wrong_url_source]
@@ -32,7 +40,17 @@ sources:
     type: arcgis
     req:
       layers: show:0,1
-      url: http://localhost:42423/arcgis/rest/services/ExampleLayer/ImageServer
+      url: http://localhost:42423/arcgis/rest/services/ExampleLayer/MapServer
+  app2_with_layers_fi_source:
+    type: arcgis
+    opts:
+      featureinfo: true
+      featureinfo_tolerance: 10
+      featureinfo_return_geometries: true
+    supported_srs: ['EPSG:3857']
+    req:
+      layers: show:1,2,3
+      url: http://localhost:42423/arcgis/rest/services/ExampleLayer/MapServer
   app2_wrong_url_source:
     type: arcgis
     req:
diff --git a/mapproxy/test/system/fixture/cache.gpkg b/mapproxy/test/system/fixture/cache.gpkg
new file mode 100644
index 0000000..11a281c
Binary files /dev/null and b/mapproxy/test/system/fixture/cache.gpkg differ
diff --git a/mapproxy/test/system/fixture/cache_geopackage.yaml b/mapproxy/test/system/fixture/cache_geopackage.yaml
new file mode 100644
index 0000000..2465e00
--- /dev/null
+++ b/mapproxy/test/system/fixture/cache_geopackage.yaml
@@ -0,0 +1,56 @@
+globals:
+  cache:
+    base_dir: cache_data/
+
+services:
+  tms:
+  wms:
+    md:
+      title: MapProxy test fixture
+
+layers:
+  - name: gpkg
+    title: TMS Cache Layer
+    sources: [gpkg_cache, new_gpkg, new_gpkg_table]
+  - name: gpkg_new
+    title: TMS Cache Layer
+    sources: [new_gpkg]
+
+caches:
+  gpkg_cache:
+    grids: [cache_grid]
+    cache:
+      type: geopackage
+      filename: ./cache.gpkg
+      table_name: cache
+      tile_lock_dir: ./testlockdir
+    sources: [tms]
+  new_gpkg:
+    grids: [new_grid]
+    sources: []
+    cache:
+      type: geopackage
+      filename: ./cache_new.gpkg
+      table_name: cache
+      tile_lock_dir: ./testlockdir
+  new_gpkg_table:
+    grids: [cache_grid]
+    cache:
+      type: geopackage
+      filename: ./cache.gpkg
+      table_name: new_cache
+      tile_lock_dir: ./testlockdir
+    sources: [tms]
+
+grids:
+  cache_grid:
+    srs: EPSG:900913
+  new_grid:
+    srs: EPSG:4326
+
+
+sources:
+  tms:
+    type: tile
+    url: http://localhost:42423/tiles/%(tc_path)s.png
+
diff --git a/mapproxy/test/system/fixture/cache_s3.yaml b/mapproxy/test/system/fixture/cache_s3.yaml
new file mode 100644
index 0000000..ae947c7
--- /dev/null
+++ b/mapproxy/test/system/fixture/cache_s3.yaml
@@ -0,0 +1,58 @@
+globals:
+  cache:
+    s3:
+      bucket_name: default_bucket
+
+services:
+  tms:
+  wms:
+    md:
+      title: MapProxy S3
+
+layers:
+  - name: default
+    title: Default
+    sources: [default_cache]
+  - name: quadkey
+    title: Quadkey
+    sources: [quadkey_cache]
+  - name: reverse
+    title: Reverse
+    sources: [reverse_cache]
+
+caches:
+  default_cache:
+    grids: [webmercator]
+    cache:
+      type: s3
+    sources: [tms]
+
+  quadkey_cache:
+    grids: [webmercator]
+    cache:
+      type: s3
+      bucket_name: tiles
+      directory_layout: quadkey
+      directory: quadkeytiles
+    sources: [tms]
+
+  reverse_cache:
+    grids: [webmercator]
+    cache:
+      type: s3
+      bucket_name: tiles
+      directory_layout: reverse_tms
+      directory: reversetiles
+    sources: [tms]
+
+grids:
+  webmercator:
+    name: WebMerc
+    base: GLOBAL_WEBMERCATOR
+
+
+sources:
+  tms:
+    type: tile
+    url: http://localhost:42423/tiles/%(tc_path)s.png
+
diff --git a/mapproxy/test/system/fixture/coverage.yaml b/mapproxy/test/system/fixture/coverage.yaml
index 46aed1b..b21923f 100644
--- a/mapproxy/test/system/fixture/coverage.yaml
+++ b/mapproxy/test/system/fixture/coverage.yaml
@@ -26,9 +26,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: wms_cache
diff --git a/mapproxy/test/system/fixture/formats.yaml b/mapproxy/test/system/fixture/formats.yaml
index ee44277..8b9d1f7 100644
--- a/mapproxy/test/system/fixture/formats.yaml
+++ b/mapproxy/test/system/fixture/formats.yaml
@@ -25,9 +25,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: jpeg_cache_tiff_source
diff --git a/mapproxy/test/system/fixture/inspire.yaml b/mapproxy/test/system/fixture/inspire.yaml
index 487b23a..29dd9d3 100644
--- a/mapproxy/test/system/fixture/inspire.yaml
+++ b/mapproxy/test/system/fixture/inspire.yaml
@@ -41,9 +41,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
     inspire_md:
       type: linked
       languages:
diff --git a/mapproxy/test/system/fixture/inspire_full.yaml b/mapproxy/test/system/fixture/inspire_full.yaml
index 6b4d701..95b20fc 100644
--- a/mapproxy/test/system/fixture/inspire_full.yaml
+++ b/mapproxy/test/system/fixture/inspire_full.yaml
@@ -41,9 +41,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
       keyword_list:
        - vocabulary: GEMET
          keywords:   [Orthoimagery]
diff --git a/mapproxy/test/system/fixture/layer.yaml b/mapproxy/test/system/fixture/layer.yaml
index 0d9a5ce..dc84b6f 100644
--- a/mapproxy/test/system/fixture/layer.yaml
+++ b/mapproxy/test/system/fixture/layer.yaml
@@ -41,9 +41,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: direct
diff --git a/mapproxy/test/system/fixture/legendgraphic.yaml b/mapproxy/test/system/fixture/legendgraphic.yaml
index 6e909f3..b27c02e 100644
--- a/mapproxy/test/system/fixture/legendgraphic.yaml
+++ b/mapproxy/test/system/fixture/legendgraphic.yaml
@@ -26,9 +26,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: wms_legend
diff --git a/mapproxy/test/system/fixture/mixed_mode.yaml b/mapproxy/test/system/fixture/mixed_mode.yaml
index 6819e38..988567a 100644
--- a/mapproxy/test/system/fixture/mixed_mode.yaml
+++ b/mapproxy/test/system/fixture/mixed_mode.yaml
@@ -26,13 +26,11 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: mixed_mode
-    title: cache with PNG and JPEG 
+    title: cache with PNG and JPEG
     sources: [mixed_cache]
 
 caches:
diff --git a/mapproxy/test/system/fixture/scalehints.yaml b/mapproxy/test/system/fixture/scalehints.yaml
index a182b87..a2a4293 100644
--- a/mapproxy/test/system/fixture/scalehints.yaml
+++ b/mapproxy/test/system/fixture/scalehints.yaml
@@ -26,9 +26,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
  -  name: res
diff --git a/mapproxy/test/system/fixture/seedonly.yaml b/mapproxy/test/system/fixture/seedonly.yaml
index e0f3396..69909cd 100644
--- a/mapproxy/test/system/fixture/seedonly.yaml
+++ b/mapproxy/test/system/fixture/seedonly.yaml
@@ -26,9 +26,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: wms_cache
diff --git a/mapproxy/test/system/fixture/util-conf-wms-111-cap.xml b/mapproxy/test/system/fixture/util-conf-wms-111-cap.xml
index 0b1c4a3..9f0917d 100644
--- a/mapproxy/test/system/fixture/util-conf-wms-111-cap.xml
+++ b/mapproxy/test/system/fixture/util-conf-wms-111-cap.xml
@@ -28,7 +28,7 @@
       <ContactElectronicMailAddress>osm at omniscale.de</ContactElectronicMailAddress>
   </ContactInformation>
   <Fees>none</Fees>
-  <AccessConstraints>This service is intended for private and evaluation use only. The data is licensed as Creative Commons Attribution-Share Alike 2.0 (http://creativecommons.org/licenses/by-sa/2.0/)</AccessConstraints>
+  <AccessConstraints>Here be dragons.</AccessConstraints>
 </Service>
 <Capability>
   <Request>
diff --git a/mapproxy/test/system/fixture/util_wms_capabilities111.xml b/mapproxy/test/system/fixture/util_wms_capabilities111.xml
index f24db45..45ee51d 100644
--- a/mapproxy/test/system/fixture/util_wms_capabilities111.xml
+++ b/mapproxy/test/system/fixture/util_wms_capabilities111.xml
@@ -28,7 +28,7 @@
       <ContactElectronicMailAddress>info at omniscale.de</ContactElectronicMailAddress>
   </ContactInformation>
   <Fees>None</Fees>
-  <AccessConstraints>This service is intended for private and evaluation use only. The data is licensed as Creative Commons Attribution-Share Alike 2.0 (http://creativecommons.org/licenses/by-sa/2.0/)</AccessConstraints>
+  <AccessConstraints>Here be dragons.</AccessConstraints>
 </Service>
 <Capability>
   <Request>
diff --git a/mapproxy/test/system/fixture/util_wms_capabilities130.xml b/mapproxy/test/system/fixture/util_wms_capabilities130.xml
index f8cb4a1..616f3fe 100644
--- a/mapproxy/test/system/fixture/util_wms_capabilities130.xml
+++ b/mapproxy/test/system/fixture/util_wms_capabilities130.xml
@@ -24,7 +24,7 @@
       <ContactElectronicMailAddress>info at omniscale.de</ContactElectronicMailAddress>
   </ContactInformation>
     <Fees>None</Fees>
-    <AccessConstraints>This service is intended for private and evaluation use only. The data is licensed as Creative Commons Attribution-Share Alike 2.0 (http://creativecommons.org/licenses/by-sa/2.0/)</AccessConstraints>
+    <AccessConstraints>Here be dragons.</AccessConstraints>
 </Service>
 <Capability>
   <Request>
diff --git a/mapproxy/test/system/fixture/wms_versions.yaml b/mapproxy/test/system/fixture/wms_versions.yaml
index 01bfaa6..306c40c 100644
--- a/mapproxy/test/system/fixture/wms_versions.yaml
+++ b/mapproxy/test/system/fixture/wms_versions.yaml
@@ -23,9 +23,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: direct
diff --git a/mapproxy/test/system/fixture/wmts.yaml b/mapproxy/test/system/fixture/wmts.yaml
index a743619..945489e 100644
--- a/mapproxy/test/system/fixture/wmts.yaml
+++ b/mapproxy/test/system/fixture/wmts.yaml
@@ -28,9 +28,7 @@ services:
         fax: +49(0)441-9392774-9
         email: info at omniscale.de
       access_constraints:
-        This service is intended for private and evaluation use only.
-        The data is licensed as Creative Commons Attribution-Share Alike 2.0
-        (http://creativecommons.org/licenses/by-sa/2.0/)
+        Here be dragons.
 
 layers:
   - name: wms_cache
diff --git a/mapproxy/test/system/test_arcgis.py b/mapproxy/test/system/test_arcgis.py
index 8047538..8e01c3a 100644
--- a/mapproxy/test/system/test_arcgis.py
+++ b/mapproxy/test/system/test_arcgis.py
@@ -16,7 +16,7 @@
 from __future__ import with_statement, division
 
 from io import BytesIO
-from mapproxy.request.arcgis import ArcGISRequest
+from mapproxy.request.wms import WMS111FeatureInfoRequest
 from mapproxy.test.image import is_png, create_tmp_image
 from mapproxy.test.http import mock_httpd
 from mapproxy.test.system import module_setup, module_teardown, SystemTest
@@ -32,13 +32,18 @@ def teardown_module():
 
 transp = create_tmp_image((512, 512), mode='RGBA', color=(0, 0, 0, 0))
 
+
 class TestArcgisSource(SystemTest):
     config = test_config
     def setup(self):
         SystemTest.setup(self)
+        self.common_fi_req = WMS111FeatureInfoRequest(url='/service?',
+            param=dict(x='10', y='20', width='200', height='200', layers='app2_with_layers_fi_layer',
+                       format='image/png', query_layers='app2_with_layers_fi_layer', styles='',
+                       bbox='1000,400,2000,1400', srs='EPSG:3857', info_format='application/json'))
 
     def test_get_tile(self):
-        expected_req = [({'path': '/arcgis/rest/services/ExampleLayer/ImageServer/export?f=image&format=png&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
+        expected_req = [({'path': '/arcgis/rest/services/ExampleLayer/ImageServer/exportImage?f=image&format=png&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
                  {'body': transp, 'headers': {'content-type': 'image/png'}}),
                 ]
 
@@ -50,7 +55,7 @@ class TestArcgisSource(SystemTest):
             assert is_png(data)
 
     def test_get_tile_with_layer(self):
-        expected_req = [({'path': '/arcgis/rest/services/ExampleLayer/ImageServer/export?f=image&format=png&layers=show:0,1&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
+        expected_req = [({'path': '/arcgis/rest/services/ExampleLayer/MapServer/export?f=image&format=png&layers=show:0,1&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
                  {'body': transp, 'headers': {'content-type': 'image/png'}}),
                 ]
 
@@ -62,10 +67,45 @@ class TestArcgisSource(SystemTest):
             assert is_png(data)
 
     def test_get_tile_from_missing_arcgis_layer(self):
-        expected_req = [({'path': '/arcgis/rest/services/NonExistentLayer/ImageServer/export?f=image&format=png&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
+        expected_req = [({'path': '/arcgis/rest/services/NonExistentLayer/ImageServer/exportImage?f=image&format=png&imageSR=900913&bboxSR=900913&bbox=-20037508.342789244,-20037508.342789244,20037508.342789244,20037508.342789244&size=512,512'},
                  {'body': b'', 'status': 400}),
                 ]
 
         with mock_httpd(('localhost', 42423), expected_req, bbox_aware_query_comparator=True):
             resp = self.app.get('/tms/1.0.0/app2_wrong_url_layer/0/0/1.png', status=500)
             eq_(resp.status_code, 500)
+
+    def test_identify(self):
+        expected_req = [(
+            {'path': '/arcgis/rest/services/ExampleLayer/MapServer/identify?f=json&'
+                'geometry=1050.000000,1300.000000&returnGeometry=true&imageDisplay=200,200,96'
+                '&mapExtent=1000.0,400.0,2000.0,1400.0&layers=show:1,2,3'
+                '&tolerance=10&geometryType=esriGeometryPoint&sr=3857'
+            },
+            {'body': b'{"results": []}', 'headers': {'content-type': 'application/json'}}),
+        ]
+
+        with mock_httpd(('localhost', 42423), expected_req, bbox_aware_query_comparator=True):
+            resp = self.app.get(self.common_fi_req)
+            eq_(resp.content_type, 'application/json')
+            eq_(resp.content_length, len(resp.body))
+            eq_(resp.body, b'{"results": []}')
+
+
+    def test_transformed_identify(self):
+        expected_req = [(
+            {'path': '/arcgis/rest/services/ExampleLayer/MapServer/identify?f=json&'
+                'geometry=573295.377585,6927820.884193&returnGeometry=true&imageDisplay=200,321,96'
+                '&mapExtent=556597.453966,6446275.84102,890555.926346,6982997.92039&layers=show:1,2,3'
+                '&tolerance=10&geometryType=esriGeometryPoint&sr=3857'
+            },
+            {'body': b'{"results": []}', 'headers': {'content-type': 'application/json'}}),
+        ]
+
+        with mock_httpd(('localhost', 42423), expected_req):
+            self.common_fi_req.params.bbox = '5,50,8,53'
+            self.common_fi_req.params.srs = 'EPSG:4326'
+            resp = self.app.get(self.common_fi_req)
+            eq_(resp.content_type, 'application/json')
+            eq_(resp.content_length, len(resp.body))
+            eq_(resp.body, b'{"results": []}')
diff --git a/mapproxy/test/system/test_auth.py b/mapproxy/test/system/test_auth.py
index 02c44a4..f2db9e0 100644
--- a/mapproxy/test/system/test_auth.py
+++ b/mapproxy/test/system/test_auth.py
@@ -264,7 +264,7 @@ class TestWMSAuth(SystemTest):
             return {
                 'authorized': 'partial',
                 'layers': {
-                    'layer1b': {'featureinfo': True, 'limited_to':  {'srs': 'EPSG:4326', 'geometry': [-40.0, -40.0, 0.0, 0.0]}},
+                    'layer1b': {'featureinfo': True, 'limited_to':  {'srs': 'EPSG:4326', 'geometry': [-80.0, -40.0, 0.0, -10.0]}},
                 }
             }
 
diff --git a/mapproxy/test/system/test_cache_geopackage.py b/mapproxy/test/system/test_cache_geopackage.py
new file mode 100644
index 0000000..c6aa47b
--- /dev/null
+++ b/mapproxy/test/system/test_cache_geopackage.py
@@ -0,0 +1,128 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2011 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement, division
+
+import os
+import shutil
+
+from io import BytesIO
+
+from mapproxy.request.wms import WMS111MapRequest
+from mapproxy.test.http import MockServ
+from mapproxy.test.image import is_png, create_tmp_image
+from mapproxy.test.system import prepare_env, create_app, module_teardown, SystemTest
+from mapproxy.cache.geopackage import GeopackageCache
+from mapproxy.grid import TileGrid
+from nose.tools import eq_
+import sqlite3
+
+test_config = {}
+
+
+def setup_module():
+    prepare_env(test_config, 'cache_geopackage.yaml')
+
+    shutil.copy(os.path.join(test_config['fixture_dir'], 'cache.gpkg'),
+        test_config['base_dir'])
+    create_app(test_config)
+
+
+def teardown_module():
+    module_teardown(test_config)
+
+
+class TestGeopackageCache(SystemTest):
+    config = test_config
+    table_name = 'cache'
+
+    def setup(self):
+        SystemTest.setup(self)
+        self.common_map_req = WMS111MapRequest(url='/service?',
+            param=dict(service='WMS',
+                       version='1.1.1', bbox='-180,-80,0,0',
+                       width='200', height='200',
+                       layers='gpkg', srs='EPSG:4326',
+                       format='image/png',
+                       styles='', request='GetMap'))
+
+    def test_get_map_cached(self):
+        resp = self.app.get(self.common_map_req)
+        eq_(resp.content_type, 'image/png')
+        data = BytesIO(resp.body)
+        assert is_png(data)
+
+    def test_get_map_uncached(self):
+        assert os.path.exists(os.path.join(test_config['base_dir'], 'cache.gpkg')) # already created on startup
+
+        self.common_map_req.params.bbox = '-180,0,0,80'
+        serv = MockServ(port=42423)
+        serv.expects('/tiles/01/000/000/000/000/000/001.png')
+        serv.returns(create_tmp_image((256, 256)))
+        with serv:
+            resp = self.app.get(self.common_map_req)
+            eq_(resp.content_type, 'image/png')
+            data = BytesIO(resp.body)
+            assert is_png(data)
+
+        # now cached
+        resp = self.app.get(self.common_map_req)
+        eq_(resp.content_type, 'image/png')
+        data = BytesIO(resp.body)
+        assert is_png(data)
+
+    def test_bad_config_geopackage_no_gpkg_contents(self):
+        gpkg_file = os.path.join(test_config['base_dir'], 'cache.gpkg')
+        table_name = 'no_gpkg_contents'
+
+        with sqlite3.connect(gpkg_file) as db:
+            cur = db.execute('''SELECT name FROM sqlite_master WHERE type='table' AND name=?''',
+                             (table_name,))
+            content = cur.fetchone()
+            assert content[0] == table_name
+
+        with sqlite3.connect(gpkg_file) as db:
+            cur = db.execute('''SELECT table_name FROM gpkg_contents WHERE table_name=?''',
+                             (table_name,))
+            content = cur.fetchone()
+            assert not content
+
+        GeopackageCache(gpkg_file, TileGrid(srs=4326), table_name=table_name)
+
+        with sqlite3.connect(gpkg_file) as db:
+            cur = db.execute('''SELECT table_name FROM gpkg_contents WHERE table_name=?''',
+                             (table_name,))
+            content = cur.fetchone()
+            assert content[0] == table_name
+
+    def test_bad_config_geopackage_no_spatial_ref_sys(self):
+        gpkg_file = os.path.join(test_config['base_dir'], 'cache.gpkg')
+        organization_coordsys_id = 3785
+        table_name='no_gpkg_spatial_ref_sys'
+
+        with sqlite3.connect(gpkg_file) as db:
+            cur = db.execute('''SELECT organization_coordsys_id FROM gpkg_spatial_ref_sys WHERE organization_coordsys_id=?''',
+                             (organization_coordsys_id,))
+            content = cur.fetchone()
+            assert not content
+
+        GeopackageCache(gpkg_file, TileGrid(srs=3785), table_name=table_name)
+
+        with sqlite3.connect(gpkg_file) as db:
+            cur = db.execute(
+                '''SELECT organization_coordsys_id FROM gpkg_spatial_ref_sys WHERE organization_coordsys_id=?''',
+                (organization_coordsys_id,))
+            content = cur.fetchone()
+            assert content[0] == organization_coordsys_id
diff --git a/mapproxy/test/system/test_cache_s3.py b/mapproxy/test/system/test_cache_s3.py
new file mode 100644
index 0000000..d84ac6e
--- /dev/null
+++ b/mapproxy/test/system/test_cache_s3.py
@@ -0,0 +1,115 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement, division
+
+from io import BytesIO
+
+from mapproxy.request.wms import WMS111MapRequest
+from mapproxy.test.image import is_png, create_tmp_image
+from mapproxy.test.system import prepare_env, create_app, module_teardown, SystemTest
+
+from nose.tools import eq_
+from nose.plugins.skip import SkipTest
+
+try:
+    import boto3
+    from moto import mock_s3
+except ImportError:
+    boto3 = None
+    mock_s3 = None
+
+
+test_config = {}
+
+_mock = None
+
+def setup_module():
+    if not mock_s3 or not boto3:
+        raise SkipTest("boto3 and moto required for S3 tests")
+
+    global _mock
+    _mock = mock_s3()
+    _mock.start()
+
+    boto3.client("s3").create_bucket(Bucket="default_bucket")
+    boto3.client("s3").create_bucket(Bucket="tiles")
+    boto3.client("s3").create_bucket(Bucket="reversetiles")
+
+    prepare_env(test_config, 'cache_s3.yaml')
+    create_app(test_config)
+
+def teardown_module():
+    module_teardown(test_config)
+    _mock.stop()
+
+class TestS3Cache(SystemTest):
+    config = test_config
+    table_name = 'cache'
+
+    def setup(self):
+        SystemTest.setup(self)
+        self.common_map_req = WMS111MapRequest(url='/service?',
+            param=dict(service='WMS',
+                       version='1.1.1', bbox='-150,-40,-140,-30',
+                       width='100', height='100',
+                       layers='default', srs='EPSG:4326',
+                       format='image/png',
+                       styles='', request='GetMap'))
+
+    def test_get_map_cached(self):
+        # mock_s3 interferes with MockServ, use boto to manually upload tile
+        tile = create_tmp_image((256, 256))
+        boto3.client("s3").upload_fileobj(
+                BytesIO(tile),
+                Bucket='default_bucket',
+                Key='default_cache/WebMerc/4/1/9.png',
+        )
+
+        resp = self.app.get(self.common_map_req)
+        eq_(resp.content_type, 'image/png')
+        data = BytesIO(resp.body)
+        assert is_png(data)
+
+
+    def test_get_map_cached_quadkey(self):
+        # mock_s3 interferes with MockServ, use boto to manually upload tile
+        tile = create_tmp_image((256, 256))
+        boto3.client("s3").upload_fileobj(
+                BytesIO(tile),
+                Bucket='tiles',
+                Key='quadkeytiles/2003.png',
+        )
+
+        self.common_map_req.params.layers = 'quadkey'
+        resp = self.app.get(self.common_map_req)
+        eq_(resp.content_type, 'image/png')
+        data = BytesIO(resp.body)
+        assert is_png(data)
+
+    def test_get_map_cached_reverse_tms(self):
+        # mock_s3 interferes with MockServ, use boto to manually upload tile
+        tile = create_tmp_image((256, 256))
+        boto3.client("s3").upload_fileobj(
+                BytesIO(tile),
+                Bucket='tiles',
+                Key='reversetiles/9/1/4.png',
+        )
+
+        self.common_map_req.params.layers = 'reverse'
+        resp = self.app.get(self.common_map_req)
+        eq_(resp.content_type, 'image/png')
+        data = BytesIO(resp.body)
+        assert is_png(data)
diff --git a/mapproxy/test/system/test_kml.py b/mapproxy/test/system/test_kml.py
index 429e6e9..fd161bc 100644
--- a/mapproxy/test/system/test_kml.py
+++ b/mapproxy/test/system/test_kml.py
@@ -88,7 +88,7 @@ class TestKML(SystemTest):
             assert 'Last-modified' not in resp.headers
         else:
             eq_(resp.headers['Last-modified'], format_httpdate(timestamp))
-        eq_(resp.headers['Cache-control'], 'max-age=%d public' % max_age)
+        eq_(resp.headers['Cache-control'], 'public, max-age=%d, s-maxage=%d' % (max_age, max_age))
 
     def test_get_cached_tile(self):
         etag, max_age = self._update_timestamp()
diff --git a/mapproxy/test/system/test_multi_cache_layers.py b/mapproxy/test/system/test_multi_cache_layers.py
index befcfbe..0c70038 100644
--- a/mapproxy/test/system/test_multi_cache_layers.py
+++ b/mapproxy/test/system/test_multi_cache_layers.py
@@ -71,11 +71,11 @@ class TestMultiCacheLayer(SystemTest):
 
     def test_tms_capabilities(self):
         resp = self.app.get('/tms/1.0.0/')
-        assert 'http://localhost/tms/1.0.0/multi_cache/wmts_incompatible_grid' in resp
-        assert 'http://localhost/tms/1.0.0/multi_cache/GLOBAL_WEBMERCATOR' in resp
-        assert 'http://localhost/tms/1.0.0/multi_cache/InspireCrs84Quad' in resp
-        assert 'http://localhost/tms/1.0.0/multi_cache/gk3' in resp
-        assert 'http://localhost/tms/1.0.0/cache/utm32' in resp
+        assert 'http://localhost/tms/1.0.0/multi_cache/EPSG25832' in resp
+        assert 'http://localhost/tms/1.0.0/multi_cache/EPSG3857' in resp
+        assert 'http://localhost/tms/1.0.0/multi_cache/CRS84' in resp
+        assert 'http://localhost/tms/1.0.0/multi_cache/EPSG31467' in resp
+        assert 'http://localhost/tms/1.0.0/cache/EPSG25832' in resp
         xml = resp.lxml
         assert xml.xpath('count(//TileMap)') == 5
 
diff --git a/mapproxy/test/system/test_tms.py b/mapproxy/test/system/test_tms.py
index 265355d..31f21b2 100644
--- a/mapproxy/test/system/test_tms.py
+++ b/mapproxy/test/system/test_tms.py
@@ -179,7 +179,7 @@ class TestTileService(SystemTest):
     def _check_cache_control_headers(self, resp, etag, max_age):
         eq_(resp.headers['ETag'], etag)
         eq_(resp.headers['Last-modified'], 'Fri, 13 Feb 2009 23:31:30 GMT')
-        eq_(resp.headers['Cache-control'], 'max-age=%d public' % max_age)
+        eq_(resp.headers['Cache-control'], 'public, max-age=%d, s-maxage=%d' % (max_age, max_age))
 
     def test_get_cached_tile(self):
         etag, max_age = self._update_timestamp()
diff --git a/mapproxy/test/system/test_wms.py b/mapproxy/test/system/test_wms.py
index 8c18d0c..d61fd24 100644
--- a/mapproxy/test/system/test_wms.py
+++ b/mapproxy/test/system/test_wms.py
@@ -439,7 +439,8 @@ class TestWMS111(WMSTest):
         # broken bbox for the requested srs
         url =  """/service?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&BBOX=-72988843.697212,-255661507.634227,142741550.188860,255661507.634227&SRS=EPSG:25833&WIDTH=164&HEIGHT=388&LAYERS=wms_cache_100&STYLES=&FORMAT=image/png&TRANSPARENT=TRUE"""
         resp = self.app.get(url)
-        is_111_exception(resp.lxml, 'Request too large or invalid BBOX.')
+        # result depends on proj version
+        is_111_exception(resp.lxml, re_msg='Request too large or invalid BBOX.|Could not transform BBOX: Invalid result.')
 
     def test_get_map_broken_bbox(self):
         url = """/service?VERSION=1.1.11&REQUEST=GetMap&SRS=EPSG:31468&BBOX=-10000855.0573254,2847125.18913603,-9329367.42767611,4239924.78564583&WIDTH=130&HEIGHT=62&LAYERS=wms_cache&STYLES=&FORMAT=image/png&TRANSPARENT=TRUE"""
@@ -529,24 +530,24 @@ class TestWMS111(WMSTest):
     def test_get_featureinfo_transformed(self):
         expected_req = ({'path': r'/service?LAYERs=foo,bar&SERVICE=WMS&FORMAT=image%2Fpng'
                                   '&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
-                                  '&BBOX=5197367.93088,5312902.73895,5311885.44223,5434731.78213'
+                                  '&BBOX=1172272.30156,7196018.03449,1189711.04571,7213496.99738'
                                   '&styles=&VERSION=1.1.1&feature_count=100'
-                                  '&WIDTH=200&QUERY_LAYERS=foo,bar&X=14&Y=78'},
+                                  '&WIDTH=200&QUERY_LAYERS=foo,bar&X=14&Y=20'},
                         {'body': b'info', 'headers': {'content-type': 'text/plain'}})
 
         # out fi point at x=10,y=20
-        p_25832  = (3570269+10*(3643458 - 3570269)/200, 5540889+20*(5614078 - 5540889)/200)
-        # the transformed fi point at x=10,y=22
-        p_900913 = (5197367.93088+14*(5311885.44223 - 5197367.93088)/200,
-                    5312902.73895+78*(5434731.78213 - 5312902.73895)/200)
+        p_25832  = (600000+10*(610000 - 600000)/200, 6010000-20*(6010000 - 6000000)/200)
+        # the transformed fi point at x=14,y=20
+        p_900913 = (1172272.30156+14*(1189711.04571-1172272.30156)/200,
+                    7213496.99738-20*(7213496.99738 - 7196018.03449)/200)
 
         # are they the same?
-        # check with tolerance: pixel resolution is ~570 and x/y position is rounded to pizel
-        assert abs(SRS(25832).transform_to(SRS(900913), p_25832)[0] - p_900913[0]) < 570/2
-        assert abs(SRS(25832).transform_to(SRS(900913), p_25832)[1] - p_900913[1]) < 570/2
+        # check with tolerance: pixel resolution is ~50 and x/y position is rounded to pixel
+        assert abs(SRS(25832).transform_to(SRS(900913), p_25832)[0] - p_900913[0]) < 50
+        assert abs(SRS(25832).transform_to(SRS(900913), p_25832)[1] - p_900913[1]) < 50
 
         with mock_httpd(('localhost', 42423), [expected_req], bbox_aware_query_comparator=True):
-            self.common_fi_req.params['bbox'] = '3570269,5540889,3643458,5614078'
+            self.common_fi_req.params['bbox'] = '600000,6000000,610000,6010000'
             self.common_fi_req.params['srs'] = 'EPSG:25832'
             self.common_fi_req.params.pos = 10, 20
             self.common_fi_req.params['feature_count'] = 100
diff --git a/mapproxy/test/test_http_helper.py b/mapproxy/test/test_http_helper.py
index 2d17fcf..f5b197c 100644
--- a/mapproxy/test/test_http_helper.py
+++ b/mapproxy/test/test_http_helper.py
@@ -15,8 +15,8 @@
 
 import requests
 from mapproxy.test.http import (
-    MockServ, RequestsMissmatchError, mock_httpd,
-    basic_auth_value,
+    MockServ, RequestsMismatchError, mock_httpd,
+    basic_auth_value, query_eq,
 )
 
 from nose.tools import eq_
@@ -48,7 +48,7 @@ class TestMockServ(object):
         try:
             with serv:
                 requests.get('http://localhost:%d/test' % serv.port)
-        except RequestsMissmatchError as ex:
+        except RequestsMismatchError as ex:
             assert ex.assertions[0].expected == 'Accept: Coffee'
 
     def test_expects_post(self):
@@ -65,7 +65,7 @@ class TestMockServ(object):
         try:
             with serv:
                 requests.get('http://localhost:%d/test' % serv.port)
-        except RequestsMissmatchError as ex:
+        except RequestsMismatchError as ex:
             assert ex.assertions[0].expected == 'POST'
             assert ex.assertions[0].actual == 'GET'
         else:
@@ -137,8 +137,8 @@ class TestMockServ(object):
             with serv:
                 resp = requests.get('http://localhost:%d/test1' % serv.port)
                 eq_(resp.content, b'hello1')
-        except RequestsMissmatchError as ex:
-            assert 'requests missmatch:\n -  missing requests' in str(ex)
+        except RequestsMismatchError as ex:
+            assert 'requests mismatch:\n -  missing requests' in str(ex)
         else:
             raise AssertionError('AssertionError expected')
 
@@ -177,7 +177,7 @@ class TestMockServ(object):
                     raise AssertionError('RequestException expected')
                 resp = requests.get('http://localhost:%d/test2' % serv.port)
                 eq_(resp.content, b'hello2')
-        except RequestsMissmatchError as ex:
+        except RequestsMismatchError as ex:
             assert 'unexpected request' in ex.assertions[0]
         else:
             raise AssertionError('AssertionError expected')
@@ -207,3 +207,13 @@ class TestMockHttpd(object):
                     'Authorization': basic_auth_value('foo', 'bar'), 'Accept': 'Coffee'}
                 )
                 eq_(resp.content, b'ok')
+
+
+def test_query_eq():
+    assert query_eq('?baz=42&foo=bar', '?foo=bar&baz=42')
+    assert query_eq('?baz=42.00&foo=bar', '?foo=bar&baz=42.0')
+    assert query_eq('?baz=42.000000001&foo=bar', '?foo=bar&baz=42.0')
+    assert not query_eq('?baz=42.00000001&foo=bar', '?foo=bar&baz=42.0')
+
+    assert query_eq('?baz=42.000000001,23.99999999999&foo=bar', '?foo=bar&baz=42.0,24.0')
+    assert not query_eq('?baz=42.00000001&foo=bar', '?foo=bar&baz=42.0')
\ No newline at end of file
diff --git a/mapproxy/test/unit/fixture/cache.gpkg b/mapproxy/test/unit/fixture/cache.gpkg
new file mode 100644
index 0000000..11a281c
Binary files /dev/null and b/mapproxy/test/unit/fixture/cache.gpkg differ
diff --git a/mapproxy/test/unit/test_async.py b/mapproxy/test/unit/test_async.py
index a9f7053..b7dcd1b 100644
--- a/mapproxy/test/unit/test_async.py
+++ b/mapproxy/test/unit/test_async.py
@@ -32,7 +32,7 @@ class TestThreaded(object):
         stop = time.time()
 
         duration = stop - start
-        assert duration < 0.2
+        assert duration < 0.5, "took %s" % duration
 
         eq_(len(result), 40)
 
@@ -68,7 +68,7 @@ class TestEventlet(object):
         stop = time.time()
 
         duration = stop - start
-        assert duration < 0.1
+        assert duration < 0.2, "took %s" % duration
 
         eq_(len(result), 40)
 
diff --git a/mapproxy/test/unit/test_cache.py b/mapproxy/test/unit/test_cache.py
index 8d8465e..b1cac63 100644
--- a/mapproxy/test/unit/test_cache.py
+++ b/mapproxy/test/unit/test_cache.py
@@ -323,6 +323,11 @@ class TestTileManagerWMSSource(object):
              ((0.0, -90.0, 180.0, 90.0), (512, 512), SRS(4326))])
 
 
+class TestTileManagerWMSSourceConcurrent(TestTileManagerWMSSource):
+    def setup(self):
+        TestTileManagerWMSSource.setup(self)
+        self.tile_mgr.concurrent_tile_creators = 2
+
 class TestTileManagerWMSSourceMinimalMetaRequests(object):
     def setup(self):
         self.file_cache = MockFileCache('/dev/null', 'png')
@@ -482,6 +487,65 @@ class TestTileManagerMultipleSourcesWithMetaTiles(object):
 
         assert self.tile_mgr.meta_grid is None
 
+
+class TestTileManagerBulkMetaTiles(object):
+    def setup(self):
+        self.file_cache = MockFileCache('/dev/null', 'png')
+        self.grid = TileGrid(SRS(4326), bbox=[-180, -90, 180, 90], origin='ul')
+        self.source_base = SolidColorMockSource(color='#ff0000')
+        self.source_base.supports_meta_tiles = False
+        self.source_overlay = MockSource()
+        self.source_overlay.supports_meta_tiles = False
+        self.locker = TileLocker(tmp_lock_dir, 10, "id")
+        self.tile_mgr = TileManager(self.grid, self.file_cache,
+            [self.source_base, self.source_overlay], 'png',
+            meta_size=[2, 2], meta_buffer=0,
+            locker=self.locker,
+            bulk_meta_tiles=True,
+        )
+
+    def test_bulk_get(self):
+        tiles = self.tile_mgr.creator().create_tiles([Tile((0, 0, 2))])
+        eq_(len(tiles), 2*2)
+        eq_(self.file_cache.stored_tiles, set([(0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2)]))
+        for requested in [self.source_base.requested, self.source_overlay.requested]:
+            eq_(set(requested), set([
+                ((-180.0, 0.0, -90.0, 90.0), (256, 256), SRS(4326)),
+                ((-90.0, 0.0, 0.0, 90.0), (256, 256), SRS(4326)),
+                ((-180.0, -90.0, -90.0, 0.0), (256, 256), SRS(4326)),
+                ((-90.0, -90.0, 0.0, 0.0), (256, 256), SRS(4326)),
+            ]))
+
+    def test_bulk_get_error(self):
+        self.tile_mgr.sources = [self.source_base, ErrorSource()]
+        try:
+            self.tile_mgr.creator().create_tiles([Tile((0, 0, 2))])
+        except Exception as ex:
+            eq_(ex.args[0], "source error")
+
+    def test_bulk_get_multiple_meta_tiles(self):
+        tiles = self.tile_mgr.creator().create_tiles([Tile((1, 0, 2)), Tile((2, 0, 2))])
+        eq_(len(tiles), 2*2*2)
+        eq_(self.file_cache.stored_tiles, set([
+            (0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2),
+            (2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
+        ]))
+
+class ErrorSource(MapLayer):
+    def __init__(self, *args):
+        MapLayer.__init__(self, *args)
+        self.requested = []
+
+    def get_map(self, query):
+        self.requested.append((query.bbox, query.size, query.srs))
+        raise Exception("source error")
+
+class TestTileManagerBulkMetaTilesConcurrent(TestTileManagerBulkMetaTiles):
+    def setup(self):
+        TestTileManagerBulkMetaTiles.setup(self)
+        self.tile_mgr.concurrent_tile_creators = 2
+
+
 default_image_opts = ImageOptions(resampling='bicubic')
 
 class TestCacheMapLayer(object):
diff --git a/mapproxy/test/unit/test_cache_compact.py b/mapproxy/test/unit/test_cache_compact.py
new file mode 100644
index 0000000..405f09a
--- /dev/null
+++ b/mapproxy/test/unit/test_cache_compact.py
@@ -0,0 +1,127 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement, division
+
+import os
+import time
+import struct
+
+from io import BytesIO
+
+from mapproxy.cache.compact import CompactCacheV1
+from mapproxy.cache.tile import Tile
+from mapproxy.image import ImageSource
+from mapproxy.image.opts import ImageOptions
+from mapproxy.test.unit.test_cache_tile import TileCacheTestBase
+
+from nose.tools import eq_
+
+class TestCompactCacheV1(TileCacheTestBase):
+
+    always_loads_metadata = True
+
+    def setup(self):
+        TileCacheTestBase.setup(self)
+        self.cache = CompactCacheV1(
+            cache_dir=self.cache_dir,
+        )
+
+    def test_bundle_files(self):
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundle'))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundlx'))
+        self.cache.store_tile(self.create_tile(coord=(0, 0, 0)))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundlx'))
+
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundle'))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundlx'))
+        self.cache.store_tile(self.create_tile(coord=(127, 127, 12)))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundlx'))
+
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0100C0080.bundle'))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0100C0080.bundlx'))
+        self.cache.store_tile(self.create_tile(coord=(128, 256, 12)))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0100C0080.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0100C0080.bundlx'))
+
+    def test_bundle_files_not_created_on_is_cached(self):
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundle'))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundlx'))
+        self.cache.is_cached(Tile(coord=(0, 0, 0)))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundle'))
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L00', 'R0000C0000.bundlx'))
+
+    def test_missing_tiles(self):
+        self.cache.store_tile(self.create_tile(coord=(130, 200, 8)))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L08', 'R0080C0080.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L08', 'R0080C0080.bundlx'))
+
+        # test that all other tiles in this bundle are missing
+        assert self.cache.is_cached(Tile((130, 200, 8)))
+        for x in range(128, 255):
+            for y in range(128, 255):
+                if x == 130 and y == 200:
+                    continue
+                assert not self.cache.is_cached(Tile((x, y, 8))), (x, y)
+                assert not self.cache.load_tile(Tile((x, y, 8))), (x, y)
+
+    def test_remove_level_tiles_before(self):
+        self.cache.store_tile(self.create_tile(coord=(0, 0, 12)))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundlx'))
+
+        # not removed with timestamp
+        self.cache.remove_level_tiles_before(12, time.time())
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0000C0000.bundlx'))
+
+        # removed with timestamp=0 (remove_all:true in seed.yaml)
+        self.cache.remove_level_tiles_before(12, 0)
+        assert not os.path.exists(os.path.join(self.cache_dir, 'L12'))
+
+
+    def test_bundle_header(self):
+        t = Tile((5000, 1000, 12), ImageSource(BytesIO(b'a' * 4000), image_opts=ImageOptions(format='image/png')))
+        self.cache.store_tile(t)
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'))
+        assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundlx'))
+
+        def assert_header(tile_bytes_written, max_tile_bytes):
+            with open(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'), 'r+b') as f:
+                header = struct.unpack('<lllllllllllllll', f.read(60))
+                eq_(header[11], 896)
+                eq_(header[12], 1023)
+                eq_(header[13], 4992)
+                eq_(header[14], 5119)
+                eq_(header[6], 60 + 128*128*4 + sum(tile_bytes_written))
+                eq_(header[2], max_tile_bytes)
+                eq_(header[4], len(tile_bytes_written)*4)
+
+        assert_header([4000 + 4], 4000)
+
+        t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 6000), image_opts=ImageOptions(format='image/png')))
+        self.cache.store_tile(t)
+        assert_header([4000 + 4, 6000 + 4], 6000)
+
+        t = Tile((4992, 999, 12), ImageSource(BytesIO(b'a' * 1000), image_opts=ImageOptions(format='image/png')))
+        self.cache.store_tile(t)
+        assert_header([4000 + 4, 6000 + 4, 1000 + 4], 6000)
+
+        t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 3000), image_opts=ImageOptions(format='image/png')))
+        self.cache.store_tile(t)
+        assert_header([4000 + 4, 6000 + 4 + 3000 + 4, 1000 + 4], 6000) # still contains bytes from overwritten tile
+
diff --git a/mapproxy/test/unit/test_cache_geopackage.py b/mapproxy/test/unit/test_cache_geopackage.py
new file mode 100644
index 0000000..baf8321
--- /dev/null
+++ b/mapproxy/test/unit/test_cache_geopackage.py
@@ -0,0 +1,221 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2016 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement, division
+
+import os
+import time
+import sqlite3
+import threading
+
+from io import BytesIO
+
+from mapproxy.image import ImageSource
+from mapproxy.cache.geopackage import GeopackageCache, GeopackageLevelCache
+from mapproxy.cache.tile import Tile
+from mapproxy.grid import tile_grid, TileGrid
+from mapproxy.test.unit.test_cache_tile import TileCacheTestBase
+
+from nose.tools import eq_
+
+class TestGeopackageCache(TileCacheTestBase):
+
+    always_loads_metadata = True
+
+    def setup(self):
+        TileCacheTestBase.setup(self)
+        self.gpkg_file = os.path.join(self.cache_dir, 'tmp.gpkg')
+        self.table_name = 'test_tiles'
+        self.cache = GeopackageCache(
+            self.gpkg_file,
+            tile_grid=tile_grid(3857, name='global-webmercator'),
+            table_name=self.table_name,
+        )
+
+    def teardown(self):
+        if self.cache:
+            self.cache.cleanup()
+        TileCacheTestBase.teardown(self)
+
+    def test_new_geopackage(self):
+        assert os.path.exists(self.gpkg_file)
+
+        with sqlite3.connect(self.gpkg_file) as db:
+            cur = db.execute('''SELECT name FROM sqlite_master WHERE type='table' AND name=?''',
+                             (self.table_name,))
+            content = cur.fetchone()
+            assert content[0] == self.table_name
+
+        with sqlite3.connect(self.gpkg_file) as db:
+            cur = db.execute('''SELECT table_name, data_type FROM gpkg_contents WHERE table_name = ?''',
+                             (self.table_name,))
+            content = cur.fetchone()
+            assert content[0] == self.table_name
+            assert content[1] == 'tiles'
+
+        with sqlite3.connect(self.gpkg_file) as db:
+            cur = db.execute('''SELECT table_name FROM gpkg_tile_matrix WHERE table_name = ?''',
+                             (self.table_name,))
+            content = cur.fetchall()
+            assert len(content) == 20
+
+        with sqlite3.connect(self.gpkg_file) as db:
+            cur = db.execute('''SELECT table_name FROM gpkg_tile_matrix_set WHERE table_name = ?''',
+                             (self.table_name,))
+            content = cur.fetchone()
+            assert content[0] == self.table_name
+
+    def test_load_empty_tileset(self):
+        assert self.cache.load_tiles([Tile(None)]) == True
+        assert self.cache.load_tiles([Tile(None), Tile(None), Tile(None)]) == True
+
+    def test_load_more_than_2000_tiles(self):
+        # prepare data
+        for i in range(0, 2010):
+            assert self.cache.store_tile(Tile((i, 0, 10),  ImageSource(BytesIO(b'foo'))))
+
+        tiles = [Tile((i, 0, 10)) for i in range(0, 2010)]
+        assert self.cache.load_tiles(tiles)
+
+    def test_timeouts(self):
+        self.cache._db_conn_cache.db = sqlite3.connect(self.cache.geopackage_file, timeout=0.05)
+
+        def block():
+            # block database by delaying the commit
+            db = sqlite3.connect(self.cache.geopackage_file)
+            cur = db.cursor()
+            stmt = "INSERT OR REPLACE INTO {0} (zoom_level, tile_column, tile_row, tile_data) " \
+                   "VALUES (?,?,?,?)".format(self.table_name)
+            cur.execute(stmt, (3, 1, 1, '1234'))
+            time.sleep(0.2)
+            db.commit()
+
+        try:
+            assert self.cache.store_tile(self.create_tile((0, 0, 1))) == True
+
+            t = threading.Thread(target=block)
+            t.start()
+            time.sleep(0.05)
+            assert self.cache.store_tile(self.create_tile((0, 0, 1))) == False
+        finally:
+            t.join()
+
+        assert self.cache.store_tile(self.create_tile((0, 0, 1))) == True
+
+
+class TestGeopackageLevelCache(TileCacheTestBase):
+
+    always_loads_metadata = True
+
+    def setup(self):
+        TileCacheTestBase.setup(self)
+        self.cache = GeopackageLevelCache(
+            self.cache_dir,
+            tile_grid=tile_grid(3857, name='global-webmercator'),
+            table_name='test_tiles',
+        )
+
+    def teardown(self):
+        if self.cache:
+            self.cache.cleanup()
+        TileCacheTestBase.teardown(self)
+
+    def test_level_files(self):
+        if os.path.exists(self.cache_dir):
+            eq_(os.listdir(self.cache_dir), [])
+
+        self.cache.store_tile(self.create_tile((0, 0, 1)))
+        eq_(os.listdir(self.cache_dir), ['1.gpkg'])
+
+        self.cache.store_tile(self.create_tile((0, 0, 5)))
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.gpkg', '5.gpkg'])
+
+    def test_remove_level_files(self):
+        self.cache.store_tile(self.create_tile((0, 0, 1)))
+        self.cache.store_tile(self.create_tile((0, 0, 2)))
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.gpkg', '2.gpkg'])
+
+        self.cache.remove_level_tiles_before(1, timestamp=0)
+        eq_(os.listdir(self.cache_dir), ['2.gpkg'])
+
+    def test_remove_level_tiles_before(self):
+        self.cache.store_tile(self.create_tile((0, 0, 1)))
+        self.cache.store_tile(self.create_tile((0, 0, 2)))
+
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.gpkg', '2.gpkg'])
+        assert self.cache.is_cached(Tile((0, 0, 1)))
+
+        self.cache.remove_level_tiles_before(1, timestamp=time.time() - 60)
+        assert self.cache.is_cached(Tile((0, 0, 1)))
+
+        self.cache.remove_level_tiles_before(1, timestamp=0)
+        assert not self.cache.is_cached(Tile((0, 0, 1)))
+
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.gpkg', '2.gpkg'])
+        assert self.cache.is_cached(Tile((0, 0, 2)))
+
+
+    def test_bulk_store_tiles_with_different_levels(self):
+        self.cache.store_tiles([
+            self.create_tile((0, 0, 1)),
+            self.create_tile((0, 0, 2)),
+            self.create_tile((1, 0, 2)),
+            self.create_tile((1, 0, 1)),
+        ])
+
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.gpkg', '2.gpkg'])
+        assert self.cache.is_cached(Tile((0, 0, 1)))
+        assert self.cache.is_cached(Tile((1, 0, 1)))
+        assert self.cache.is_cached(Tile((0, 0, 2)))
+        assert self.cache.is_cached(Tile((1, 0, 2)))
+
+class TestGeopackageCacheInitErrors(object):
+    table_name = 'cache'
+
+    def test_bad_config_geopackage_srs(self):
+        error_msg = None
+        gpkg_file = os.path.join(os.path.join(os.path.dirname(__file__),
+                                                              'fixture'),
+                                 'cache.gpkg')
+        table_name = 'cache'
+        try:
+            GeopackageCache(gpkg_file, TileGrid(srs=4326), table_name)
+        except ValueError as ve:
+            error_msg = ve
+        assert "srs is improperly configured." in str(error_msg)
+
+    def test_bad_config_geopackage_tile(self):
+        error_msg = None
+        gpkg_file = os.path.join(os.path.join(os.path.dirname(__file__),
+                                                              'fixture'),
+                                 'cache.gpkg')
+        table_name = 'cache'
+        try:
+            GeopackageCache(gpkg_file, TileGrid(srs=900913, tile_size=(512, 512)), table_name)
+        except ValueError as ve:
+            error_msg = ve
+        assert "tile_size is improperly configured." in str(error_msg)
+
+    def test_bad_config_geopackage_res(self):
+        error_msg = None
+        gpkg_file = os.path.join(os.path.join(os.path.dirname(__file__),
+                                              'fixture'),
+                                 'cache.gpkg')
+        table_name = 'cache'
+        try:
+            GeopackageCache(gpkg_file, TileGrid(srs=900913, res=[1000, 100, 10]), table_name)
+        except ValueError as ve:
+            error_msg = ve
+        assert "res is improperly configured." in str(error_msg)
diff --git a/mapproxy/test/unit/test_cache_redis.py b/mapproxy/test/unit/test_cache_redis.py
new file mode 100644
index 0000000..4aa3645
--- /dev/null
+++ b/mapproxy/test/unit/test_cache_redis.py
@@ -0,0 +1,71 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2017 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import with_statement
+
+try:
+    import redis
+except ImportError:
+    redis = None
+
+import time
+import os
+
+from nose.plugins.skip import SkipTest
+
+from mapproxy.cache.tile import Tile
+from mapproxy.cache.redis import RedisCache
+
+from mapproxy.test.unit.test_cache_tile import TileCacheTestBase
+
+class TestRedisCache(TileCacheTestBase):
+    always_loads_metadata = False
+    def setup(self):
+        if not redis:
+            raise SkipTest("redis required for Redis tests")
+
+        redis_host = os.environ.get('MAPPROXY_TEST_REDIS')
+        if not redis_host:
+            raise SkipTest()
+        self.host, self.port = redis_host.split(':')
+
+        TileCacheTestBase.setup(self)
+
+        self.cache = RedisCache(self.host, int(self.port), prefix='mapproxy-test', db=1)
+
+    def teardown(self):
+        for k in self.cache.r.keys('mapproxy-test-*'):
+            self.cache.r.delete(k)
+
+    def test_expire(self):
+        cache = RedisCache(self.host, int(self.port), prefix='mapproxy-test', db=1, ttl=0)
+        t1 = self.create_tile(coord=(9382, 1234, 9))
+        assert cache.store_tile(t1)
+        time.sleep(0.1)
+        t2 = Tile(t1.coord)
+        assert cache.is_cached(t2)
+
+        cache = RedisCache(self.host, int(self.port), prefix='mapproxy-test', db=1, ttl=0.05)
+        t1 = self.create_tile(coord=(5382, 2234, 9))
+        assert cache.store_tile(t1)
+        time.sleep(0.1)
+        t2 = Tile(t1.coord)
+        assert not cache.is_cached(t2)
+
+    def test_double_remove(self):
+        tile = self.create_tile()
+        self.create_cached_tile(tile)
+        assert self.cache.remove_tile(tile)
+        assert self.cache.remove_tile(tile)
diff --git a/mapproxy/test/unit/test_cache_s3.py b/mapproxy/test/unit/test_cache_s3.py
new file mode 100644
index 0000000..9964017
--- /dev/null
+++ b/mapproxy/test/unit/test_cache_s3.py
@@ -0,0 +1,85 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2011 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import boto3
+    from moto import mock_s3
+except ImportError:
+    boto3 = None
+    mock_s3 = None
+
+from nose.plugins.skip import SkipTest
+
+from mapproxy.cache.s3 import S3Cache
+from mapproxy.test.unit.test_cache_tile import TileCacheTestBase
+
+
+class TestS3Cache(TileCacheTestBase):
+    always_loads_metadata = True
+    uses_utc = True
+
+    def setup(self):
+        if not mock_s3 or not boto3:
+            raise SkipTest("boto3 and moto required for S3 tests")
+
+        TileCacheTestBase.setup(self)
+
+        self.mock = mock_s3()
+        self.mock.start()
+
+        self.bucket_name = "test"
+        dir_name = 'mapproxy'
+
+        boto3.client("s3").create_bucket(Bucket=self.bucket_name)
+
+        self.cache = S3Cache(dir_name,
+            file_ext='png',
+            directory_layout='tms',
+            bucket_name=self.bucket_name,
+            profile_name=None,
+            _concurrent_writer=1, # moto is not thread safe
+        )
+
+    def teardown(self):
+        self.mock.stop()
+        TileCacheTestBase.teardown(self)
+
+    def check_tile_key(self, layout, tile_coord, key):
+        cache = S3Cache('/mycache/webmercator', 'png', bucket_name=self.bucket_name, directory_layout=layout)
+        cache.store_tile(self.create_tile(tile_coord))
+
+        # raises, if key is missing
+        boto3.client("s3").head_object(Bucket=self.bucket_name, Key=key)
+
+    def test_tile_keys(self):
+        yield self.check_tile_key, 'mp', (12345, 67890,  2), 'mycache/webmercator/02/0001/2345/0006/7890.png'
+        yield self.check_tile_key, 'mp', (12345, 67890, 12), 'mycache/webmercator/12/0001/2345/0006/7890.png'
+
+        yield self.check_tile_key, 'tc', (12345, 67890,  2), 'mycache/webmercator/02/000/012/345/000/067/890.png'
+        yield self.check_tile_key, 'tc', (12345, 67890, 12), 'mycache/webmercator/12/000/012/345/000/067/890.png'
+
+        yield self.check_tile_key, 'tms', (12345, 67890,  2), 'mycache/webmercator/2/12345/67890.png'
+        yield self.check_tile_key, 'tms', (12345, 67890, 12), 'mycache/webmercator/12/12345/67890.png'
+
+        yield self.check_tile_key, 'quadkey', (0, 0, 0), 'mycache/webmercator/.png'
+        yield self.check_tile_key, 'quadkey', (0, 0, 1), 'mycache/webmercator/0.png'
+        yield self.check_tile_key, 'quadkey', (1, 1, 1), 'mycache/webmercator/3.png'
+        yield self.check_tile_key, 'quadkey', (12345, 67890, 12), 'mycache/webmercator/200200331021.png'
+
+        yield self.check_tile_key, 'arcgis', (1, 2, 3), 'mycache/webmercator/L03/R00000002/C00000001.png'
+        yield self.check_tile_key, 'arcgis', (9, 2, 3), 'mycache/webmercator/L03/R00000002/C00000009.png'
+        yield self.check_tile_key, 'arcgis', (10, 2, 3), 'mycache/webmercator/L03/R00000002/C0000000a.png'
+        yield self.check_tile_key, 'arcgis', (12345, 67890, 12), 'mycache/webmercator/L12/R00010932/C00003039.png'
+
diff --git a/mapproxy/test/unit/test_cache_tile.py b/mapproxy/test/unit/test_cache_tile.py
index 284d22c..891ccaa 100644
--- a/mapproxy/test/unit/test_cache_tile.py
+++ b/mapproxy/test/unit/test_cache_tile.py
@@ -15,6 +15,7 @@
 
 from __future__ import with_statement
 
+import datetime
 import os
 import shutil
 import threading
@@ -29,21 +30,19 @@ from PIL import Image
 from mapproxy.cache.tile import Tile
 from mapproxy.cache.file import FileCache
 from mapproxy.cache.mbtiles import MBTilesCache, MBTilesLevelCache
-from mapproxy.cache.base import CacheBackendError
 from mapproxy.image import ImageSource
 from mapproxy.image.opts import ImageOptions
 from mapproxy.test.image import create_tmp_image_buf, is_png
 
-from nose.tools import eq_, assert_raises
+from nose.tools import eq_
 
 tile_image = create_tmp_image_buf((256, 256), color='blue')
 tile_image2 = create_tmp_image_buf((256, 256), color='red')
 
-def timestamp_is_now(timestamp, delta=5):
-    return abs(timestamp - time.time()) <= delta
 
 class TileCacheTestBase(object):
     always_loads_metadata = False
+    uses_utc = False
 
     def setup(self):
         self.cache_dir = tempfile.mkdtemp()
@@ -52,23 +51,23 @@ class TileCacheTestBase(object):
         if hasattr(self, 'cache_dir') and os.path.exists(self.cache_dir):
             shutil.rmtree(self.cache_dir)
 
-    def create_tile(self, coord=(0, 0, 4)):
+    def create_tile(self, coord=(3009, 589, 12)):
         return Tile(coord,
             ImageSource(tile_image,
                 image_opts=ImageOptions(format='image/png')))
 
-    def create_another_tile(self, coord=(0, 0, 4)):
+    def create_another_tile(self, coord=(3009, 589, 12)):
         return Tile(coord,
             ImageSource(tile_image2,
                 image_opts=ImageOptions(format='image/png')))
 
     def test_is_cached_miss(self):
-        assert not self.cache.is_cached(Tile((0, 0, 4)))
+        assert not self.cache.is_cached(Tile((3009, 589, 12)))
 
     def test_is_cached_hit(self):
         tile = self.create_tile()
         self.create_cached_tile(tile)
-        assert self.cache.is_cached(Tile((0, 0, 4)))
+        assert self.cache.is_cached(Tile((3009, 589, 12)))
 
     def test_is_cached_none(self):
         assert self.cache.is_cached(Tile(None))
@@ -77,7 +76,7 @@ class TileCacheTestBase(object):
         assert self.cache.load_tile(Tile(None))
 
     def test_load_tile_not_cached(self):
-        tile = Tile((0, 0, 4))
+        tile = Tile((3009, 589, 12))
         assert not self.cache.load_tile(tile)
         assert tile.source is None
         assert tile.is_missing()
@@ -85,16 +84,16 @@ class TileCacheTestBase(object):
     def test_load_tile_cached(self):
         tile = self.create_tile()
         self.create_cached_tile(tile)
-        tile = Tile((0, 0, 4))
+        tile = Tile((3009, 589, 12))
         assert self.cache.load_tile(tile) == True
         assert not tile.is_missing()
 
     def test_store_tiles(self):
-        tiles = [self.create_tile((x, 0, 4)) for x in range(4)]
+        tiles = [self.create_tile((x, 589, 12)) for x in range(4)]
         tiles[0].stored = True
         self.cache.store_tiles(tiles)
 
-        tiles = [Tile((x, 0, 4)) for x in range(4)]
+        tiles = [Tile((x, 589, 12)) for x in range(4)]
         assert tiles[0].is_missing()
         assert self.cache.load_tile(tiles[0]) == False
         assert tiles[0].is_missing()
@@ -145,7 +144,10 @@ class TileCacheTestBase(object):
         assert self.cache.load_tile(tile, with_metadata=True)
         assert tile.source is not None
         if tile.timestamp:
-            assert timestamp_is_now(tile.timestamp, delta=10)
+            now = time.time()
+            if self.uses_utc:
+                now = time.mktime(datetime.datetime.utcnow().timetuple())
+            assert abs(tile.timestamp - now) <= 10
         if tile.size:
             assert tile.size == size
 
@@ -172,13 +174,13 @@ class TileCacheTestBase(object):
         # tile object is marked as stored,
         # check that is is not stored 'again'
         # (used for disable_storage)
-        tile = Tile((0, 0, 4), ImageSource(BytesIO(b'foo')))
+        tile = Tile((1234, 589, 12), ImageSource(BytesIO(b'foo')))
         tile.stored = True
         self.cache.store_tile(tile)
 
         assert self.cache.is_cached(tile)
 
-        tile = Tile((0, 0, 4))
+        tile = Tile((1234, 589, 12))
         assert not self.cache.is_cached(tile)
 
     def test_remove(self):
@@ -189,6 +191,11 @@ class TileCacheTestBase(object):
         self.cache.remove_tile(Tile((1, 0, 4)))
         assert not self.cache.is_cached(Tile((1, 0, 4)))
 
+        # check if we can recreate a removed tile
+        tile = self.create_tile((1, 0, 4))
+        self.create_cached_tile(tile)
+        assert self.cache.is_cached(Tile((1, 0, 4)))
+
     def create_cached_tile(self, tile):
         self.cache.store_tile(tile)
 
@@ -249,6 +256,58 @@ class TestFileTileCache(TileCacheTestBase):
             f.write(b'foo')
 
 
+    def check_tile_location(self, layout, tile_coord, path):
+        cache = FileCache('/tmp/foo', 'png', directory_layout=layout)
+        eq_(cache.tile_location(Tile(tile_coord)), path)
+
+    def test_tile_locations(self):
+        yield self.check_tile_location, 'mp', (12345, 67890,  2), '/tmp/foo/02/0001/2345/0006/7890.png'
+        yield self.check_tile_location, 'mp', (12345, 67890, 12), '/tmp/foo/12/0001/2345/0006/7890.png'
+
+        yield self.check_tile_location, 'tc', (12345, 67890,  2), '/tmp/foo/02/000/012/345/000/067/890.png'
+        yield self.check_tile_location, 'tc', (12345, 67890, 12), '/tmp/foo/12/000/012/345/000/067/890.png'
+
+        yield self.check_tile_location, 'tms', (12345, 67890,  2), '/tmp/foo/2/12345/67890.png'
+        yield self.check_tile_location, 'tms', (12345, 67890, 12), '/tmp/foo/12/12345/67890.png'
+
+        yield self.check_tile_location, 'quadkey', (0, 0, 0), '/tmp/foo/.png'
+        yield self.check_tile_location, 'quadkey', (0, 0, 1), '/tmp/foo/0.png'
+        yield self.check_tile_location, 'quadkey', (1, 1, 1), '/tmp/foo/3.png'
+        yield self.check_tile_location, 'quadkey', (12345, 67890, 12), '/tmp/foo/200200331021.png'
+
+        yield self.check_tile_location, 'arcgis', (1, 2, 3), '/tmp/foo/L03/R00000002/C00000001.png'
+        yield self.check_tile_location, 'arcgis', (9, 2, 3), '/tmp/foo/L03/R00000002/C00000009.png'
+        yield self.check_tile_location, 'arcgis', (10, 2, 3), '/tmp/foo/L03/R00000002/C0000000a.png'
+        yield self.check_tile_location, 'arcgis', (12345, 67890, 12), '/tmp/foo/L12/R00010932/C00003039.png'
+
+
+    def check_level_location(self, layout, level, path):
+        cache = FileCache('/tmp/foo', 'png', directory_layout=layout)
+        eq_(cache.level_location(level), path)
+
+    def test_level_locations(self):
+        yield self.check_level_location, 'mp', 2, '/tmp/foo/02'
+        yield self.check_level_location, 'mp', 12, '/tmp/foo/12'
+
+        yield self.check_level_location, 'tc',  2, '/tmp/foo/02'
+        yield self.check_level_location, 'tc', 12, '/tmp/foo/12'
+
+        yield self.check_level_location, 'tms',  '2', '/tmp/foo/2'
+        yield self.check_level_location, 'tms', 12, '/tmp/foo/12'
+
+        yield self.check_level_location, 'arcgis', 3, '/tmp/foo/L03'
+        yield self.check_level_location, 'arcgis', 3, '/tmp/foo/L03'
+        yield self.check_level_location, 'arcgis', 3, '/tmp/foo/L03'
+        yield self.check_level_location, 'arcgis', 12, '/tmp/foo/L12'
+
+    def test_level_location_quadkey(self):
+        try:
+            self.check_level_location('quadkey', 0, None)
+        except NotImplementedError:
+            pass
+        else:
+            assert False, "expected NotImplementedError"
+
 class TestMBTileCache(TileCacheTestBase):
     def setup(self):
         TileCacheTestBase.setup(self)
@@ -347,3 +406,17 @@ class TestMBTileLevelCache(TileCacheTestBase):
 
         eq_(sorted(os.listdir(self.cache_dir)), ['1.mbtile', '2.mbtile'])
         assert self.cache.is_cached(Tile((0, 0, 2)))
+
+    def test_bulk_store_tiles_with_different_levels(self):
+        self.cache.store_tiles([
+            self.create_tile((0, 0, 1)),
+            self.create_tile((0, 0, 2)),
+            self.create_tile((1, 0, 2)),
+            self.create_tile((1, 0, 1)),
+        ])
+
+        eq_(sorted(os.listdir(self.cache_dir)), ['1.mbtile', '2.mbtile'])
+        assert self.cache.is_cached(Tile((0, 0, 1)))
+        assert self.cache.is_cached(Tile((1, 0, 1)))
+        assert self.cache.is_cached(Tile((0, 0, 2)))
+        assert self.cache.is_cached(Tile((1, 0, 2)))
diff --git a/mapproxy/test/unit/test_client.py b/mapproxy/test/unit/test_client.py
index 1c000d6..f8662e7 100644
--- a/mapproxy/test/unit/test_client.py
+++ b/mapproxy/test/unit/test_client.py
@@ -296,32 +296,32 @@ class TestWMSInfoClient(object):
         http = MockHTTPClient()
         wms = WMSInfoClient(req, http_client=http, supported_srs=[SRS(25832)])
         fi_req = InfoQuery((8, 50, 9, 51), (512, 512),
-                           SRS(4326), (256, 256), 'text/plain')
+                           SRS(4326), (128, 64), 'text/plain')
 
         wms.get_info(fi_req)
 
         assert wms_query_eq(http.requested[0],
             TESTSERVER_URL+'/service?map=foo&LAYERS=foo&SERVICE=WMS&FORMAT=image%2Fpng'
-                           '&REQUEST=GetFeatureInfo&HEIGHT=512&SRS=EPSG%3A25832&info_format=text/plain'
+                           '&REQUEST=GetFeatureInfo&SRS=EPSG%3A25832&info_format=text/plain'
                            '&query_layers=foo'
-                           '&VERSION=1.1.1&WIDTH=512&STYLES=&x=259&y=255'
-                           '&BBOX=428333.552496,5538630.70275,500000.0,5650300.78652')
+                           '&VERSION=1.1.1&WIDTH=512&HEIGHT=797&STYLES=&x=135&y=101'
+                           '&BBOX=428333.552496,5538630.70275,500000.0,5650300.78652'), http.requested[0]
 
     def test_transform_fi_request(self):
         req = WMS111FeatureInfoRequest(url=TESTSERVER_URL + '/service?map=foo', param={'layers':'foo', 'srs': 'EPSG:25832'})
         http = MockHTTPClient()
         wms = WMSInfoClient(req, http_client=http)
         fi_req = InfoQuery((8, 50, 9, 51), (512, 512),
-                           SRS(4326), (256, 256), 'text/plain')
+                           SRS(4326), (128, 64), 'text/plain')
 
         wms.get_info(fi_req)
 
         assert wms_query_eq(http.requested[0],
             TESTSERVER_URL+'/service?map=foo&LAYERS=foo&SERVICE=WMS&FORMAT=image%2Fpng'
-                           '&REQUEST=GetFeatureInfo&HEIGHT=512&SRS=EPSG%3A25832&info_format=text/plain'
+                           '&REQUEST=GetFeatureInfo&SRS=EPSG%3A25832&info_format=text/plain'
                            '&query_layers=foo'
-                           '&VERSION=1.1.1&WIDTH=512&STYLES=&x=259&y=255'
-                           '&BBOX=428333.552496,5538630.70275,500000.0,5650300.78652')
+                           '&VERSION=1.1.1&WIDTH=512&HEIGHT=797&STYLES=&x=135&y=101'
+                           '&BBOX=428333.552496,5538630.70275,500000.0,5650300.78652'), http.requested[0]
 
 class TestWMSMapRequest100(object):
     def setup(self):
diff --git a/mapproxy/test/unit/test_client_arcgis.py b/mapproxy/test/unit/test_client_arcgis.py
new file mode 100644
index 0000000..467eea8
--- /dev/null
+++ b/mapproxy/test/unit/test_client_arcgis.py
@@ -0,0 +1,73 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2010 Omniscale <http://omniscale.de>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from io import BytesIO
+
+from mapproxy.client.arcgis import ArcGISInfoClient
+from mapproxy.layer import InfoQuery
+from mapproxy.request.arcgis import ArcGISIdentifyRequest
+from mapproxy.srs import SRS
+from mapproxy.test.http import assert_query_eq
+
+TESTSERVER_ADDRESS = ('127.0.0.1', 56413)
+TESTSERVER_URL = 'http://%s:%s' % TESTSERVER_ADDRESS
+
+
+
+class MockHTTPClient(object):
+    def __init__(self):
+        self.requested = []
+
+    def open(self, url, data=None):
+        self.requested.append(url)
+        result = BytesIO(b'{}')
+        result.seek(0)
+        result.headers = {}
+        return result
+
+class TestArcGISInfoClient(object):
+    def test_fi_request(self):
+        req = ArcGISIdentifyRequest(url=TESTSERVER_URL + '/MapServer/export?map=foo', param={'layers':'foo'})
+        http = MockHTTPClient()
+        wms = ArcGISInfoClient(req, http_client=http, supported_srs=[SRS(4326)])
+        fi_req = InfoQuery((8, 50, 9, 51), (512, 512),
+                           SRS(4326), (128, 64), 'text/plain')
+
+        wms.get_info(fi_req)
+
+        assert_query_eq(http.requested[0],
+            TESTSERVER_URL+'/MapServer/identify?map=foo'
+                           '&imageDisplay=512,512,96&sr=4326&f=json'
+                           '&layers=foo&tolerance=5&returnGeometry=false'
+                           '&geometryType=esriGeometryPoint&geometry=8.250000,50.875000'
+                           '&mapExtent=8,50,9,51',
+            fuzzy_number_compare=True)
+
+    def test_transform_fi_request_supported_srs(self):
+        req = ArcGISIdentifyRequest(url=TESTSERVER_URL + '/MapServer/export?map=foo', param={'layers':'foo'})
+        http = MockHTTPClient()
+        wms = ArcGISInfoClient(req, http_client=http, supported_srs=[SRS(25832)])
+        fi_req = InfoQuery((8, 50, 9, 51), (512, 512),
+                           SRS(4326), (128, 64), 'text/plain')
+
+        wms.get_info(fi_req)
+
+        assert_query_eq(http.requested[0],
+            TESTSERVER_URL+'/MapServer/identify?map=foo'
+                           '&imageDisplay=512,797,96&sr=25832&f=json'
+                           '&layers=foo&tolerance=5&returnGeometry=false'
+                           '&geometryType=esriGeometryPoint&geometry=447229.979084,5636149.370634'
+                           '&mapExtent=428333.552496,5538630.70275,500000.0,5650300.78652',
+            fuzzy_number_compare=True)
\ No newline at end of file
diff --git a/mapproxy/test/unit/test_conf_loader.py b/mapproxy/test/unit/test_conf_loader.py
index 848cab2..af39c9d 100644
--- a/mapproxy/test/unit/test_conf_loader.py
+++ b/mapproxy/test/unit/test_conf_loader.py
@@ -24,8 +24,10 @@ from mapproxy.config.loader import (
     merge_dict,
     ConfigurationError,
 )
+from mapproxy.config.coverage import load_coverage
 from mapproxy.config.spec import validate_options
 from mapproxy.cache.tile import TileManager
+from mapproxy.seed.spec import validate_seed_conf
 from mapproxy.test.helper import TempFile
 from mapproxy.test.unit.test_grid import assert_almost_equal_bbox
 from nose.tools import eq_, assert_raises
@@ -923,3 +925,20 @@ class TestImageOptions(object):
 
         conf.globals.image_options.image_opts({}, 'image/jpeg')
 
+class TestLoadCoverage(object):
+    def test_union(self):
+        conf = {
+            'coverages': {
+                'covname': {
+                    'union': [
+                        {'bbox': [0, 0, 10, 10], 'srs': 'EPSG:4326'},
+                        {'bbox': [10, 0, 20, 10], 'srs': 'EPSG:4326', 'unknown': True},
+                    ],
+                },
+            },
+        }
+
+        errors, informal_only = validate_seed_conf(conf)
+        assert informal_only
+        assert len(errors) == 1
+        eq_(errors[0], "unknown 'unknown' in coverages.covname.union[1]")
diff --git a/mapproxy/test/unit/test_featureinfo.py b/mapproxy/test/unit/test_featureinfo.py
index 1a441da..39dff83 100644
--- a/mapproxy/test/unit/test_featureinfo.py
+++ b/mapproxy/test/unit/test_featureinfo.py
@@ -21,8 +21,13 @@ import tempfile
 from lxml import etree, html
 from nose.tools import eq_
 
-from mapproxy.featureinfo import (combined_inputs, XSLTransformer,
-    XMLFeatureInfoDoc, HTMLFeatureInfoDoc)
+from mapproxy.featureinfo import (
+    combined_inputs,
+    XSLTransformer,
+    XMLFeatureInfoDoc,
+    HTMLFeatureInfoDoc,
+    JSONFeatureInfoDoc,
+)
 from mapproxy.test.helper import strip_whitespace
 
 def test_combined_inputs():
@@ -177,3 +182,16 @@ class TestHTMLFeatureInfoDocsNoLXML(object):
             b"<p>baz2\n<p>foo</p>\n<body><p>bar</p></body>",
             result.as_string())
         eq_(result.info_type, 'text')
+
+class TestJSONFeatureInfoDocs(object):
+    def test_combine(self):
+        docs = [
+            JSONFeatureInfoDoc('{}'),
+            JSONFeatureInfoDoc('{"results": [{"foo": 1}]}'),
+            JSONFeatureInfoDoc('{"results": [{"bar": 2}]}'),
+        ]
+        result = JSONFeatureInfoDoc.combine(docs)
+
+        eq_('''{"results": [{"foo": 1}, {"bar": 2}]}''',
+            result.as_string())
+        eq_(result.info_type, 'json')
diff --git a/mapproxy/test/unit/test_geom.py b/mapproxy/test/unit/test_geom.py
index e5a455b..8d11fea 100644
--- a/mapproxy/test/unit/test_geom.py
+++ b/mapproxy/test/unit/test_geom.py
@@ -16,17 +16,27 @@
 from __future__ import division, with_statement
 
 import os
+import tempfile
+import shutil
 
 from mapproxy.srs import SRS, bbox_equals
 from mapproxy.util.geom import (
     load_polygons,
     load_datasource,
+    load_geojson,
+    load_expire_tiles,
     transform_geometry,
     geom_support,
     bbox_polygon,
     build_multipolygon,
 )
-from mapproxy.util.coverage import coverage, MultiCoverage
+from mapproxy.util.coverage import (
+    coverage,
+    MultiCoverage,
+    union_coverage,
+    diff_coverage,
+    intersection_coverage,
+)
 from mapproxy.layer import MapExtent, DefaultMapExtent
 from mapproxy.test.helper import TempFile
 
@@ -138,6 +148,40 @@ class TestPolygonLoading(object):
             eq_(polygon.type, 'Polygon')
             assert polygon.equals(shapely.geometry.Polygon([(0, 0), (15, 0), (15, 10), (0, 10)]))
 
+
+class TestGeoJSONLoading(object):
+    def test_geojson(self):
+        yield (self.check_geojson,
+            '''{"type": "Polygon", "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 0]]]}''',
+            shapely.geometry.Polygon([[0, 0], [10, 0], [10, 10], [0, 0]]),
+        )
+
+        yield (self.check_geojson,
+            '''{"type": "MultiPolygon", "coordinates": [[[[0, 0], [10, 0], [10, 10], [0, 0]]], [[[20, 0], [30, 0], [20, 10], [20, 0]]]]}''',
+            shapely.geometry.Polygon([[0, 0], [10, 0], [10, 10], [0, 0]]).union(shapely.geometry.Polygon([[20, 0], [30, 0], [20, 10], [20, 0]])),
+        )
+
+        yield (self.check_geojson,
+            '''{"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 0]]]}}''',
+            shapely.geometry.Polygon([[0, 0], [10, 0], [10, 10], [0, 0]]),
+        )
+
+        yield (self.check_geojson,
+            '''{"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 0]]]}}]}''',
+            shapely.geometry.Polygon([[0, 0], [10, 0], [10, 10], [0, 0]]),
+        )
+
+    def check_geojson(self, geojson, geometry):
+        with TempFile() as fname:
+            with open(fname, 'w') as f:
+                f.write(geojson)
+            polygon = load_geojson(fname)
+            bbox, polygon = build_multipolygon(polygon, simplify=True)
+            assert polygon.is_valid
+            assert polygon.type in ('Polygon', 'MultiPolygon'), polygon.type
+            assert polygon.equals(geometry)
+
+
 class TestTransform(object):
     def test_polygon_transf(self):
         p1 = shapely.geometry.Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
@@ -268,6 +312,71 @@ class TestBBOXCoverage(object):
         assert coverage([-10, 10, 80, 80], SRS(4326)) != coverage([-10, 10.0, 80.0, 80], SRS(31467))
 
 
+class TestUnionCoverage(object):
+    def setup(self):
+        self.coverage = union_coverage([
+            coverage([0, 0, 10, 10], SRS(4326)),
+            coverage(shapely.wkt.loads("POLYGON((10 0, 20 0, 20 10, 10 10, 10 0))"), SRS(4326)),
+            coverage(shapely.wkt.loads("POLYGON((-1000000 0, 0 0, 0 1000000, -1000000 1000000, -1000000 0))"), SRS(3857)),
+        ])
+
+    def test_bbox(self):
+        assert bbox_equals(self.coverage.bbox, [-8.98315284, 0.0, 20.0, 10.0], 0.0001), self.coverage.bbox
+
+    def test_contains(self):
+        assert self.coverage.contains((0, 0, 5, 5), SRS(4326))
+        assert self.coverage.contains((-50000, 0, -20000, 20000), SRS(3857))
+        assert not self.coverage.contains((-50000, -100, -20000, 20000), SRS(3857))
+
+    def test_intersects(self):
+        assert self.coverage.intersects((0, 0, 5, 5), SRS(4326))
+        assert self.coverage.intersects((5, 0, 25, 5), SRS(4326))
+        assert self.coverage.intersects((-50000, 0, -20000, 20000), SRS(3857))
+        assert self.coverage.intersects((-50000, -100, -20000, 20000), SRS(3857))
+
+
+class TestDiffCoverage(object):
+    def setup(self):
+        g1 = coverage(shapely.wkt.loads("POLYGON((-10 0, 20 0, 20 10, -10 10, -10 0))"), SRS(4326))
+        g2 = coverage([0, 2, 8, 8], SRS(4326))
+        g3 = coverage(shapely.wkt.loads("POLYGON((-1000000 500000, 0 500000, 0 1000000, -1000000 1000000, -1000000 500000))"), SRS(3857))
+        self.coverage = diff_coverage([g1, g2, g3])
+
+    def test_bbox(self):
+        assert bbox_equals(self.coverage.bbox, [-10, 0.0, 20.0, 10.0], 0.0001), self.coverage.bbox
+
+    def test_contains(self):
+        assert self.coverage.contains((0, 0, 1, 1), SRS(4326))
+        assert self.coverage.contains((-1100000, 510000, -1050000, 600000), SRS(3857))
+        assert not self.coverage.contains((-1100000, 510000, -990000, 600000), SRS(3857)) # touches g3
+        assert not self.coverage.contains((4, 4, 5, 5), SRS(4326)) # in g2
+
+    def test_intersects(self):
+        assert self.coverage.intersects((0, 0, 1, 1), SRS(4326))
+        assert self.coverage.intersects((-1100000, 510000, -1050000, 600000), SRS(3857))
+        assert self.coverage.intersects((-1100000, 510000, -990000, 600000), SRS(3857)) # touches g3
+        assert not self.coverage.intersects((4, 4, 5, 5), SRS(4326)) # in g2
+
+
+class TestIntersectionCoverage(object):
+    def setup(self):
+        g1 = coverage(shapely.wkt.loads("POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))"), SRS(4326))
+        g2 = coverage([5, 5, 15, 15], SRS(4326))
+        self.coverage = intersection_coverage([g1, g2])
+
+    def test_bbox(self):
+        assert bbox_equals(self.coverage.bbox, [5.0, 5.0, 10.0, 10.0], 0.0001), self.coverage.bbox
+
+    def test_contains(self):
+        assert not self.coverage.contains((0, 0, 1, 1), SRS(4326))
+        assert self.coverage.contains((6, 6, 7, 7), SRS(4326))
+
+    def test_intersects(self):
+        assert self.coverage.intersection((3, 6, 7, 7), SRS(4326))
+        assert self.coverage.intersection((6, 6, 7, 7), SRS(4326))
+        assert not self.coverage.intersects((0, 0, 1, 1), SRS(4326))
+
+
 class TestMultiCoverage(object):
     def setup(self):
         # box from 10 10 to 80 80 with small spike/corner to -10 60 (upper left)
@@ -364,3 +473,40 @@ class TestLoadDatasource(object):
 
             geoms = load_datasource(fname)
             eq_(len(geoms), 2)
+
+    def test_geojson(self):
+        with TempFile() as fname:
+            with open(fname, 'wb') as f:
+                f.write(b'''{"type": "FeatureCollection", "features": [
+                    {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 0]]]} },
+                    {"type": "Feature", "geometry": {"type": "MultiPolygon", "coordinates": [[[[0, 0], [10, 0], [10, 10], [0, 0]]], [[[0, 0], [10, 0], [10, 10], [0, 0]]], [[[0, 0], [10, 0], [10, 10], [0, 0]]]]} },
+                    {"type": "Feature", "geometry": {"type": "Point", "coordinates": [0, 0]} }
+                ]}''')
+
+            geoms = load_datasource(fname)
+            eq_(len(geoms), 4)
+
+    def test_expire_tiles_dir(self):
+        dirname = tempfile.mkdtemp()
+        try:
+            fname = os.path.join(dirname, 'tiles')
+            with open(fname, 'wb') as f:
+                f.write(b"4/2/5\n")
+                f.write(b"4/2/6\n")
+                f.write(b"4/4/3\n")
+
+            geoms = load_expire_tiles(dirname)
+            eq_(len(geoms), 3)
+        finally:
+            shutil.rmtree(dirname)
+
+    def test_expire_tiles_file(self):
+        with TempFile() as fname:
+            with open(fname, 'wb') as f:
+                f.write(b"4/2/5\n")
+                f.write(b"4/2/6\n")
+                f.write(b"error\n")
+                f.write(b"4/2/1\n") # rest of file is ignored
+
+            geoms = load_expire_tiles(fname)
+            eq_(len(geoms), 2)
diff --git a/mapproxy/test/unit/test_grid.py b/mapproxy/test/unit/test_grid.py
index 4675f20..e17b780 100644
--- a/mapproxy/test/unit/test_grid.py
+++ b/mapproxy/test/unit/test_grid.py
@@ -679,6 +679,22 @@ class TestGKTileGridUL(TileGridTest):
         assert t1[1] == t3[3]
 
 
+class TestClosestLevelTinyResFactor(object):
+    def setup(self):
+        self.grid = TileGrid(SRS(31467),
+            bbox=[420000,30000,900000,350000], origin='ul',
+            res=[4000,3750,3500,3250,3000,2750,2500,2250,2000,1750,1500,1250,1000,750,650,500,250,100,50,20,10,5,2.5,2,1.5,1,0.5],
+        )
+
+    def test_closest_level(self):
+        eq_(self.grid.closest_level(5000), 0)
+        eq_(self.grid.closest_level(4000), 0)
+        eq_(self.grid.closest_level(3750), 1)
+        eq_(self.grid.closest_level(3500), 2)
+        eq_(self.grid.closest_level(3250), 3)
+        eq_(self.grid.closest_level(3000), 4)
+
+
 class TestOrigins(object):
     def test_basic(self):
         grid = tile_grid(4326, bbox=(-180, -90, 180, 90), origin='ll')
diff --git a/mapproxy/test/unit/test_image.py b/mapproxy/test/unit/test_image.py
index 8435834..238da1a 100644
--- a/mapproxy/test/unit/test_image.py
+++ b/mapproxy/test/unit/test_image.py
@@ -19,10 +19,18 @@ from __future__ import with_statement
 import os
 from io import BytesIO
 from mapproxy.compat.image import Image, ImageDraw
-from mapproxy.image import ImageSource, ReadBufWrapper, is_single_color_image
-from mapproxy.image import peek_image_format
+from mapproxy.image import (
+    ImageSource,
+    BlankImageSource,
+    ReadBufWrapper,
+    is_single_color_image,
+    peek_image_format,
+    _make_transparent as make_transparent,
+    SubImageSource,
+    img_has_transparency,
+    quantize,
+)
 from mapproxy.image.merge import merge_images, BandMerger
-from mapproxy.image import _make_transparent as make_transparent, SubImageSource, img_has_transparency, quantize
 from mapproxy.image.opts import ImageOptions
 from mapproxy.image.tile import TileMerger, TileSplitter
 from mapproxy.image.transform import ImageTransformer
@@ -311,6 +319,17 @@ class TestLayerMerge(object):
             (10*10, (127, 127, 255, 255)),
         ])
 
+    def test_merge_L(self):
+        img1 = ImageSource(Image.new('RGBA', (10, 10), (255, 0, 255, 255)))
+        img2 = ImageSource(Image.new('L', (10, 10), 100))
+
+        # img2 overlays img1
+        result = merge_images([img1, img2], ImageOptions(transparent=True))
+        img = result.as_image()
+        assert_img_colors_eq(img, [
+            (10*10, (100, 100, 100, 255)),
+        ])
+
     def test_paletted_merge(self):
         if not hasattr(Image, 'FASTOCTREE'):
             raise SkipTest()
@@ -347,6 +366,16 @@ class TestLayerMerge(object):
         img = result.as_image()
         eq_(img.getpixel((0, 0)), (0, 255, 255))
 
+    def test_merge_rgb_with_transp(self):
+        img1 = ImageSource(Image.new('RGB', (10, 10), (255, 0, 255)))
+        raw = Image.new('RGB', (10, 10), (0, 255, 255))
+        raw.info = {'transparency': (0, 255, 255)} # make full transparent
+        img2 = ImageSource(raw)
+
+        result = merge_images([img1, img2], ImageOptions(transparent=False))
+        img = result.as_image()
+        eq_(img.getpixel((0, 0)), (255, 0, 255))
+
 
 class TestLayerCompositeMerge(object):
     def test_composite_merge(self):
@@ -582,6 +611,7 @@ class TestBandMerge(object):
         self.img1 = ImageSource(Image.new('RGB', (10, 10), (100, 110, 120)))
         self.img2 = ImageSource(Image.new('RGB', (10, 10), (200, 210, 220)))
         self.img3 = ImageSource(Image.new('RGB', (10, 10), (0, 255, 0)))
+        self.blank = BlankImageSource(size=(10, 10), image_opts=ImageOptions())
 
     def test_merge_noops(self):
         """
@@ -595,12 +625,15 @@ class TestBandMerge(object):
         eq_(img.size, (10, 10))
         eq_(img.getpixel((0, 0)), (0, 0, 0))
 
-    def test_merge_no_source(self):
+    def test_merge_missing_source(self):
         """
-        Check that empty source list returns BlankImageSource.
+        Check that empty source list or source list with missing images
+        returns BlankImageSource.
         """
         merger = BandMerger(mode='RGB')
         merger.add_ops(dst_band=0, src_img=0, src_band=0)
+        merger.add_ops(dst_band=1, src_img=1, src_band=0)
+        merger.add_ops(dst_band=2, src_img=2, src_band=0)
 
         img_opts = ImageOptions('RGBA', transparent=True)
         result = merger.merge([], img_opts, size=(10, 10))
@@ -609,6 +642,13 @@ class TestBandMerge(object):
         eq_(img.size, (10, 10))
         eq_(img.getpixel((0, 0)), (255, 255, 255, 0))
 
+        result = merger.merge([self.img0, self.img1], img_opts, size=(10, 10))
+        img = result.as_image()
+
+        eq_(img.size, (10, 10))
+        eq_(img.getpixel((0, 0)), (255, 255, 255, 0))
+
+
     def test_rgb_merge(self):
         """
         Check merge of RGB bands
diff --git a/mapproxy/test/unit/test_image_mask.py b/mapproxy/test/unit/test_image_mask.py
index eb8a338..f41fd3f 100644
--- a/mapproxy/test/unit/test_image_mask.py
+++ b/mapproxy/test/unit/test_image_mask.py
@@ -13,13 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from mapproxy.compat.image import Image
+from mapproxy.compat.image import Image, ImageDraw
 from mapproxy.srs import SRS
 from mapproxy.image import ImageSource
 from mapproxy.image.opts import ImageOptions
 from mapproxy.image.mask import mask_image_source_from_coverage
+from mapproxy.image.merge import LayerMerger
 from mapproxy.util.coverage import load_limited_to
-from mapproxy.test.image import assert_img_colors_eq
+from mapproxy.test.image import assert_img_colors_eq, create_image
+from nose.tools import eq_
 
 try:
     from shapely.geometry import Polygon
@@ -73,9 +75,9 @@ class TestMaskImage(object):
         geom = 'POLYGON((2 2, 2 8, 8 8, 8 2, 2 2), (4 4, 4 6, 6 6, 6 4, 4 4))'
 
         result = mask_image_source_from_coverage(img, [0, 0, 10, 10], SRS(4326), coverage(geom))
-        # 60*61 - 20*21 = 3240
+        # 60*60 - 20*20 = 3200
         assert_img_colors_eq(result.as_image().getcolors(),
-            [(10000-3240, (255, 255, 255, 0)), (3240, (100, 0, 200, 255))])
+            [(10000-3200, (255, 255, 255, 0)), (3200, (100, 0, 200, 255))])
 
     def test_shapely_mask_with_transform_partial_image_transparent(self):
         img = ImageSource(Image.new('RGB', (100, 100), color=(100, 0, 200)),
@@ -87,3 +89,38 @@ class TestMaskImage(object):
         # 20*20 = 400
         assert_img_colors_eq(result.as_image().getcolors(),
             [(10000-400, (255, 255, 255, 0)), (400, (100, 0, 200, 255))])
+
+
+class TestLayerCoverageMerge(object):
+    def setup(self):
+        self.coverage1 = coverage(Polygon([(0, 0), (0, 10), (10, 10), (10, 0)]), 3857)
+        self.coverage2 = coverage([2, 2, 8, 8], 3857)
+
+    def test_merge_single_coverage(self):
+        merger = LayerMerger()
+        merger.add(ImageSource(Image.new('RGB', (10, 10), (255, 255, 255))), self.coverage1)
+        result = merger.merge(image_opts=ImageOptions(transparent=True), bbox=(5, 0, 15, 10), bbox_srs=3857)
+        img = result.as_image()
+        eq_(img.mode, 'RGBA')
+        eq_(img.getpixel((4, 0)), (255, 255, 255, 255))
+        eq_(img.getpixel((6, 0)), (255, 255, 255, 0))
+
+    def test_merge_overlapping_coverage(self):
+        color1 = (255, 255, 0)
+        color2 = (0, 255, 255)
+        merger = LayerMerger()
+        merger.add(ImageSource(Image.new('RGB', (10, 10), color1)), self.coverage1)
+        merger.add(ImageSource(Image.new('RGB', (10, 10), color2)), self.coverage2)
+
+        result = merger.merge(image_opts=ImageOptions(), bbox=(0, 0, 10, 10), bbox_srs=3857)
+        img = result.as_image()
+        eq_(img.mode, 'RGB')
+
+        expected = create_image((10, 10), color1, 'RGB')
+        draw = ImageDraw.Draw(expected)
+        draw.polygon([(2, 2), (7, 2), (7, 7), (2, 7)], fill=color2)
+
+        for x in range(0, 9):
+            for y in range(0, 9):
+                eq_(img.getpixel((x, y)), expected.getpixel((x, y)))
+
diff --git a/mapproxy/test/unit/test_request.py b/mapproxy/test/unit/test_request.py
index e99d972..4238d18 100644
--- a/mapproxy/test/unit/test_request.py
+++ b/mapproxy/test/unit/test_request.py
@@ -22,7 +22,7 @@ from mapproxy.request.tile import TMSRequest, tile_request, TileRequest
 from mapproxy.request.wms import (wms_request, WMSMapRequest, WMSMapRequestParams,
                               WMS111MapRequest, WMS100MapRequest, WMS130MapRequest,
                               WMS111FeatureInfoRequest)
-from mapproxy.request.arcgis import ArcGISRequest
+from mapproxy.request.arcgis import ArcGISRequest, ArcGISIdentifyRequest
 from mapproxy.exception import RequestError
 from mapproxy.request.wms.exception import (WMS111ExceptionHandler, WMSImageExceptionHandler,
                                      WMSBlankExceptionHandler)
@@ -232,6 +232,53 @@ class TestArcGISRequest(object):
         eq_("4326", req.params.bboxSR)
         eq_("4326", req.params["bboxSR"])
 
+    def check_endpoint(self, url, expected):
+        req = ArcGISRequest(url=url)
+        eq_(req.url, expected)
+
+    def test_endpoint_urls(self):
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/', 'http://example.com/ArcGIS/rest/MapServer/export'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer', 'http://example.com/ArcGIS/rest/MapServer/export'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/export', 'http://example.com/ArcGIS/rest/MapServer/export'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/', 'http://example.com/ArcGIS/rest/ImageServer/exportImage'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer', 'http://example.com/ArcGIS/rest/ImageServer/exportImage'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/export', 'http://example.com/ArcGIS/rest/ImageServer/exportImage'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/exportImage', 'http://example.com/ArcGIS/rest/ImageServer/exportImage'
+
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/export?param=foo', 'http://example.com/ArcGIS/rest/MapServer/export?param=foo'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/export?param=foo', 'http://example.com/ArcGIS/rest/ImageServer/exportImage?param=foo'
+
+
+class TestArcGISIndentifyRequest(object):
+    def test_base_request(self):
+        req = ArcGISIdentifyRequest(url="http://example.com/ArcGIS/rest/MapServer/")
+        eq_("http://example.com/ArcGIS/rest/MapServer/identify", req.url)
+        req.params.bbox = [-180.0, -90.0, 180.0, 90.0]
+        eq_((-180.0, -90.0, 180.0, 90.0), req.params.bbox)
+        eq_("-180.0,-90.0,180.0,90.0", req.params["mapExtent"])
+        req.params.size = [256, 256]
+        eq_((256, 256), req.params.size)
+        eq_("256,256,96", req.params["imageDisplay"])
+        req.params.srs = "EPSG:4326"
+        eq_("EPSG:4326", req.params.srs)
+        eq_("4326", req.params["sr"])
+
+    def check_endpoint(self, url, expected):
+        req = ArcGISIdentifyRequest(url=url)
+        eq_(req.url, expected)
+
+    def test_endpoint_urls(self):
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/', 'http://example.com/ArcGIS/rest/MapServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer', 'http://example.com/ArcGIS/rest/MapServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/export', 'http://example.com/ArcGIS/rest/MapServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/', 'http://example.com/ArcGIS/rest/ImageServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer', 'http://example.com/ArcGIS/rest/ImageServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/export', 'http://example.com/ArcGIS/rest/ImageServer/identify'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/exportImage', 'http://example.com/ArcGIS/rest/ImageServer/identify'
+
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/MapServer/export?param=foo', 'http://example.com/ArcGIS/rest/MapServer/identify?param=foo'
+        yield self.check_endpoint, 'http://example.com/ArcGIS/rest/ImageServer/export?param=foo', 'http://example.com/ArcGIS/rest/ImageServer/identify?param=foo'
+
 
 class TestRequest(object):
     def setup(self):
diff --git a/mapproxy/test/unit/test_seed.py b/mapproxy/test/unit/test_seed.py
index 818367a..f4c2c47 100644
--- a/mapproxy/test/unit/test_seed.py
+++ b/mapproxy/test/unit/test_seed.py
@@ -154,7 +154,7 @@ class TestSeeder(object):
 
     def test_seed_full_bbox_continue(self):
         task = self.make_bbox_task([-180, -90, 180, 90], SRS(4326), [0, 1, 2])
-        seed_progress = SeedProgress([(0, 1), (0, 2)])
+        seed_progress = SeedProgress([(0, 1), (1, 2)])
         seeder = TileWalker(task, self.seed_pool, handle_uncached=True, seed_progress=seed_progress)
         seeder.walk()
 
@@ -263,3 +263,53 @@ class TestRemovebreforeTimetamp(object):
             before_timestamp_from_options({'minutes': 15}) + 60 * 15,
             time.time(), -1
         )
+
+class TestSeedProgress(object):
+    def test_progress_identifier(self):
+        old = SeedProgress()
+        with old.step_down(0, 2):
+            with old.step_down(0, 4):
+                eq_(old.current_progress_identifier(), [(0, 2), (0, 4)])
+            # previous leafs are still present
+            eq_(old.current_progress_identifier(), [(0, 2), (0, 4)])
+            with old.step_down(1, 4):
+                eq_(old.current_progress_identifier(), [(0, 2), (1, 4)])
+            eq_(old.current_progress_identifier(), [(0, 2), (1, 4)])
+
+        eq_(old.current_progress_identifier(), []) # empty list after seed
+
+        with old.step_down(1, 2):
+            eq_(old.current_progress_identifier(), [(1, 2)])
+            with old.step_down(0, 4):
+                with old.step_down(1, 4):
+                    eq_(old.current_progress_identifier(), [(1, 2), (0, 4), (1, 4)])
+
+    def test_already_processed(self):
+        new = SeedProgress([(0, 2)])
+        with new.step_down(0, 2):
+            assert not new.already_processed()
+            with new.step_down(0, 2):
+                assert not new.already_processed()
+
+        new = SeedProgress([(1, 2)])
+        with new.step_down(0, 2):
+            assert new.already_processed()
+            with new.step_down(0, 2):
+                assert new.already_processed()
+
+
+        new = SeedProgress([(0, 2), (1, 4), (2, 4)])
+        with new.step_down(0, 2):
+            assert not new.already_processed()
+            with new.step_down(0, 4):
+                assert new.already_processed()
+            with new.step_down(1, 4):
+                assert not new.already_processed()
+                with new.step_down(1, 4):
+                    assert new.already_processed()
+                with new.step_down(2, 4):
+                    assert not new.already_processed()
+                with new.step_down(3, 4):
+                    assert not new.already_processed()
+            with new.step_down(2, 4):
+                assert not new.already_processed()
diff --git a/mapproxy/test/unit/test_wms_layer.py b/mapproxy/test/unit/test_wms_layer.py
index bd60be9..9bd6bc0 100644
--- a/mapproxy/test/unit/test_wms_layer.py
+++ b/mapproxy/test/unit/test_wms_layer.py
@@ -15,7 +15,7 @@
 
 from __future__ import with_statement, division
 
-from mapproxy.layer import MapQuery
+from mapproxy.layer import MapQuery, InfoQuery
 from mapproxy.srs import SRS
 from mapproxy.service.wms import combined_layers
 from nose.tools import eq_
@@ -76,3 +76,9 @@ class TestCombinedLayers(object):
         eq_(combined[1].client.request_template.params.layers, ['c', 'd'])
         eq_(combined[2].client.request_template.params.layers, ['e', 'f'])
 
+
+class TestInfoQuery(object):
+    def test_coord(self):
+        query = InfoQuery((8, 50, 9, 51), (400, 1000),
+                           SRS(4326), (100, 600), 'text/plain')
+        eq_(query.coord, (8.25, 50.4))
diff --git a/mapproxy/util/async.py b/mapproxy/util/async.py
index 217e04c..7a63b21 100644
--- a/mapproxy/util/async.py
+++ b/mapproxy/util/async.py
@@ -92,7 +92,7 @@ class EventletPool(object):
                         raise
         if len(args[0]) == 1:
             eventlet.sleep()
-            return _result_iter([call(*zip(*args)[0])], use_result_objects)
+            return _result_iter([call(*list(zip(*args))[0])], use_result_objects)
         pool = eventlet.greenpool.GreenPool(self.size)
         return _result_iter(pool.imap(call, *args), use_result_objects)
 
diff --git a/mapproxy/util/coverage.py b/mapproxy/util/coverage.py
index 4ad2a17..05a83c0 100644
--- a/mapproxy/util/coverage.py
+++ b/mapproxy/util/coverage.py
@@ -25,6 +25,7 @@ from mapproxy.util.geom import (
     load_polygon_lines,
     transform_geometry,
     bbox_polygon,
+    EmptyGeometryError,
 )
 from mapproxy.srs import SRS
 
@@ -39,11 +40,11 @@ except ImportError:
     # missing Shapely is handled by require_geom_support
     pass
 
-def coverage(geom, srs):
+def coverage(geom, srs, clip=False):
     if isinstance(geom, (list, tuple)):
-        return BBOXCoverage(geom, srs)
+        return BBOXCoverage(geom, srs, clip=clip)
     else:
-        return GeomCoverage(geom, srs)
+        return GeomCoverage(geom, srs, clip=clip)
 
 def load_limited_to(limited_to):
     require_geom_support()
@@ -107,11 +108,11 @@ class MultiCoverage(object):
         return '<MultiCoverage %r: %r>' % (self.extent.llbbox, self.coverages)
 
 class BBOXCoverage(object):
-    clip = False
-    def __init__(self, bbox, srs):
+    def __init__(self, bbox, srs, clip=False):
         self.bbox = bbox
         self.srs = srs
         self.geom = None
+        self.clip = clip
 
     @property
     def extent(self):
@@ -139,7 +140,7 @@ class BBOXCoverage(object):
 
         if intersection[0] >= intersection[2] or intersection[1] >= intersection[3]:
             return None
-        return BBOXCoverage(intersection, self.srs)
+        return BBOXCoverage(intersection, self.srs, clip=self.clip)
 
     def contains(self, bbox, srs):
         bbox = self._bbox_in_coverage_srs(bbox, srs)
@@ -150,7 +151,7 @@ class BBOXCoverage(object):
             return self
 
         bbox = self.srs.transform_bbox_to(srs, self.bbox)
-        return BBOXCoverage(bbox, srs)
+        return BBOXCoverage(bbox, srs, clip=self.clip)
 
     def __eq__(self, other):
         if not isinstance(other, BBOXCoverage):
@@ -218,7 +219,7 @@ class GeomCoverage(object):
             return self
 
         geom = transform_geometry(self.srs, srs, self.geom)
-        return GeomCoverage(geom, srs)
+        return GeomCoverage(geom, srs, clip=self.clip)
 
     def intersects(self, bbox, srs):
         bbox = self._geom_in_coverage_srs(bbox, srs)
@@ -227,7 +228,7 @@ class GeomCoverage(object):
 
     def intersection(self, bbox, srs):
         bbox = self._geom_in_coverage_srs(bbox, srs)
-        return GeomCoverage(self.geom.intersection(bbox), self.srs)
+        return GeomCoverage(self.geom.intersection(bbox), self.srs, clip=self.clip)
 
     def contains(self, bbox, srs):
         bbox = self._geom_in_coverage_srs(bbox, srs)
@@ -255,4 +256,72 @@ class GeomCoverage(object):
         return not self.__eq__(other)
 
     def __repr__(self):
-        return '<GeomCoverage %r: %r>' % (self.extent.llbbox, self.geom)
\ No newline at end of file
+        return '<GeomCoverage %r: %r>' % (self.extent.llbbox, self.geom)
+
+def union_coverage(coverages, clip=None):
+    """
+    Create a coverage that is the union of all `coverages`.
+    Resulting coverage is in the SRS of the first coverage.
+    """
+    srs = coverages[0].srs
+
+    coverages = [c.transform_to(srs) for c in coverages]
+
+    geoms = []
+    for c in coverages:
+        if isinstance(c, BBOXCoverage):
+            geoms.append(bbox_polygon(c.bbox))
+        else:
+            geoms.append(c.geom)
+
+    import shapely.ops
+    union = shapely.ops.cascaded_union(geoms)
+
+    return GeomCoverage(union, srs=srs, clip=clip)
+
+def diff_coverage(coverages, clip=None):
+    """
+    Create a coverage by subtracting all `coverages` from the first one.
+    Resulting coverage is in the SRS of the first coverage.
+    """
+    srs = coverages[0].srs
+
+    coverages = [c.transform_to(srs) for c in coverages]
+
+    geoms = []
+    for c in coverages:
+        if isinstance(c, BBOXCoverage):
+            geoms.append(bbox_polygon(c.bbox))
+        else:
+            geoms.append(c.geom)
+
+    sub = shapely.ops.cascaded_union(geoms[1:])
+    diff = geoms[0].difference(sub)
+
+    if diff.is_empty:
+        raise EmptyGeometryError("diff did not return any geometry")
+
+    return GeomCoverage(diff, srs=srs, clip=clip)
+
+def intersection_coverage(coverages, clip=None):
+    """
+    Create a coverage by creating the intersection of all `coverages`.
+    Resulting coverage is in the SRS of the first coverage.
+    """
+    srs = coverages[0].srs
+
+    coverages = [c.transform_to(srs) for c in coverages]
+
+    geoms = []
+    for c in coverages:
+        if isinstance(c, BBOXCoverage):
+            geoms.append(bbox_polygon(c.bbox))
+        else:
+            geoms.append(c.geom)
+
+    intersection = reduce(lambda a, b: a.intersection(b), geoms)
+
+    if intersection.is_empty:
+        raise EmptyGeometryError("intersection did not return any geometry")
+
+    return GeomCoverage(intersection, srs=srs, clip=clip)
\ No newline at end of file
diff --git a/mapproxy/util/ext/serving.py b/mapproxy/util/ext/serving.py
index 4fd73d7..d78a2e3 100644
--- a/mapproxy/util/ext/serving.py
+++ b/mapproxy/util/ext/serving.py
@@ -604,12 +604,17 @@ def restart_with_reloader():
         _log('info', ' * Restarting with reloader')
 
         args = [sys.executable] + sys.argv
-        # pip installs commands as .exe, but sys.argv[0]
-        # can miss the prefix. add .exe to avoid file-not-found
-        # in subprocess call
-        if os.name == 'nt' and '.' not in args[1]:
-            args[1] = args[1] + '.exe'
-
+        if os.name == 'nt':
+            # pip installs commands as .exe, but sys.argv[0]
+            # can miss the prefix.
+            # Add .exe to avoid file-not-found in subprocess call.
+            # Also, recent pip versions create .exe commands that are not
+            # executable by Python, but there is a -script.py which
+            # we need to call in this case. Check for this first.
+            if os.path.exists(args[1] + '-script.py'):
+                args[1] = args[1] + '-script.py'
+            elif not args[1].endswith('.exe'):
+                args[1] = args[1] + '.exe'
         new_environ = os.environ.copy()
         new_environ['WERKZEUG_RUN_MAIN'] = 'true'
 
diff --git a/mapproxy/util/ext/wmsparse/test/test_parse.py b/mapproxy/util/ext/wmsparse/test/test_parse.py
index 66549e0..3fcaeae 100644
--- a/mapproxy/util/ext/wmsparse/test/test_parse.py
+++ b/mapproxy/util/ext/wmsparse/test/test_parse.py
@@ -14,7 +14,7 @@ class TestWMS111(object):
         md = cap.metadata()
         eq_(md['name'], 'OGC:WMS')
         eq_(md['title'], 'Omniscale OpenStreetMap WMS')
-        eq_(md['access_constraints'], 'This service is intended for private and evaluation use only. The data is licensed as Creative Commons Attribution-Share Alike 2.0 (http://creativecommons.org/licenses/by-sa/2.0/)')
+        eq_(md['access_constraints'], 'Here be dragons.')
         eq_(md['fees'], 'none')
         eq_(md['online_resource'], 'http://omniscale.de/')
         eq_(md['abstract'], 'Omniscale OpenStreetMap WMS (powered by MapProxy)')
diff --git a/mapproxy/util/ext/wmsparse/test/wms-omniscale-111.xml b/mapproxy/util/ext/wmsparse/test/wms-omniscale-111.xml
index 0b1c4a3..9f0917d 100644
--- a/mapproxy/util/ext/wmsparse/test/wms-omniscale-111.xml
+++ b/mapproxy/util/ext/wmsparse/test/wms-omniscale-111.xml
@@ -28,7 +28,7 @@
       <ContactElectronicMailAddress>osm at omniscale.de</ContactElectronicMailAddress>
   </ContactInformation>
   <Fees>none</Fees>
-  <AccessConstraints>This service is intended for private and evaluation use only. The data is licensed as Creative Commons Attribution-Share Alike 2.0 (http://creativecommons.org/licenses/by-sa/2.0/)</AccessConstraints>
+  <AccessConstraints>Here be dragons.</AccessConstraints>
 </Service>
 <Capability>
   <Request>
diff --git a/mapproxy/util/geom.py b/mapproxy/util/geom.py
index 7ea1ffb..6a0c1a6 100644
--- a/mapproxy/util/geom.py
+++ b/mapproxy/util/geom.py
@@ -16,10 +16,12 @@
 from __future__ import division, with_statement
 
 import os
+import json
 import codecs
 from functools import partial
 from contextlib import closing
 
+from mapproxy.grid import tile_grid
 from mapproxy.compat import string_type
 
 import logging
@@ -55,13 +57,15 @@ def load_datasource(datasource, where=None):
 
     Returns a list of Shapely Polygons.
     """
-    # check if it is a  wkt file
+    # check if it is a wkt or geojson file
     if os.path.exists(os.path.abspath(datasource)):
         with open(os.path.abspath(datasource), 'rb') as fp:
             data = fp.read(50)
         if data.lower().lstrip().startswith((b'polygon', b'multipolygon')):
             return load_polygons(datasource)
-
+        # only load geojson directly if we don't have a filter
+        if where is None and data and data.startswith(b'{'):
+            return load_geojson(datasource)
     # otherwise pass to OGR
     return load_ogr_datasource(datasource, where=where)
 
@@ -111,6 +115,41 @@ def load_polygons(geom_files):
 
     return polygons
 
+def load_geojson(datasource):
+    with open(datasource) as f:
+        geojson = json.load(f)
+        t = geojson.get('type')
+        if not t:
+            raise CoverageReadError("not a GeoJSON")
+        geometries = []
+        if t == 'FeatureCollection':
+            for f in geojson.get('features'):
+                geom = f.get('geometry')
+                if geom:
+                    geometries.append(geom)
+        elif t == 'Feature':
+            if 'geometry' in geojson:
+                geometries.append(geojson['geometry'])
+        elif t in ('Polygon', 'MultiPolygon'):
+            geometries.append(geojson)
+        else:
+            log_config.warn('skipping feature of type %s from %s: not a Polygon/MultiPolygon',
+                        t, datasource)
+
+    polygons = []
+    for geom in geometries:
+        geom = shapely.geometry.asShape(geom)
+        if geom.type == 'Polygon':
+            polygons.append(geom)
+        elif geom.type == 'MultiPolygon':
+            for p in geom:
+                polygons.append(p)
+        else:
+            log_config.warn('ignoring non-polygon geometry (%s) from %s',
+                geom.type, datasource)
+
+    return polygons
+
 def load_polygon_lines(line_iter, source='<string>'):
     polygons = []
     for line in line_iter:
@@ -173,12 +212,15 @@ def transform_geometry(from_srs, to_srs, geometry):
     transf = partial(transform_xy, from_srs, to_srs)
 
     if geometry.type == 'Polygon':
-        return transform_polygon(transf, geometry)
-
-    if geometry.type == 'MultiPolygon':
-        return transform_multipolygon(transf, geometry)
+        result = transform_polygon(transf, geometry)
+    elif geometry.type == 'MultiPolygon':
+        result = transform_multipolygon(transf, geometry)
+    else:
+        raise ValueError('cannot transform %s' % geometry.type)
 
-    raise ValueError('cannot transform %s' % geometry.type)
+    if not result.is_valid:
+        result = result.buffer(0)
+    return result
 
 def transform_polygon(transf, polygon):
     ext = transf(polygon.exterior.xy)
@@ -216,4 +258,33 @@ def flatten_to_polygons(geometry):
 
     return []
 
-
+def load_expire_tiles(expire_dir, grid=None):
+    if grid is None:
+        grid = tile_grid(3857, origin='nw')
+    tiles = set()
+
+    def parse(filename):
+        with open(filename) as f:
+            try:
+                for line in f:
+                    if not line:
+                        continue
+                    tile = tuple(map(int, line.split('/')))
+                    tiles.add(tile)
+            except:
+                log_config.warn('found error in %s, skipping rest of file', filename)
+
+    if os.path.isdir(expire_dir):
+        for root, dirs, files in os.walk(expire_dir):
+            for name in files:
+                filename = os.path.join(root, name)
+                parse(filename)
+    else:
+        parse(expire_dir)
+
+    boxes = []
+    for tile in tiles:
+        z, x, y = tile
+        boxes.append(shapely.geometry.box(*grid.tile_bbox((x, y, z))))
+
+    return boxes
diff --git a/mapproxy/util/py.py b/mapproxy/util/py.py
index b76b3ab..91a410d 100644
--- a/mapproxy/util/py.py
+++ b/mapproxy/util/py.py
@@ -70,12 +70,13 @@ class cached_property(object):
 
 def memoize(func):
     @wraps(func)
-    def wrapper(self, *args):
+    def wrapper(self, *args, **kwargs):
         if not hasattr(self, '__memoize_cache'):
             self.__memoize_cache = {}
         cache = self.__memoize_cache.setdefault(func, {})
-        if args not in cache:
-            cache[args] = func(self, *args)
-        return cache[args]
+        key = args + tuple(kwargs.items())
+        if key not in cache:
+            cache[key] = func(self, *args, **kwargs)
+        return cache[key]
     return wrapper
 
diff --git a/release.py b/release.py
index ce9faec..1878a39 100644
--- a/release.py
+++ b/release.py
@@ -11,8 +11,8 @@ from scriptine import path
 from scriptine.shell import backtick_, sh
 
 PACKAGE_NAME = 'MapProxy'
-REMOTE_DOC_LOCATION = 'omniscale.de:domains/mapproxy.org/docs'
-REMOTE_REL_LOCATION = 'omniscale.de:domains/mapproxy.org/static/rel'
+REMOTE_DOC_LOCATION = 'mapproxy.org:/opt/www/mapproxy.org/docs'
+REMOTE_REL_LOCATION = 'mapproxy.org:/opt/www/mapproxy.org/static/rel'
 
 VERSION_FILES = [
     ('setup.py', 'version="###"'),
@@ -78,6 +78,10 @@ def upload_sdist_command():
     remote_rel_location = REMOTE_REL_LOCATION
     sh('scp dist/MapProxy-%(ver)s.* %(remote_rel_location)s' % locals())
 
+def upload_test_sdist_command():
+    date = backtick_('date +%Y%m%d').strip()
+    print('python setup.py egg_info -R -D -b ".dev%s" register -r testpypi sdist upload -r testpypi' % (date, ))
+
 def upload_final_sdist_command():
     sh('python setup.py egg_info -b "" -D sdist upload')
 
diff --git a/requirements-tests.txt b/requirements-tests.txt
index 2152b3a..351bb02 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -1,12 +1,32 @@
-WebTest==2.0.10
-lxml==3.2.4
-nose==1.3.0
-Shapely==1.5.8
-PyYAML==3.10
-Pillow==2.8.1
-WebOb==1.2.3
-beautifulsoup4==4.4.0
-coverage==3.7
-requests==2.0.1
-six==1.4.1
-waitress==0.8.7
+WebTest==2.0.25
+lxml==3.7.3
+nose==1.3.7
+Shapely==1.5.17
+PyYAML==3.12
+Pillow==4.0.0
+WebOb==1.7.1
+coverage==4.3.4
+requests==2.13.0
+boto3==1.4.4
+moto==0.4.31
+eventlet==0.20.1
+beautifulsoup4==4.5.3
+boto==2.46.1
+botocore==1.5.14
+docutils==0.13.1
+enum-compat==0.0.2
+futures==3.0.5
+greenlet==0.4.12
+httpretty==0.8.10
+Jinja2==2.9.5
+jmespath==0.9.1
+MarkupSafe==0.23
+olefile==0.44
+python-dateutil==2.6.0
+pytz==2016.10
+s3transfer==0.1.10
+six==1.10.0
+waitress==1.0.2
+Werkzeug==0.11.15
+xmltodict==0.10.2
+redis==2.10.5
diff --git a/setup.py b/setup.py
index 11f23e4..227870e 100644
--- a/setup.py
+++ b/setup.py
@@ -54,7 +54,7 @@ def long_description(changelog_releases=10):
 
 setup(
     name='MapProxy',
-    version="1.8.2a0",
+    version="1.10.0a0",
     description='An accelerating proxy for web map services',
     long_description=long_description(7),
     author='Oliver Tonnhofer',
diff --git a/tox.ini b/tox.ini
index fc8b286..b751393 100644
--- a/tox.ini
+++ b/tox.ini
@@ -32,4 +32,4 @@ commands =
     sphinx-build -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
     sphinx-build -b latex -d {envtmpdir}/doctrees . {envtmpdir}/latex
     make -C {envtmpdir}/latex all-pdf
-    rsync -a --delete-after {envtmpdir}/html/ {envtmpdir}/latex/MapProxy.pdf ssh-226270-upload at mapproxy.org:domains/mapproxy.org/docs/nightly/
+    rsync -a --delete-after {envtmpdir}/html/ {envtmpdir}/latex/MapProxy.pdf os at mapproxy.org:/opt/www/mapproxy.org/docs/nightly/

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/mapproxy.git



More information about the Pkg-grass-devel mailing list