[qgis] 01/01: Imported Upstream version 2.18.5+dfsg

Bas Couwenberg sebastic at debian.org
Fri Mar 24 19:32:27 UTC 2017


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch upstream
in repository qgis.

commit 4b01f151514a5e8f0db5bafe9d13067c91bba461
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Fri Mar 24 17:08:30 2017 +0100

    Imported Upstream version 2.18.5+dfsg
---
 CMakeLists.txt                                     |    2 +-
 ChangeLog                                          |  508 ++
 cmake/FindQScintilla.cmake                         |    4 +-
 debian/changelog                                   |   10 +-
 debian/compat.in                                   |    1 -
 debian/control.in                                  |   30 +-
 debian/copyright                                   |   13 -
 debian/python-qgis.install.in                      |    2 -
 debian/rules                                       |   58 +-
 i18n/qgis_de.ts                                    |    4 +-
 images/icons/qgis-icon-macos.png                   |  Bin 0 -> 169214 bytes
 ms-windows/osgeo4w/package-nightly.cmd             |    5 +-
 ms-windows/osgeo4w/package.cmd                     |    6 +-
 python/core/core.sip                               |    1 +
 python/core/layertree/qgslayertreegroup.sip        |   19 +-
 python/core/layertree/qgslayertreelayer.sip        |   36 +-
 python/core/layertree/qgslayertreenode.sip         |    9 +-
 python/core/qgsmaprenderercustompainterjob.sip     |    1 +
 python/core/qgsmaprendererjob.sip                  |    2 +
 python/core/qgsmaprendererparalleljob.sip          |    1 +
 python/core/qgsmaprenderersequentialjob.sip        |    1 +
 python/core/qgsvectorlayercache.sip                |   11 +-
 python/ext-libs/CMakeLists.txt                     |    2 +-
 .../ext-libs/Jinja2-2.7.2-py2.7.egg-info/PKG-INFO  |   55 -
 .../Jinja2-2.7.2-py2.7.egg-info/SOURCES.txt        |  126 -
 .../dependency_links.txt                           |    1 -
 .../Jinja2-2.7.2-py2.7.egg-info/entry_points.txt   |    4 -
 .../installed-files.txt                            |   92 -
 .../Jinja2-2.7.2-py2.7.egg-info/not-zip-safe       |    1 -
 .../Jinja2-2.7.2-py2.7.egg-info/requires.txt       |    4 -
 .../Jinja2-2.7.2-py2.7.egg-info/top_level.txt      |    1 -
 .../ext-libs/Pygments-1.6-py2.7.egg-info/PKG-INFO  |   46 -
 .../Pygments-1.6-py2.7.egg-info/SOURCES.txt        |  416 --
 .../dependency_links.txt                           |    1 -
 .../Pygments-1.6-py2.7.egg-info/entry_points.txt   |    3 -
 .../installed-files.txt                            |  160 -
 .../Pygments-1.6-py2.7.egg-info/not-zip-safe       |    1 -
 .../Pygments-1.6-py2.7.egg-info/top_level.txt      |    1 -
 python/ext-libs/httplib2/CMakeLists.txt            |    3 -
 python/ext-libs/httplib2/__init__.py               | 1695 ------
 python/ext-libs/httplib2/iri2uri.py                |  130 -
 python/ext-libs/httplib2/socks.py                  |  438 --
 python/ext-libs/jinja2/__init__.py                 |   69 -
 python/ext-libs/jinja2/_compat.py                  |  150 -
 python/ext-libs/jinja2/_stringdefs.py              |  132 -
 python/ext-libs/jinja2/bccache.py                  |  337 --
 python/ext-libs/jinja2/compiler.py                 | 1640 ------
 python/ext-libs/jinja2/constants.py                |   32 -
 python/ext-libs/jinja2/debug.py                    |  337 --
 python/ext-libs/jinja2/defaults.py                 |   43 -
 python/ext-libs/jinja2/environment.py              | 1191 -----
 python/ext-libs/jinja2/exceptions.py               |  146 -
 python/ext-libs/jinja2/ext.py                      |  636 ---
 python/ext-libs/jinja2/filters.py                  |  987 ----
 python/ext-libs/jinja2/lexer.py                    |  733 ---
 python/ext-libs/jinja2/loaders.py                  |  471 --
 python/ext-libs/jinja2/meta.py                     |  103 -
 python/ext-libs/jinja2/nodes.py                    |  914 ----
 python/ext-libs/jinja2/optimizer.py                |   68 -
 python/ext-libs/jinja2/parser.py                   |  895 ----
 python/ext-libs/jinja2/runtime.py                  |  581 --
 python/ext-libs/jinja2/sandbox.py                  |  368 --
 python/ext-libs/jinja2/tests.py                    |  149 -
 python/ext-libs/jinja2/testsuite/__init__.py       |  156 -
 python/ext-libs/jinja2/testsuite/api.py            |  261 -
 python/ext-libs/jinja2/testsuite/bytecode_cache.py |   37 -
 python/ext-libs/jinja2/testsuite/core_tags.py      |  305 --
 python/ext-libs/jinja2/testsuite/debug.py          |   58 -
 python/ext-libs/jinja2/testsuite/doctests.py       |   29 -
 python/ext-libs/jinja2/testsuite/ext.py            |  459 --
 python/ext-libs/jinja2/testsuite/filters.py        |  515 --
 python/ext-libs/jinja2/testsuite/imports.py        |  141 -
 python/ext-libs/jinja2/testsuite/inheritance.py    |  250 -
 python/ext-libs/jinja2/testsuite/lexnparse.py      |  593 ---
 python/ext-libs/jinja2/testsuite/loader.py         |  226 -
 python/ext-libs/jinja2/testsuite/regression.py     |  279 -
 python/ext-libs/jinja2/testsuite/res/__init__.py   |    0
 .../jinja2/testsuite/res/templates/broken.html     |    3 -
 .../jinja2/testsuite/res/templates/foo/test.html   |    1 -
 .../testsuite/res/templates/syntaxerror.html       |    4 -
 .../jinja2/testsuite/res/templates/test.html       |    1 -
 python/ext-libs/jinja2/testsuite/security.py       |  166 -
 python/ext-libs/jinja2/testsuite/tests.py          |   93 -
 python/ext-libs/jinja2/testsuite/utils.py          |   82 -
 python/ext-libs/jinja2/utils.py                    |  520 --
 python/ext-libs/jinja2/visitor.py                  |   87 -
 python/ext-libs/pygments/__init__.py               |   91 -
 python/ext-libs/pygments/cmdline.py                |  441 --
 python/ext-libs/pygments/console.py                |   74 -
 python/ext-libs/pygments/filter.py                 |   74 -
 python/ext-libs/pygments/filters/__init__.py       |  356 --
 python/ext-libs/pygments/formatter.py              |   92 -
 python/ext-libs/pygments/formatters/__init__.py    |   68 -
 python/ext-libs/pygments/formatters/_mapping.py    |   92 -
 python/ext-libs/pygments/formatters/bbcode.py      |  109 -
 python/ext-libs/pygments/formatters/html.py        |  821 ---
 python/ext-libs/pygments/formatters/img.py         |  553 --
 python/ext-libs/pygments/formatters/latex.py       |  378 --
 python/ext-libs/pygments/formatters/other.py       |  115 -
 python/ext-libs/pygments/formatters/rtf.py         |  136 -
 python/ext-libs/pygments/formatters/svg.py         |  154 -
 python/ext-libs/pygments/formatters/terminal.py    |  112 -
 python/ext-libs/pygments/formatters/terminal256.py |  222 -
 python/ext-libs/pygments/lexer.py                  |  765 ---
 python/ext-libs/pygments/lexers/__init__.py        |  229 -
 python/ext-libs/pygments/lexers/_asybuiltins.py    | 1645 ------
 python/ext-libs/pygments/lexers/_clbuiltins.py     |  232 -
 python/ext-libs/pygments/lexers/_lassobuiltins.py  | 5416 -------------------
 python/ext-libs/pygments/lexers/_luabuiltins.py    |  249 -
 python/ext-libs/pygments/lexers/_mapping.py        |  340 --
 .../ext-libs/pygments/lexers/_openedgebuiltins.py  |  562 --
 python/ext-libs/pygments/lexers/_phpbuiltins.py    | 3787 -------------
 .../ext-libs/pygments/lexers/_postgres_builtins.py |  233 -
 .../pygments/lexers/_robotframeworklexer.py        |  557 --
 .../ext-libs/pygments/lexers/_scilab_builtins.py   |   40 -
 .../ext-libs/pygments/lexers/_sourcemodbuiltins.py | 1072 ----
 python/ext-libs/pygments/lexers/_stan_builtins.py  |  174 -
 python/ext-libs/pygments/lexers/_vimbuiltins.py    |   13 -
 python/ext-libs/pygments/lexers/agile.py           | 1917 -------
 python/ext-libs/pygments/lexers/asm.py             |  398 --
 python/ext-libs/pygments/lexers/compiled.py        | 3496 ------------
 python/ext-libs/pygments/lexers/dalvik.py          |  104 -
 python/ext-libs/pygments/lexers/dotnet.py          |  630 ---
 python/ext-libs/pygments/lexers/foxpro.py          |  428 --
 python/ext-libs/pygments/lexers/functional.py      | 2598 ---------
 python/ext-libs/pygments/lexers/hdl.py             |  356 --
 python/ext-libs/pygments/lexers/jvm.py             | 1109 ----
 python/ext-libs/pygments/lexers/math.py            | 1652 ------
 python/ext-libs/pygments/lexers/other.py           | 3667 -------------
 python/ext-libs/pygments/lexers/parsers.py         |  778 ---
 python/ext-libs/pygments/lexers/shell.py           |  410 --
 python/ext-libs/pygments/lexers/special.py         |  100 -
 python/ext-libs/pygments/lexers/sql.py             |  559 --
 python/ext-libs/pygments/lexers/templates.py       | 1742 ------
 python/ext-libs/pygments/lexers/text.py            | 1843 -------
 python/ext-libs/pygments/lexers/web.py             | 3423 ------------
 python/ext-libs/pygments/plugin.py                 |   74 -
 python/ext-libs/pygments/scanner.py                |  104 -
 python/ext-libs/pygments/style.py                  |  117 -
 python/ext-libs/pygments/styles/__init__.py        |   70 -
 python/ext-libs/pygments/styles/autumn.py          |   65 -
 python/ext-libs/pygments/styles/borland.py         |   51 -
 python/ext-libs/pygments/styles/bw.py              |   49 -
 python/ext-libs/pygments/styles/colorful.py        |   81 -
 python/ext-libs/pygments/styles/default.py         |   73 -
 python/ext-libs/pygments/styles/emacs.py           |   72 -
 python/ext-libs/pygments/styles/friendly.py        |   72 -
 python/ext-libs/pygments/styles/fruity.py          |   42 -
 python/ext-libs/pygments/styles/manni.py           |   75 -
 python/ext-libs/pygments/styles/monokai.py         |  106 -
 python/ext-libs/pygments/styles/murphy.py          |   80 -
 python/ext-libs/pygments/styles/native.py          |   65 -
 python/ext-libs/pygments/styles/pastie.py          |   75 -
 python/ext-libs/pygments/styles/perldoc.py         |   69 -
 python/ext-libs/pygments/styles/rrt.py             |   33 -
 python/ext-libs/pygments/styles/tango.py           |  141 -
 python/ext-libs/pygments/styles/trac.py            |   63 -
 python/ext-libs/pygments/styles/vim.py             |   63 -
 python/ext-libs/pygments/styles/vs.py              |   38 -
 python/ext-libs/pygments/token.py                  |  195 -
 python/ext-libs/pygments/unistring.py              |  140 -
 python/ext-libs/pygments/util.py                   |  277 -
 .../requests-2.10.0.dist-info/DESCRIPTION.rst      | 1257 -----
 python/ext-libs/requests-2.10.0.dist-info/METADATA | 1286 -----
 python/ext-libs/requests-2.10.0.dist-info/RECORD   |  169 -
 python/ext-libs/requests-2.10.0.dist-info/WHEEL    |    6 -
 .../requests-2.10.0.dist-info/metadata.json        |    1 -
 .../requests-2.10.0.dist-info/top_level.txt        |    1 -
 python/ext-libs/requests/__init__.py               |   89 -
 python/ext-libs/requests/adapters.py               |  483 --
 python/ext-libs/requests/api.py                    |  149 -
 python/ext-libs/requests/auth.py                   |  242 -
 python/ext-libs/requests/cacert.pem                | 5616 --------------------
 python/ext-libs/requests/certs.py                  |   25 -
 python/ext-libs/requests/compat.py                 |   62 -
 python/ext-libs/requests/cookies.py                |  493 --
 python/ext-libs/requests/exceptions.py             |  114 -
 python/ext-libs/requests/hooks.py                  |   34 -
 python/ext-libs/requests/models.py                 |  855 ---
 python/ext-libs/requests/packages/__init__.py      |   36 -
 .../ext-libs/requests/packages/chardet/__init__.py |   32 -
 .../ext-libs/requests/packages/chardet/big5freq.py |  925 ----
 .../requests/packages/chardet/big5prober.py        |   42 -
 .../requests/packages/chardet/chardetect.py        |   80 -
 .../requests/packages/chardet/chardistribution.py  |  231 -
 .../packages/chardet/charsetgroupprober.py         |  106 -
 .../requests/packages/chardet/charsetprober.py     |   62 -
 .../packages/chardet/codingstatemachine.py         |   61 -
 .../ext-libs/requests/packages/chardet/compat.py   |   34 -
 .../requests/packages/chardet/constants.py         |   39 -
 .../requests/packages/chardet/cp949prober.py       |   44 -
 .../requests/packages/chardet/escprober.py         |   86 -
 python/ext-libs/requests/packages/chardet/escsm.py |  242 -
 .../requests/packages/chardet/eucjpprober.py       |   90 -
 .../requests/packages/chardet/euckrfreq.py         |  596 ---
 .../requests/packages/chardet/euckrprober.py       |   42 -
 .../requests/packages/chardet/euctwfreq.py         |  428 --
 .../requests/packages/chardet/euctwprober.py       |   41 -
 .../requests/packages/chardet/gb2312freq.py        |  472 --
 .../requests/packages/chardet/gb2312prober.py      |   41 -
 .../requests/packages/chardet/hebrewprober.py      |  283 -
 .../ext-libs/requests/packages/chardet/jisfreq.py  |  569 --
 .../ext-libs/requests/packages/chardet/jpcntx.py   |  227 -
 .../packages/chardet/langbulgarianmodel.py         |  229 -
 .../requests/packages/chardet/langcyrillicmodel.py |  329 --
 .../requests/packages/chardet/langgreekmodel.py    |  225 -
 .../requests/packages/chardet/langhebrewmodel.py   |  201 -
 .../packages/chardet/langhungarianmodel.py         |  225 -
 .../requests/packages/chardet/langthaimodel.py     |  200 -
 .../requests/packages/chardet/latin1prober.py      |  139 -
 .../requests/packages/chardet/mbcharsetprober.py   |   86 -
 .../requests/packages/chardet/mbcsgroupprober.py   |   54 -
 .../ext-libs/requests/packages/chardet/mbcssm.py   |  572 --
 .../requests/packages/chardet/sbcharsetprober.py   |  120 -
 .../requests/packages/chardet/sbcsgroupprober.py   |   69 -
 .../requests/packages/chardet/sjisprober.py        |   91 -
 .../requests/packages/chardet/universaldetector.py |  170 -
 .../requests/packages/chardet/utf8prober.py        |   76 -
 .../ext-libs/requests/packages/urllib3/__init__.py |   96 -
 .../requests/packages/urllib3/_collections.py      |  324 --
 .../requests/packages/urllib3/connection.py        |  330 --
 .../requests/packages/urllib3/connectionpool.py    |  849 ---
 .../requests/packages/urllib3/contrib/__init__.py  |    0
 .../requests/packages/urllib3/contrib/appengine.py |  231 -
 .../requests/packages/urllib3/contrib/ntlmpool.py  |  115 -
 .../requests/packages/urllib3/contrib/pyopenssl.py |  358 --
 .../requests/packages/urllib3/contrib/socks.py     |  172 -
 .../requests/packages/urllib3/exceptions.py        |  209 -
 .../ext-libs/requests/packages/urllib3/fields.py   |  178 -
 .../ext-libs/requests/packages/urllib3/filepost.py |   94 -
 .../requests/packages/urllib3/packages/__init__.py |    5 -
 .../packages/urllib3/packages/ordered_dict.py      |  259 -
 .../requests/packages/urllib3/packages/six.py      |  385 --
 .../packages/ssl_match_hostname/__init__.py        |   13 -
 .../packages/ssl_match_hostname/_implementation.py |  105 -
 .../requests/packages/urllib3/poolmanager.py       |  284 -
 .../ext-libs/requests/packages/urllib3/request.py  |  151 -
 .../ext-libs/requests/packages/urllib3/response.py |  526 --
 .../requests/packages/urllib3/util/__init__.py     |   46 -
 .../requests/packages/urllib3/util/connection.py   |  101 -
 .../requests/packages/urllib3/util/request.py      |   72 -
 .../requests/packages/urllib3/util/response.py     |   74 -
 .../requests/packages/urllib3/util/retry.py        |  294 -
 .../requests/packages/urllib3/util/ssl_.py         |  320 --
 .../requests/packages/urllib3/util/timeout.py      |  242 -
 .../ext-libs/requests/packages/urllib3/util/url.py |  217 -
 python/ext-libs/requests/sessions.py               |  689 ---
 python/ext-libs/requests/status_codes.py           |   91 -
 python/ext-libs/requests/structures.py             |  106 -
 python/ext-libs/requests/utils.py                  |  728 ---
 python/gui/attributetable/qgsdualview.sip          |   12 +-
 python/gui/qgsgeometryrubberband.sip               |    2 +-
 python/plugins/MetaSearch/dialogs/maindialog.py    |   10 +-
 python/plugins/MetaSearch/metadata.txt             |   12 +-
 .../MetaSearch/resources/connections-default.xml   |    2 +-
 python/plugins/MetaSearch/util.py                  |   21 +-
 python/plugins/processing/algs/qgis/Merge.py       |    1 +
 python/plugins/processing/algs/saga/CMakeLists.txt |    2 +
 .../processing/algs/saga/SagaAlgorithm230.py       |   64 +
 .../processing/algs/saga/SagaAlgorithmProvider.py  |    5 +-
 .../2.3.0/AccumulatedCost(Anisotropic).txt         |    8 +
 .../2.3.0/AccumulatedCost(Isotropic).txt           |    7 +
 .../description/2.3.0/AddCoordinatestopoints.txt   |    4 +
 .../description/2.3.0/AddGridValuestoPoints.txt    |    7 +
 .../description/2.3.0/AddGridValuestoShapes.txt    |    7 +
 .../2.3.0/AddPointAttributestoPolygons.txt         |    7 +
 .../2.3.0/AddPolygonAttributestoPoints.txt         |    6 +
 .../algs/saga/description/2.3.0/Aggregate.txt      |    5 +
 .../2.3.0/AggregatePointObservations.txt           |   14 +
 .../saga/description/2.3.0/AggregationIndex.txt    |    5 +
 .../2.3.0/AnalyticalHierarchyProcess.txt           |    5 +
 .../description/2.3.0/AnalyticalHillshading.txt    |    8 +
 .../description/2.3.0/B-SplineApproximation.txt    |   10 +
 .../description/2.3.0/BurnStreamNetworkintoDEM.txt |    8 +
 .../algs/saga/description/2.3.0/CellBalance.txt    |    7 +
 .../saga/description/2.3.0/ChangeDateFormat.txt    |    7 +
 .../saga/description/2.3.0/ChangeDetection.txt     |   16 +
 .../saga/description/2.3.0/ChangeGridValues.txt    |    6 +
 .../saga/description/2.3.0/ChangeTimeFormat.txt    |    7 +
 .../algs/saga/description/2.3.0/ChannelNetwork.txt |   14 +
 .../2.3.0/ChannelNetworkandDrainageBasins.txt      |   11 +
 .../saga/description/2.3.0/ClipGridwithPolygon.txt |    5 +
 .../description/2.3.0/ClipPointswithPolygons.txt   |    7 +
 .../algs/saga/description/2.3.0/CloseGaps.txt      |    6 +
 .../saga/description/2.3.0/CloseGapswithSpline.txt |   12 +
 .../saga/description/2.3.0/CloseOneCellGaps.txt    |    4 +
 .../description/2.3.0/ClusterAnalysisforGrids.txt  |    9 +
 .../2.3.0/ConfusionMatrix(GridPolygons).txt        |   12 +
 .../description/2.3.0/ContourLinesfromGrid.txt     |    8 +
 .../2.3.0/ConvergenceIndex(SearchRadius).txt       |   11 +
 .../saga/description/2.3.0/ConvergenceIndex.txt    |    6 +
 .../description/2.3.0/ConvertDataStorageType.txt   |    5 +
 .../description/2.3.0/ConvertLinestoPoints.txt     |    6 +
 .../description/2.3.0/ConvertLinestoPolygons.txt   |    4 +
 .../2.3.0/ConvertMultipointstoPoints.txt           |    4 +
 .../description/2.3.0/ConvertPointstoLine(s).txt   |    6 +
 .../2.3.0/ConvertPolygonLineVerticestoPoints.txt   |    4 +
 .../description/2.3.0/ConvertPolygonstoLines.txt   |    4 +
 .../algs/saga/description/2.3.0/ConvexHull.txt     |    6 +
 .../description/2.3.0/CreateLinesGraticule.txt     |    9 +
 .../description/2.3.0/CreatePolygonsGraticule.txt  |    9 +
 .../algs/saga/description/2.3.0/CropToData.txt     |    4 +
 .../2.3.0/Cross-ClassificationandTabulation.txt    |    7 +
 .../algs/saga/description/2.3.0/CrossProfiles.txt  |    8 +
 .../description/2.3.0/CubicSplineApproximation.txt |   13 +
 .../description/2.3.0/CurvatureClassification.txt  |    5 +
 .../algs/saga/description/2.3.0/CutShapesLayer.txt |    7 +
 .../description/2.3.0/DTMFilter(slope-based).txt   |    8 +
 .../2.3.0/DiffusiveHillslopeEvolution(ADI).txt     |   11 +
 .../2.3.0/DiffusiveHillslopeEvolution(FTCS).txt    |   11 +
 .../2.3.0/DirectionalStatisticsforSingleGrid.txt   |   23 +
 .../algs/saga/description/2.3.0/DistanceMatrix.txt |    4 +
 .../2.3.0/DiurnalAnisotropicHeating.txt            |    5 +
 .../description/2.3.0/DiversityOfCategories.txt    |   15 +
 .../2.3.0/DownslopeDistanceGradient.txt            |    7 +
 .../saga/description/2.3.0/EdgeContamination.txt   |    4 +
 .../description/2.3.0/EffectiveAirFlowHeights.txt  |   15 +
 .../2.3.0/FastRegionGrowingAlgorithm.txt           |    7 +
 .../description/2.3.0/FastRepresentativeness.txt   |    7 +
 .../saga/description/2.3.0/FillGapsinRecords.txt   |    6 +
 .../2.3.0/FillSinks(PlanchonDarboux,2001).txt      |    5 +
 .../saga/description/2.3.0/FillSinks(QMofESP).txt  |    6 +
 .../saga/description/2.3.0/FillSinks(WangLiu).txt  |    7 +
 .../description/2.3.0/FillSinksXXL(WangLiu).txt    |    5 +
 .../algs/saga/description/2.3.0/FilterClumps.txt   |    5 +
 .../saga/description/2.3.0/FireRiskAnalysis.txt    |   18 +
 .../saga/description/2.3.0/FitNPointstoshape.txt   |    5 +
 .../algs/saga/description/2.3.0/FlatDetection.txt  |    6 +
 .../saga/description/2.3.0/FlattenPolygonLayer.txt |    4 +
 .../2.3.0/FlowAccumulation(FlowTracing).txt        |   17 +
 .../2.3.0/FlowAccumulation(QMofESP).txt            |    6 +
 .../2.3.0/FlowAccumulation(Recursive).txt          |   18 +
 .../2.3.0/FlowAccumulation(Top-Down).txt           |    5 +
 .../algs/saga/description/2.3.0/FlowPathLength.txt |    8 +
 .../2.3.0/FlowWidthandSpecificCatchmentArea.txt    |    7 +
 .../2.3.0/Fragmentation(Alternative).txt           |   17 +
 .../description/2.3.0/Fragmentation(Standard).txt  |   17 +
 ...gmentationClassesfromDensityandConnectivity.txt |    9 +
 .../algs/saga/description/2.3.0/Function.txt       |    8 +
 .../algs/saga/description/2.3.0/Fuzzify.txt        |   10 +
 .../description/2.3.0/FuzzyIntersection(AND).txt   |    5 +
 .../algs/saga/description/2.3.0/FuzzyUnion(OR).txt |    5 +
 .../algs/saga/description/2.3.0/GaussianFilter.txt |    7 +
 ...aphicallyWeightedMultipleRegression(Points).txt |   16 +
 ...allyWeightedMultipleRegression(PointsGrids).txt |   20 +
 .../GeographicallyWeightedMultipleRegression.txt   |   23 +
 ...eographicallyWeightedRegression(PointsGrid).txt |   20 +
 .../2.3.0/GeographicallyWeightedRegression.txt     |   23 +
 .../saga/description/2.3.0/GeometricFigures.txt    |    7 +
 .../saga/description/2.3.0/GetShapesExtents.txt    |    5 +
 .../description/2.3.0/GlobalMoransIforGrids.txt    |    5 +
 ...adientVectorfromCartesiantoPolarCoordinates.txt |   10 +
 ...adientVectorfromPolartoCartesianCoordinates.txt |   10 +
 .../GradientVectorsfromDirectionalComponents.txt   |   10 +
 .../GradientVectorsfromDirectionandLength.txt      |   10 +
 .../2.3.0/GradientVectorsfromSurface.txt           |    9 +
 .../algs/saga/description/2.3.0/GridBuffer.txt     |    6 +
 .../algs/saga/description/2.3.0/GridCalculator.txt |    9 +
 .../algs/saga/description/2.3.0/GridCellIndex.txt  |    5 +
 .../algs/saga/description/2.3.0/GridDifference.txt |    5 +
 .../algs/saga/description/2.3.0/GridDivision.txt   |    5 +
 .../algs/saga/description/2.3.0/GridMasking.txt    |    6 +
 .../saga/description/2.3.0/GridNormalisation.txt   |    6 +
 .../saga/description/2.3.0/GridOrientation.txt     |    5 +
 .../saga/description/2.3.0/GridProximityBuffer.txt |    8 +
 .../saga/description/2.3.0/GridSkeletonization.txt |    9 +
 .../saga/description/2.3.0/GridStandardisation.txt |    5 +
 .../2.3.0/GridStatisticsforPolygons.txt            |   16 +
 .../2.3.0/GridValuestoPoints(randomly).txt         |    5 +
 .../saga/description/2.3.0/GridValuestoPoints.txt  |    7 +
 .../algs/saga/description/2.3.0/GridVolume.txt     |    5 +
 .../algs/saga/description/2.3.0/GridsProduct.txt   |    4 +
 .../algs/saga/description/2.3.0/GridsSum.txt       |    4 +
 .../saga/description/2.3.0/HistogramSurface.txt    |    5 +
 .../algs/saga/description/2.3.0/Hypsometry.txt     |   10 +
 .../description/2.3.0/InverseDistanceWeighted.txt  |   22 +
 .../saga/description/2.3.0/InvertDataNo-Data.txt   |    4 +
 .../description/2.3.0/KernelDensityEstimation.txt  |   11 +
 .../algs/saga/description/2.3.0/LSFactor.txt       |    9 +
 .../algs/saga/description/2.3.0/LakeFlood.txt      |    7 +
 .../description/2.3.0/LandSurfaceTemperature.txt   |   10 +
 .../saga/description/2.3.0/LaplacianFilter.txt     |    8 +
 .../saga/description/2.3.0/Layerofextremevalue.txt |    5 +
 .../algs/saga/description/2.3.0/LeastCostPaths.txt |    7 +
 .../description/2.3.0/Line-PolygonIntersection.txt |    6 +
 .../algs/saga/description/2.3.0/LineDissolve.txt   |    8 +
 .../algs/saga/description/2.3.0/LineProperties.txt |    7 +
 .../saga/description/2.3.0/LineSimplification.txt  |    5 +
 .../description/2.3.0/LocalMinimaandMaxima.txt     |    5 +
 .../algs/saga/description/2.3.0/MajorityFilter.txt |    7 +
 .../saga/description/2.3.0/MassBalanceIndex.txt    |    8 +
 .../algs/saga/description/2.3.0/MergeLayers.txt    |    6 +
 .../saga/description/2.3.0/MetricConversions.txt   |    5 +
 .../description/2.3.0/MinimumDistanceAnalysis.txt  |    4 +
 .../description/2.3.0/ModifiedQuadraticShepard.txt |   11 +
 .../saga/description/2.3.0/MorphologicalFilter.txt |    7 +
 .../2.3.0/MorphometricProtectionIndex.txt          |    5 +
 .../algs/saga/description/2.3.0/Mosaicking.txt     |   15 +
 .../saga/description/2.3.0/Multi-BandVariation.txt |   11 +
 .../description/2.3.0/MultiDirectionLeeFilter.txt  |   10 +
 .../MultilevelB-SplineInterpolation(fromGrid).txt  |   13 +
 .../2.3.0/MultilevelB-SplineInterpolation.txt      |   11 +
 ...ultilevelB-SplineInterpolationforCategories.txt |   11 +
 .../MultipleRegressionAnalysis(GridGrids).txt      |   15 +
 .../MultipleRegressionAnalysis(PointsGrids).txt    |   16 +
 ...esolutionIndexofValleyBottomFlatness(MRVBF).txt |   13 +
 .../saga/description/2.3.0/NaturalNeighbour.txt    |   10 +
 .../saga/description/2.3.0/NearestNeighbour.txt    |    9 +
 .../2.3.0/OrderedWeightedAveraging(OWA).txt        |    5 +
 .../description/2.3.0/OrdinaryKriging(Global).txt  |   27 +
 .../saga/description/2.3.0/OrdinaryKriging.txt     |   24 +
 .../2.3.0/OverlandFlow-KinematicWaveD8.txt         |   13 +
 .../2.3.0/OverlandFlowDistancetoChannelNetwork.txt |    8 +
 .../algs/saga/description/2.3.0/Patching.txt       |    6 +
 .../saga/description/2.3.0/PatternAnalysis.txt     |   11 +
 .../2.3.0/PointStatisticsforPolygons.txt           |   14 +
 .../algs/saga/description/2.3.0/PointsFilter.txt   |   12 +
 .../algs/saga/description/2.3.0/PointsThinning.txt |    6 +
 .../2.3.0/PolartoCartesianCoordinates.txt          |    8 +
 .../description/2.3.0/Polygon-LineIntersection.txt |    5 +
 .../saga/description/2.3.0/PolygonCentroids.txt    |    5 +
 .../saga/description/2.3.0/PolygonClipping.txt     |    6 +
 .../saga/description/2.3.0/PolygonDifference.txt   |    6 +
 .../saga/description/2.3.0/PolygonDissolve.txt     |    8 +
 .../description/2.3.0/PolygonDissolveAllPolygs.txt |    5 +
 .../saga/description/2.3.0/PolygonIdentity.txt     |    6 +
 .../saga/description/2.3.0/PolygonIntersect.txt    |    6 +
 .../2.3.0/PolygonPartstoSeparatePolygons.txt       |    5 +
 .../saga/description/2.3.0/PolygonProperties.txt   |    8 +
 .../description/2.3.0/PolygonSelfIntersection.txt  |    5 +
 .../saga/description/2.3.0/PolygonShapeIndices.txt |    4 +
 .../2.3.0/PolygonSymmetricalDifference.txt         |    6 +
 .../algs/saga/description/2.3.0/PolygonUnion.txt   |    6 +
 .../algs/saga/description/2.3.0/PolygonUpdate.txt  |    6 +
 .../description/2.3.0/PolygonstoEdgesandNodes.txt  |    5 +
 .../description/2.3.0/PolynomialRegression.txt     |   14 +
 .../2.3.0/PrincipleComponentsAnalysis.txt          |    6 +
 .../saga/description/2.3.0/Profilefrompoints.txt   |    7 +
 .../saga/description/2.3.0/ProfilesfromLines.txt   |    9 +
 .../algs/saga/description/2.3.0/ProximityGrid.txt  |    6 +
 .../2.3.0/QuadTreeStructuretoShapes.txt            |    7 +
 .../algs/saga/description/2.3.0/RGBComposite.txt   |   24 +
 .../description/2.3.0/RadiusofVariance(Grid).txt   |    7 +
 .../algs/saga/description/2.3.0/RandomField.txt    |   12 +
 .../description/2.3.0/RandomTerrainGeneration.txt  |    9 +
 .../algs/saga/description/2.3.0/RankFilter.txt     |    7 +
 .../saga/description/2.3.0/RealSurfaceArea.txt     |    4 +
 .../description/2.3.0/ReclassifyGridValues.txt     |   18 +
 .../2.3.0/RegressionAnalysis(PointsGrid).txt       |    9 +
 .../2.3.0/RelativeHeightsandSlopePositions.txt     |   11 +
 .../description/2.3.0/RemoveDuplicatePoints.txt    |    7 +
 .../description/2.3.0/Representativeness(Grid).txt |    6 +
 .../algs/saga/description/2.3.0/Resampling.txt     |   12 +
 .../description/2.3.0/ResidualAnalysis(Grid).txt   |   18 +
 .../algs/saga/description/2.3.0/RunningAverage.txt |    6 +
 .../saga/description/2.3.0/SAGAWetnessIndex.txt    |   13 +
 .../algs/saga/description/2.3.0/SeedGeneration.txt |   11 +
 .../2.3.0/Separatepointsbydirection.txt            |    6 +
 .../algs/saga/description/2.3.0/ShapesBuffer.txt   |   10 +
 .../saga/description/2.3.0/ShapesBufferFixed.txt   |    9 +
 .../algs/saga/description/2.3.0/ShapestoGrid.txt   |   14 +
 .../saga/description/2.3.0/SharedPolygonEdges.txt  |    8 +
 .../saga/description/2.3.0/ShrinkAndExpand.txt     |    8 +
 .../algs/saga/description/2.3.0/SimpleFilter.txt   |    7 +
 .../saga/description/2.3.0/SimpleRegionGrowing.txt |   14 +
 .../algs/saga/description/2.3.0/Simulation.txt     |   16 +
 .../2.3.0/SinkDrainageRouteDetection.txt           |    6 +
 .../algs/saga/description/2.3.0/SinkRemoval.txt    |    8 +
 .../algs/saga/description/2.3.0/SkyViewFactor.txt  |   12 +
 .../description/2.3.0/Slope,Aspect,Curvature.txt   |   18 +
 .../algs/saga/description/2.3.0/SlopeLength.txt    |    4 +
 .../2.3.0/SoilTextureClassification.txt            |    7 +
 .../2.3.0/SpatialPointPatternAnalysis.txt          |    7 +
 .../description/2.3.0/SplitShapesLayerRandomly.txt |    7 +
 .../saga/description/2.3.0/StatisticsforGrids.txt  |   14 +
 .../algs/saga/description/2.3.0/StrahlerOrder.txt  |    4 +
 .../saga/description/2.3.0/StreamPowerIndex.txt    |    6 +
 .../description/2.3.0/SuccessiveFlowRouting.txt    |    7 +
 .../description/2.3.0/SupervisedClassification.txt |   22 +
 .../description/2.3.0/SurfaceSpecificPoints.txt    |    6 +
 .../2.3.0/TPIBasedLandformClassification.txt       |   12 +
 .../2.3.0/TerrainRuggednessIndex(TRI).txt          |    9 +
 .../description/2.3.0/ThinPlateSpline(TIN).txt     |   12 +
 .../saga/description/2.3.0/ThinPlateSpline.txt     |   17 +
 .../saga/description/2.3.0/ThresholdBuffer.txt     |    8 +
 .../description/2.3.0/TopographicCorrection.txt    |   11 +
 .../2.3.0/TopographicPositionIndex(TPI).txt        |   11 +
 .../2.3.0/TopographicWetnessIndex(TWI).txt         |    8 +
 .../2.3.0/Transectthroughpolygonshapefile.txt      |    6 +
 .../saga/description/2.3.0/TransformShapes.txt     |   11 +
 .../algs/saga/description/2.3.0/TransposeGrids.txt |    6 +
 .../algs/saga/description/2.3.0/Triangulation.txt  |    9 +
 .../description/2.3.0/UniversalKriging(Global).txt |   32 +
 .../saga/description/2.3.0/UniversalKriging.txt    |   24 +
 .../algs/saga/description/2.3.0/UpslopeArea.txt    |   10 +
 .../saga/description/2.3.0/UserDefinedFilter.txt   |    6 +
 .../algs/saga/description/2.3.0/VariogramCloud.txt |    7 +
 .../saga/description/2.3.0/VariogramSurface.txt    |    9 +
 .../2.3.0/VectorRuggednessMeasure(VRM).txt         |    9 +
 .../description/2.3.0/VectorisingGridClasses.txt   |    7 +
 .../2.3.0/VegetationIndex(SlopeBased).txt          |   13 +
 .../2.3.0/VerticalDistancetoChannelNetwork.txt     |    8 +
 .../description/2.3.0/WaterRetentionCapacity.txt   |    6 +
 .../saga/description/2.3.0/WatershedBasins.txt     |    7 +
 .../description/2.3.0/WatershedSegmentation.txt    |   12 +
 .../2.3.0/WindEffect(WindwardLeewardIndex).txt     |   15 +
 .../saga/description/2.3.0/ZonalGridStatistics.txt |    8 +
 python/plugins/processing/gui/ProcessingToolbox.py |    2 +-
 python/plugins/processing/tools/vector.py          |    8 +
 rpm/qgis.spec.template                             |    3 -
 src/app/composer/qgscomposer.cpp                   |   14 +-
 src/app/main.cpp                                   |    5 +
 src/app/qgisapp.cpp                                |   21 +-
 src/app/qgisapp.h                                  |    3 +
 src/app/qgsalignrasterdialog.cpp                   |    1 +
 src/app/qgsattributetabledialog.cpp                |   16 +-
 src/app/qgsbookmarks.cpp                           |    8 +-
 src/app/qgsfieldsproperties.cpp                    |   10 +-
 src/app/qgsvisibilitypresets.cpp                   |    8 +-
 src/core/composer/qgscomposerlegend.cpp            |    2 +-
 src/core/layertree/qgslayertreegroup.cpp           |    8 +-
 src/core/layertree/qgslayertreegroup.h             |   18 +-
 src/core/layertree/qgslayertreelayer.cpp           |  109 +-
 src/core/layertree/qgslayertreelayer.h             |   45 +-
 src/core/layertree/qgslayertreenode.cpp            |    6 +-
 src/core/layertree/qgslayertreenode.h              |    9 +-
 src/core/qgsmaprenderercustompainterjob.cpp        |   28 +-
 src/core/qgsmaprenderercustompainterjob.h          |    1 +
 src/core/qgsmaprendererjob.h                       |    8 +
 src/core/qgsmaprendererparalleljob.cpp             |   22 +
 src/core/qgsmaprendererparalleljob.h               |    1 +
 src/core/qgsmaprenderersequentialjob.cpp           |    9 +
 src/core/qgsmaprenderersequentialjob.h             |    1 +
 src/core/qgsofflineediting.cpp                     |   33 +
 src/core/qgsofflineediting.h                       |    5 +
 src/core/qgsogrutils.cpp                           |    9 +-
 src/core/qgsvectorlayer.cpp                        |   38 +-
 src/core/qgsvectorlayercache.cpp                   |   11 +-
 src/core/qgsvectorlayercache.h                     |   13 +-
 src/core/qgsvectorlayerfeatureiterator.cpp         |    5 +
 src/core/raster/qgsrasterlayerrenderer.cpp         |    2 +-
 src/gui/CMakeLists.txt                             |   19 +
 src/gui/attributetable/qgsattributetablemodel.cpp  |    2 +-
 src/gui/attributetable/qgsdualview.cpp             |  186 +-
 src/gui/attributetable/qgsdualview.h               |   17 +-
 .../qgsdefaultsearchwidgetwrapper.cpp              |    4 -
 src/gui/qgsfiledownloader.h                        |    4 +-
 src/gui/qgsmapcanvas.cpp                           |   13 +-
 src/gui/qgsmessagelogviewer.cpp                    |   16 +-
 src/gui/qgsmessagelogviewer.h                      |    8 +-
 src/providers/arcgisrest/qgsafsdataitems.cpp       |   18 +-
 src/providers/arcgisrest/qgsafsprovider.cpp        |    2 +-
 src/providers/arcgisrest/qgsafsproviderextern.cpp  |    4 +-
 src/providers/arcgisrest/qgsamsdataitems.cpp       |   16 +-
 src/providers/arcgisrest/qgsamsproviderextern.cpp  |    4 +-
 src/providers/ogr/qgsogrprovider.cpp               |   67 +-
 src/providers/ogr/qgsogrprovider.h                 |    7 +
 src/providers/wfs/qgswfscapabilities.cpp           |   35 +
 src/providers/wfs/qgswfscapabilities.h             |    1 +
 src/providers/wfs/qgswfsconstants.cpp              |    1 +
 src/providers/wfs/qgswfsconstants.h                |    1 +
 src/providers/wfs/qgswfsdatasourceuri.cpp          |   68 +-
 src/providers/wfs/qgswfsdatasourceuri.h            |    8 +-
 src/providers/wfs/qgswfsfeatureiterator.cpp        |   24 +
 src/providers/wms/qgswmscapabilities.cpp           |   35 +-
 src/server/qgsmslayercache.cpp                     |    9 +-
 src/server/qgswcsprojectparser.cpp                 |   26 +-
 src/server/qgswfsprojectparser.cpp                 |   20 +-
 tests/src/app/testqgsattributetable.cpp            |   39 +
 tests/src/core/testqgscomposition.cpp              |   90 +
 tests/src/core/testqgsvectorlayercache.cpp         |   62 +
 tests/src/gui/testqgsdualview.cpp                  |   31 +
 tests/src/python/CMakeLists.txt                    |    1 +
 tests/src/python/test_console.py                   |    8 +
 tests/src/python/test_provider_wfs.py              |  122 +
 tests/src/python/test_qgsappstartup.py             |    6 +-
 tests/src/python/test_qgsmaprenderer.py            |  112 +
 tests/src/python/test_qgsserver.py                 |    1 +
 578 files changed, 4318 insertions(+), 90606 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2c9c9fd..1b366b7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,6 +1,6 @@
 SET(CPACK_PACKAGE_VERSION_MAJOR "2")
 SET(CPACK_PACKAGE_VERSION_MINOR "18")
-SET(CPACK_PACKAGE_VERSION_PATCH "4")
+SET(CPACK_PACKAGE_VERSION_PATCH "5")
 SET(COMPLETE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH})
 SET(RELEASE_NAME "Las Palmas")
 IF (POLICY CMP0048) # in CMake 3.0.0+
diff --git a/ChangeLog b/ChangeLog
index dfea69c..1b789df 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,511 @@
+Alessandro Pasotti <apasotti at boundlessgeo.com>	2017-03-24
+
+    [bugfix][backport] Apply authentication configuration to xyz layers
+
+Harrissou Sant-anna <delazj at gmail.com>	2017-03-24
+
+    Backport some UI fixes to 2.18 (#4191)
+
+    * Set a name to the raster resampling dialog
+    * Fix "Configure container" dialog
+
+Merge: 616868efbe f568ca2b85
+rldhont <rldhont at gmail.com>	2017-03-23
+
+    Merge pull request #4296 from rldhont/release-2_18-processing-saga-230
+
+    [Processing] Add SAGA LTR support in 2.18
+
+rldhont <rldhont at gmail.com>	2017-03-22
+
+    [Processing] Add SAGA LTR support in 2.18
+
+Merge: e8deab2177 75b20ca9c6
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-22
+
+    Merge pull request #4054 from nyalldawson/fix_2738
+
+    [composer] Restore legend customisation from composer templates (Fix #2738)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-22
+
+    Add unit test for loading composer template restores legend customisation
+
+    Sponsored by ENEL, on behalf of Faunalia
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-22
+
+    Update loose layer matching to use layer source, name and provider
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-01-25
+
+    [composer] Restore legend customisation from composer templates
+
+    This change allows customised legends within composer templates
+    to be correctly restored when creating a new composition from
+    the template in a different project.
+
+    The legend layers will be attached to any loaded layers with a
+    matching data source as the layer from the original template
+    composition.
+
+    Fix #2738
+
+    Sponsored by ENEL, on behalf of Faunalia
+
+Marco Hugentobler <marco.hugentobler at sourcepole.ch>	2017-03-21
+
+    Fix QgsGeometryRubberBand.setGeometry binding also in 2.18
+
+rldhont <rldhont at gmail.com>	2017-03-20
+
+    [Server] Enhance WCS mapLayerFromCoverage
+
+rldhont <rldhont at gmail.com>	2017-03-20
+
+    [Server] Enhance WFS mapLayerFromTypeName
+
+rldhont <rldhont at gmail.com>	2017-03-20
+
+    [Server] Layer cache messages
+
+Even Rouault <even.rouault at spatialys.com>	2017-03-14
+
+    [OGR provider] Use OGR_F_IsFieldSetAndNotNull() when available.
+
+    Starting with GDAL 2.2, there are 2 concepts: unset fields and null fields
+    whereas previously there was only unset fields. For QGIS purposes, both
+    states (unset/null) are equivalent.
+
+    Cherry-picked from 47dd83dd75c7f86fa59510a4b65c453f27fb3771
+
+Merge: af74635168 6becea740b
+Even Rouault <even.rouault at mines-paris.org>	2017-03-20
+
+    Merge pull request #4285 from rouault/ogr_fast_changeattributes_2.18
+
+    [2.18] OGR data provider: use transactions when changing attributes and geometries (if possible) (#16216)
+
+Even Rouault <even.rouault at spatialys.com>	2017-03-14
+
+    [OGR provider] Run addFeatures() and deleteFeatures() within transaction
+
+    Cherry-picked from fbb8ffcc164828158bef551bfbfc1302e8f8145f
+
+Martin Landa <landa.martin at gmail.com>	2017-02-22
+
+    OGR data provider: use transactions when changing attributes and geometries (if possible) (fixes #16216)
+
+    Cherry picked from 16a421c7a8292b4e697363e5c57634ff57025e3a
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-19
+
+    fix build (followup 737719e)
+
+Larry Shaffer <lshaffer at boundlessgeo.com>	2016-08-10
+
+    [auth] Add missing auth system ui headers for external C++ apps
+
+    (cherry-picked from f3e90f1d5a87b2a7c6c693aa8ba3eaad64161b1d)
+
+Merge: 5541ad0c06 737719e487
+Even Rouault <even.rouault at mines-paris.org>	2017-03-17
+
+    Merge pull request #4277 from rouault/outputformat_gml3_2.18
+
+    [WFS provider] Select GML3 output format for WFS 1.0 when available
+
+Even Rouault <even.rouault at spatialys.com>	2017-03-17
+
+    [WFS provider] Select GML3 output format for WFS 1.0 when available
+
+    Some WFS servers like QGIS servers can expose GML3 output format for GetFeature
+    requests, which enable to retrieve curve geometries, instead of linearized ones
+    with the default GML2 output format. So use GML3 when advertized, and that
+    no explicit outputFormat is passed in the URI.
+
+    Cherry-picked from commit 852f01b0f92d79b5786ef2f63c6c329067128e88
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-16
+
+    don't crash on late update timeouts
+
+Matthias Kuhn <matthias at opengis.ch>	2017-03-16
+
+    Fetch geometries when required by expression filter
+
+    Fix #16358
+
+rldhont <rldhont at gmail.com>	2017-03-15
+
+    [BUGFIX][Processing] Vector tools -- add next methods for features iterator
+
+Etienne Trimaille <gustrimaille at yahoo.fr>	2017-03-14
+
+    backport fix vertical alignment in attribute table references #12700 funded by Kartoza
+
+Marco Hugentobler <marco.hugentobler at sourcepole.ch>	2017-03-13
+
+    Fix datum transformation for raster layers
+
+Sandro Mani <manisandro at gmail.com>	2017-03-13
+
+    Fix possible null-pointer dereference
+
+Sandro Mani <manisandro at gmail.com>	2017-03-10
+
+    [ArcGIS Rest] Fix some icons
+
+Sandro Mani <manisandro at gmail.com>	2017-03-10
+
+    [ArcGIS Rest] Fix connection key inconsistencies
+
+Merge: b0a6689a08 a08477bd8e
+Jürgen Fischer <jef at norbit.de>	2017-03-13
+
+    Merge pull request #4249 from SrNetoChan/release-2_18
+
+     Add between/not between to numerical fields in select by form
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-11
+
+    windows test:
+    * skip options startup test (--help shows a message box on windows)
+    * avoid initial console help
+
+    (cherry picked from commit 7293c057fc29a29b5d29658146169f298fb81ff4)
+
+nirvn <nirvn.asia at gmail.com>	2016-12-27
+
+    Add between/not between to numerical fields in select by form
+
+Matthias Kuhn <matthias at opengis.ch>	2017-03-10
+
+    [processing] Fix merge algorithm
+
+    Fix #16292
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-10
+
+    * remove ext-libs/pyproj
+    * for owslib pyproj 1.8.9 in jessie and trusty is sufficient
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-10
+
+    german translation fix (fixes #16335)
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-10
+
+    * remove ext-libs that are already available on target platforms
+    * drop ubuntu precise support (not built anymore anyway)
+
+Larry Shaffer <lshaffer at boundlessgeo.com>	2017-03-09
+
+    Skip WMS_GetPrint_SRS test in PyQgsServer
+
+    As per discussion with Alessandro Pasotti
+
+    cherry-picked from 2af1e9f4822a8f272bf7df09bac614067bccfa8d
+
+Merge: b7da275437 05e74803fe
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-09
+
+    Merge pull request #4240 from tomkralidis/issue-16266
+
+    add pyproj to Python deps
+
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-09
+
+    add pyproj to Python deps (http://hub.qgis.org/issues/16266)
+
+Larry Shaffer <lshaffer at boundlessgeo.com>	2017-03-08
+
+    Fix low res macOS icon for dock, app switcher, etc., rendering
+
+    Use 1024x1024 size.
+
+    cherry-picked from 35edc182ea20eca6b3ae16defce5557f21b390a2
+
+radosuav <radosuav at op.pl>	2016-08-01
+
+    [Processing] Fix exception on right click ToolboxAction (#3347)
+
+    (cherry picked from commit 1da60c530d2ab008b4f31bec8a68a4f8c1a05703)
+
+Merge: 2a6bcebd5e 8e875ed9f5
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Merge pull request #4230 from nyalldawson/cache_table_218
+
+    Backport attribute table optimisations from 3.0
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Use correct stats in attribute table dialog title
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from ae5a3d3)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Avoid unnecessary layer reloads for attribute table
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from 6cd97d)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Followup "Ensure that full cache flag is cleared when invalid"
+
+    This is required - when the cache is invalidated it requires
+    a full rebuild (eg due to a new attribute being added) in order
+    to have complete information.
+
+    Since this could be a very lengthy process, it's not safe to
+    immediately rebuild the full cache. Instead, clear the full
+    cache flag and require users of this class to handle
+    responsive cache rebuilding by listening to the invalidated()
+    signal from the cache.
+
+    (cherry-picked from e1d80b5)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Move partial responsibility for filtering attribute table to
+    dual view master model
+
+    This avoids requiring the table to load ALL features when the
+    table is set to just display selected or visible features
+
+    Dramatically improves load time of the attribute table when
+    working with large layers, so long as the table is set to
+    display selected features or visible features by default
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from 4d8f886)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Fix attribute table always fetches geometries for features
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from 8a050de)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Ensure that full cache flag is cleared when invalid
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from 36f190c)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Add (failing) test to ensure that attribute table doesn't fetch
+    geometry by default
+
+    (because performance)
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from acaf97c)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Ensure both attribute table cache & master model request respect
+    geometry fetching
+
+    If request needs geometry but cache isn't fetching it then cache
+    is bypassed. This is a performance hit, so ensure that cache
+    and request are always in sync wrt to fetching geoms.
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from dadd613)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Small cleanup to QgsDualView - don't use cache to access layer
+
+    (cherry-picked from 361399c)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-07
+
+    Clear existing cache when QgsVectorLayerCache is set to cache geom
+
+    Any features inside the cache must be cleared, because they
+    won't necessarily have the feature's geometry cached
+
+    On behalf of Faunalia, sponsored by ENEL
+
+    (cherry-picked from 11c444)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-06
+
+    Revert "Dynamically adjust postgres feature queue size"
+
+    This reverts commit 37a7a22197056c08c5d759eef15791818d52c759.
+
+    Leads to performance regressions in some tests
+
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-05
+
+    [MetaSearch] update release version
+
+Merge: d79bb3a0f3 37a7a22197
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-05
+
+    Merge pull request #4222 from nyalldawson/postgres_queue_218
+
+    Dynamically adjust postgres feature queue size (backport)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-05
+
+    Dynamically adjust postgres feature queue size
+
+    Lower the default queue size, but automatically adjust it
+    based on how long each fetch takes. This change keeps fetching
+    responsive even for slow connections or databases. The current
+    approach with a fixed queue size can result in very slow feature
+    fetching, which prevents UI updates for multiple seconds.
+
+    (cherry-picked from fbe4be8)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-02-23
+
+    [composer] Friendlier error messages when exports fail
+
+    Make it clear that this is likely because the destination
+    file is open in another application
+
+    (cherry-picked from b95eb07)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-04
+
+    Prompt before replacing an existing map preset
+
+    It's too easy to accidentally (and unrecoverably) erase an
+    entire preset using the replace preset menu
+
+    (cherry-picked from b1158ff)
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-04
+
+    Always reread recent projects before saving new ones
+
+    Avoids loss of recent projects from menu if working in multiple
+    QGIS sessions concurrently
+
+    (cherry-picked from f45c19c)
+
+Merge: 067df257dd fbe8459276
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-03
+
+    Merge pull request #4217 from tomkralidis/issue-16176-2.18
+
+    MetaSearch: fix help URL
+
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-03
+
+    MetaSearch: fix help URL (http://hub.qgis.org/issues/16176)
+
+Merge: 4f3719c00a 876a84d0a5
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-03
+
+    Merge pull request #4216 from tomkralidis/issue-16261-2.18
+
+     MetaSearch: clean OWS URL when adding from search #4215
+
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-03
+
+    MetaSearch: clean OWS URL when adding from search (http://hub.qgis.org/issues/16261)
+
+Tom Kralidis <tomkralidis at gmail.com>	2017-03-03
+
+    update data.gov CSW endpoint to https
+
+Merge: 30577386cb a4219b3561
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-03
+
+    Merge pull request #4207 from nyalldawson/cancel_job
+
+    Backport non-blocking render cancellation
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-03
+
+    Add a unit test for map render job cancelation
+
+Nyall Dawson <nyall.dawson at gmail.com>	2017-03-03
+
+    Don't block when canceling canvas render jobs
+
+    In some cases canceling render jobs can take a long time. Eg when
+    using database layers over a sloooooow connection, canceling a job
+    can be blocked by minutes while waiting for the first batch of feature
+    fetching to finish. (Since eg postgres features are fetched in batches
+    of 2000 with no opportunity to abort mid-way through this).
+
+    This meant that while the first render allows the GUI to remain
+    responsive, any subsequent render operations which occurred before
+    the first render completes locks up the whole ui until the first
+    render can finish cancellation.
+
+    With this change, the render cancelation happens without blocking.
+    It means that you can pan and zoom around a map over of slow
+    connection without any ui locks.
+
+    (cherry-picked from 3b56b79)
+
+Larry Shaffer <lshaffer at boundlessgeo.com>	2017-03-02
+
+    Followup to dfe268f; update QScintilla new lib name in 2.10 for Qt4
+
+    [ci skip]
+
+Juergen E. Fischer <jef at norbit.de>	2017-03-02
+
+    fix exporting bookmarks
+
+Sandro Mani <manisandro at gmail.com>	2017-03-01
+
+    Fix build against recent sip/PyQt4:
+
+    qgsfiledownloader.sip:33:0:
+    src/gui/qgsfiledownloader.h:94:5: error: overriding non-deleted function 'virtual QgsFileDownloader::~QgsFileDownloader()'
+
+    RuntimeError: qgis._core cannot import type 'QList<QVariant>' from PyQt4.QtCore
+
+Matthias Kuhn <matthias at opengis.ch>	2017-02-28
+
+    Fix visibility presets in combination w/ offline editing
+
+Larry Shaffer <lshaffer at boundlessgeo.com>	2017-02-28
+
+    Update QScintilla CMake find module to handle new lib name in 2.10
+
+    [ci skip]
+
+    kind of cherry-picked from b1c653b24963eb45f9f5d0471e3ed17c19c5bf54
+
+Matthias Kuhn <matthias at opengis.ch>	2017-02-28
+
+    Save display expression with qml export
+
+Juergen E. Fischer <jef at norbit.de>	2017-02-24
+
+    message log viewer: window isn't closeable and last tab has to stay
+    (fixes #16220)
+
+    (cherry picked from commit e0c58eb7d7c1da5891bb4ee37a4ed8548c3ac1d1)
+
+Juergen E. Fischer <jef at norbit.de>	2017-02-24
+
+    Release of 2.18.4
+
 Andreas Sturmlechner <andreas.sturmlechner at gmail.com>	2017-02-19
 
     Fix QtWebKit automagic
diff --git a/cmake/FindQScintilla.cmake b/cmake/FindQScintilla.cmake
index 9223eca..2f87e4c 100644
--- a/cmake/FindQScintilla.cmake
+++ b/cmake/FindQScintilla.cmake
@@ -34,9 +34,9 @@ ELSE(EXISTS QSCINTILLA_VERSION_STR)
     )
 
   if(ENABLE_QT5)
-    set(QSCINTILLA_LIBRARY_NAMES qscintilla2-qt5 libqt5scintilla2 libqscintilla2-qt5 qt5scintilla2 libqscintilla2-qt5.dylib)
+    set(QSCINTILLA_LIBRARY_NAMES qscintilla2-qt5 qscintilla2_qt5 libqt5scintilla2 libqscintilla2-qt5 qt5scintilla2 libqscintilla2-qt5.dylib)
   else(ENABLE_QT5)
-    set(QSCINTILLA_LIBRARY_NAMES qscintilla2 libqscintilla2 libqscintilla2.dylib)
+    set(QSCINTILLA_LIBRARY_NAMES qscintilla2 qscintilla2_qt4 libqscintilla2 libqscintilla2.dylib)
   endif(ENABLE_QT5)
 
   find_library(QSCINTILLA_LIBRARY
diff --git a/debian/changelog b/debian/changelog
index ec8f830..5fbb86c 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,8 +1,14 @@
-qgis (2.18.4) UNRELEASED; urgency=medium
+qgis (2.18.5) UNRELEASED; urgency=medium
+
+  * Release of 2.18.5
+
+ -- Jürgen E. Fischer <jef at norbit.de>  Fri, 24 Mar 2017 13:21:57 +0100
+
+qgis (2.18.4) unstable; urgency=medium
 
   * Release of 2.18.4
 
- -- Jürgen E. Fischer <jef at norbit.de>  Fri, 24 Feb 2017 13:00:06 +0100
+ -- Jürgen E. Fischer <jef at norbit.de>  Fri, 24 Mar 2017 13:21:57 +0100
 
 qgis (2.18.3) unstable; urgency=medium
 
diff --git a/debian/compat.in b/debian/compat.in
index 35300fb..fa27883 100644
--- a/debian/compat.in
+++ b/debian/compat.in
@@ -1,3 +1,2 @@
 #stretch sid jessie trusty xenial yakkety#9
 #jessie#8
-#precise#7
diff --git a/debian/control.in b/debian/control.in
index 6df1687..633e86d 100644
--- a/debian/control.in
+++ b/debian/control.in
@@ -8,17 +8,15 @@ Build-Depends:
  cmake (>= 2.8),
 #sid stretch jessie trusty xenial yakkety# debhelper (>= 9),
 #sid stretch jessie trusty xenial yakkety# dh-python,
-#precise# debhelper (>= 7),
  flex,
  grass-dev,
  libexpat1-dev,
  libfcgi-dev,
 #sid stretch jessie trusty xenial yakkety# libgdal-dev (>= 1.10.1-0~),
-#precise# libgdal-dev (>= 1.9.0) | libgdal1-dev (<< 1.9.0),
  gdal-bin,
  python-gdal,
  libgeos-dev (>= 3.0.0),
-#jessie precise trusty# libgsl0-dev,
+#jessie trusty# libgsl0-dev,
 #sid stretch xenial yakkety# libgsl-dev,
  libpq-dev,
  libproj-dev,
@@ -28,7 +26,7 @@ Build-Depends:
  libqca2-plugin-ossl,
  libqtwebkit-dev,
 #stretch jessie# libqwt-dev,
-#precise trusty xenial yakkety sid# libqwt5-qt4-dev,
+#trusty xenial yakkety sid# libqwt5-qt4-dev,
 #sid stretch jessie trusty xenial yakkety# libqjson-dev,
  libspatialite-dev,
  libsqlite3-dev,
@@ -41,12 +39,10 @@ Build-Depends:
  python-qt4-sql,
  python-yaml, python-mock,
 #stretch sid xenial yakkety# python-future,
-#precise# python,
-#precise# python-central (>= 0.5),
 #sid stretch jessie trusty xenial yakkety# python-all (>= 2.6.6-3~), python-all-dev (>= 2.6.6-3~),
 #sid stretch jessie xenial yakkety# pyqt4.qsci-dev,
 #sid stretch jessie trusty xenial yakkety# python-pyspatialite,
-#sid jessie stretch trusty xenial yakkety# python-nose2,
+#sid jessie stretch xenial yakkety# python-nose2,
  python-sip (>= 4.5.0),
  python-sip-dev (>= 4.5.0),
  libosgearth-dev,
@@ -65,9 +61,9 @@ Build-Depends:
 Build-Conflicts: libqgis-dev, qgis-dev
 #sid stretch xenial yakkety#Standards-Version: 3.9.7
 #jessie#Standards-Version: 3.9.6
-#precise trusty#Standards-Version: 3.8.4
+#trusty#Standards-Version: 3.8.4
 #sid stretch jessie#X-Python-Version: >= 2.7, << 2.8
-#precise trusty xenial yakkety#XS-Python-Version: current
+#trusty xenial yakkety#XS-Python-Version: current
 Vcs-Browser: https://github.com/qgis/QGIS/
 Vcs-Git: https://github.com/qgis/QGIS.git
 Homepage: http://qgis.org/
@@ -241,9 +237,8 @@ Depends:
  grass-dev,
  libexpat1-dev,
 #sid stretch jessie trusty xenial yakkety# libgdal-dev (>= 1.10.1-0~),
-#precise# libgdal-dev (>= 1.9.0) | libgdal1-dev (<< 1.9.0),
  libgeos-dev (>= 3.0.0),
-#jessie precise trusty# libgsl0-dev,
+#jessie trusty# libgsl0-dev,
 #sid stretch xenial yakkety# libgsl-dev,
  libpq-dev,
  libproj-dev,
@@ -313,8 +308,7 @@ Depends:
  qgis-provider-grass (= ${binary:Version}),
  ${shlibs:Depends},
  ${misc:Depends},
-#!precise# grass-core
-#precise# grass
+ grass-core
 Replaces: qgis-provider-grass (<< 1:2.11.0+git20151002)
 Conflicts: qgis-provider-grass (<< 1:2.11.0+git20151002)
 Description: GRASS plugin for QGIS
@@ -375,15 +369,10 @@ Depends:
  python-qscintilla2,
  python-httplib2,
  python-jinja2,
- python-markupsafe,
  python-pygments,
- python-dateutil,
  python-requests,
- python-tz,
- python-six,
- python-yaml,
-#stretch sid xenial yakkety# python-future,
-#sid stretch jessie trusty xenial yakkety# python-pyspatialite,
+#!trusty# python-markupsafe, python-dateutil, python-yaml,
+#sid stretch jessie trusty xenial yakkety# python-owslib, python-tz, python-pyproj, python-future, python-six, python-pyspatialite,
  libqgispython{QGIS_ABI},
  ${shlibs:Depends},
  ${python:Depends},
@@ -391,7 +380,6 @@ Depends:
  ${sip:Depends}
 Provides: ${python:Provides}
 Recommends: liblwgeom-dev
-#precise#XB-Python-Version: ${python:Versions}
 Description: Python bindings to QGIS
  QGIS is a Geographic Information System (GIS) which manages, analyzes and
  display databases of geographic information.
diff --git a/debian/copyright b/debian/copyright
index 0858004..a5a1e89 100644
--- a/debian/copyright
+++ b/debian/copyright
@@ -137,10 +137,6 @@ Files: python/ext-libs/httplib2/socks.py
 Copyright: 2006, Dan-Haim
 License: BSD-2-Clause
 
-Files: python/ext-libs/jinja2/*
-Copyright: 2006-2010, the Jinja Team
-License: BSD-3-Clause
-
 Files: python/ext-libs/markupsafe/*
 Copyright: 2010, 2013, Armin Ronacher
 License: BSD-3-Clause
@@ -173,15 +169,6 @@ Copyright: 2006, Ancient World Mapping Center
            2005, Nuxeo SARL <http://nuxeo.com>
 License: BSD-3-Clause
 
-Files: python/ext-libs/pygments/*
-Copyright: 2006-2013, the Pygments team
-License: BSD-2-Clause
-
-Files: python/ext-libs/pygments/lexers/_robotframeworklexer.py
-Copyright: 2006-2013, the Pygments team
-                2012, Nokia Siemens Networks Oyj
-License: BSD-2-Clause and Apache-2.0
-
 Files: python/ext-libs/pyspatialite/*
 Copyright: 2004-2010, Gerhard Häring <gh at ghaering.de>
 License: Zlib
diff --git a/debian/python-qgis.install.in b/debian/python-qgis.install.in
index c10e07e..ffff318 100644
--- a/debian/python-qgis.install.in
+++ b/debian/python-qgis.install.in
@@ -7,6 +7,4 @@ usr/lib/python*/*-packages/qgis/networkanalysis/*
 usr/lib/python*/*-packages/qgis/PyQt/*
 usr/lib/python*/*-packages/qgis/server/*
 usr/lib/python*/*-packages/qgis/testing/*
-#precise#usr/lib/python*/*-packages/pyspatialite/*.py
-#precise#usr/lib/python*/*-packages/pyspatialite/*.so
 #sid stretch#usr/lib/python*/*-packages/PyQt4/*.so
diff --git a/debian/rules b/debian/rules
index 8d223ae..d585e9d 100755
--- a/debian/rules
+++ b/debian/rules
@@ -34,7 +34,7 @@ ifneq (,$(findstring -oracle,$(DISTRIBUTION)))
 	WITH_ORACLE=1
 endif
 
-ifneq ($(DISTRIBUTION),$(findstring $(DISTRIBUTION),"jessie stretch precise trusty xenial yakkety"))
+ifneq ($(DISTRIBUTION),$(findstring $(DISTRIBUTION),"jessie stretch trusty xenial yakkety"))
 	DISTRIBUTION := sid
 endif
 
@@ -70,16 +70,42 @@ CMAKE_OPTS := \
 	-DGENERATE_QHP=TRUE \
 	-DSUPPRESS_SIP_WARNINGS=TRUE \
 	-DWITH_CUSTOM_WIDGETS=TRUE \
-	-DWITH_INTERNAL_HTTPLIB2=FALSE \
-	-DWITH_INTERNAL_JINJA2=FALSE \
-	-DWITH_INTERNAL_MARKUPSAFE=FALSE \
-	-DWITH_INTERNAL_PYGMENTS=FALSE \
-	-DWITH_INTERNAL_DATEUTIL=FALSE \
-	-DWITH_INTERNAL_PYTZ=FALSE \
-	-DWITH_INTERNAL_YAML=FALSE \
 	-DQT_TAG_FILE=/usr/share/qt4/doc/html/qt.tags \
 	-DQT_DOC_URL=/usr/share/qt4/doc/html/
 
+ifneq (,$(findstring $(DISTRIBUTION),"trusty"))
+	CMAKE_OPTS += \
+		-DWITH_INTERNAL_MARKUPSAFE=TRUE \
+		-DWITH_INTERNAL_DATEUTIL=TRUE \
+		-DWITH_INTERNAL_NOSE2=TRUE \
+		-DWITH_INTERNAL_YAML=TRUE
+else
+	CMAKE_OPTS += \
+		-DWITH_INTERNAL_MARKUPSAFE=FALSE \
+		-DWITH_INTERNAL_DATEUTIL=FALSE \
+		-DWITH_INTERNAL_NOSE2=FALSE \
+		-DWITH_INTERNAL_YAML=FALSE
+endif
+
+ifneq (,$(findstring $(DISTRIBUTION),"jessie trusty"))
+	CMAKE_OPTS += \
+		-DWITH_INTERNAL_FUTURE=TRUE \
+		-DWITH_INTERNAL_PYTZ=TRUE \
+		-DWITH_INTERNAL_SIX=TRUE
+else
+	CMAKE_OPTS += \
+		-DWITH_INTERNAL_FUTURE=FALSE \
+		-DWITH_INTERNAL_PYTZ=FALSE \
+		-DWITH_INTERNAL_SIX=FALSE
+endif
+
+ifneq (,$(findstring $(DISTRIBUTION),"jessie trusty xenial"))
+	CMAKE_OPTS += -DWITH_INTERNAL_OWSLIB=TRUE
+else
+	CMAKE_OPTS += -DWITH_INTERNAL_OWSLIB=FALSE
+endif
+
+
 ifneq ($(SHA),)
 	CMAKE_OPTS += -DSHA=$(SHA)
 endif
@@ -101,26 +127,10 @@ ifneq (,$(filter parallel=%,$(DEB_BUILD_OPTIONS)))
 	MAKEFLAGS += -j$(NUMJOBS)
 endif
 
-ifneq (,$(findstring $(DISTRIBUTION),"precise"))
-	CMAKE_OPTS += -DWITH_INTERNAL_NOSE2=TRUE -DWITH_INTERNAL_SIX=TRUE
-else
-	CMAKE_OPTS += -DWITH_INTERNAL_NOSE2=FALSE -DWITH_INTERNAL_SIX=FALSE
-endif
-
-ifeq (,$(findstring $(DISTRIBUTION),"stretch sid xenial yakkety"))
-	CMAKE_OPTS += -DWITH_INTERNAL_FUTURE=TRUE
-else
-	CMAKE_OPTS += -DWITH_INTERNAL_FUTURE=FALSE
-endif
-
 ifneq (,$(WITH_GLOBE))
 	CMAKE_OPTS += -DWITH_GLOBE=TRUE
 endif
 
-ifneq (,$(findstring $(DISTRIBUTION),"precise"))
-	CMAKE_OPTS += -DWITH_PYSPATIALITE=TRUE
-endif
-
 ifneq (,$(findstring $(DISTRIBUTION),"sid stretch"))
 	CMAKE_OPTS += -DPOSTGRES_LIBRARY=/usr/lib/$(DEB_BUILD_MULTIARCH)/libpq.so
 endif
diff --git a/i18n/qgis_de.ts b/i18n/qgis_de.ts
index ad16029..a59c53d 100644
--- a/i18n/qgis_de.ts
+++ b/i18n/qgis_de.ts
@@ -21208,12 +21208,12 @@ Nur %1 von %2 Objekten geschrieben.</translation>
     <message>
         <location filename="../src/gui/editorwidgets/core/qgssearchwidgetwrapper.cpp" line="71"/>
         <source>Is missing (null)</source>
-        <translation>belegt (NULL)</translation>
+        <translation>fehlt (null)</translation>
     </message>
     <message>
         <location filename="../src/gui/editorwidgets/core/qgssearchwidgetwrapper.cpp" line="73"/>
         <source>Is not missing (not null)</source>
-        <translation>Nicht belegt (NOT NULL)</translation>
+        <translation>vorhanden (nicht null)</translation>
     </message>
     <message>
         <location filename="../src/gui/editorwidgets/core/qgssearchwidgetwrapper.cpp" line="75"/>
diff --git a/images/icons/qgis-icon-macos.png b/images/icons/qgis-icon-macos.png
new file mode 100644
index 0000000..8a779e6
Binary files /dev/null and b/images/icons/qgis-icon-macos.png differ
diff --git a/ms-windows/osgeo4w/package-nightly.cmd b/ms-windows/osgeo4w/package-nightly.cmd
index 35de35d..c011630 100644
--- a/ms-windows/osgeo4w/package-nightly.cmd
+++ b/ms-windows/osgeo4w/package-nightly.cmd
@@ -184,16 +184,15 @@ cmake %CMAKE_OPT% ^
 	-D CMAKE_INSTALL_PREFIX=%O4W_ROOT%/apps/%PACKAGENAME% ^
 	-D FCGI_INCLUDE_DIR=%O4W_ROOT%/include ^
 	-D FCGI_LIBRARY=%O4W_ROOT%/lib/libfcgi.lib ^
-	-D WITH_INTERNAL_JINJA2=FALSE ^
 	-D WITH_INTERNAL_MARKUPSAFE=FALSE ^
-	-D WITH_INTERNAL_PYGMENTS=FALSE ^
 	-D WITH_INTERNAL_DATEUTIL=FALSE ^
 	-D WITH_INTERNAL_PYTZ=FALSE ^
 	-D WITH_INTERNAL_SIX=FALSE ^
 	-D WITH_INTERNAL_NOSE2=FALSE ^
 	-D WITH_INTERNAL_MOCK=FALSE ^
-	-D WITH_INTERNAL_HTTPLIB2=FALSE ^
 	-D WITH_INTERNAL_FUTURE=FALSE ^
+	-D WITH_INTERNAL_YAML=FALSE ^
+	-D WITH_INTERNAL_OWSLIB=FALSE ^
 	%SRCDIR%
 if errorlevel 1 (echo cmake failed & goto error)
 
diff --git a/ms-windows/osgeo4w/package.cmd b/ms-windows/osgeo4w/package.cmd
index e8cabcd..0a2e3d2 100644
--- a/ms-windows/osgeo4w/package.cmd
+++ b/ms-windows/osgeo4w/package.cmd
@@ -181,13 +181,15 @@ cmake %CMAKE_OPT% ^
 	-D CMAKE_INSTALL_PREFIX=%O4W_ROOT%/apps/%PACKAGENAME% ^
 	-D FCGI_INCLUDE_DIR=%O4W_ROOT%/include ^
 	-D FCGI_LIBRARY=%O4W_ROOT%/lib/libfcgi.lib ^
-	-D WITH_INTERNAL_JINJA2=FALSE ^
 	-D WITH_INTERNAL_MARKUPSAFE=FALSE ^
-	-D WITH_INTERNAL_PYGMENTS=FALSE ^
 	-D WITH_INTERNAL_DATEUTIL=FALSE ^
 	-D WITH_INTERNAL_PYTZ=FALSE ^
 	-D WITH_INTERNAL_SIX=FALSE ^
+	-D WITH_INTERNAL_NOSE2=FALSE ^
+	-D WITH_INTERNAL_MOCK=FALSE ^
 	-D WITH_INTERNAL_FUTURE=FALSE ^
+	-D WITH_INTERNAL_YAML=FALSE ^
+	-D WITH_INTERNAL_OWSLIB=FALSE ^
 	%SRCDIR%
 if errorlevel 1 (echo cmake failed & goto error)
 
diff --git a/python/core/core.sip b/python/core/core.sip
index 8f0b6af..41f5450 100644
--- a/python/core/core.sip
+++ b/python/core/core.sip
@@ -3,6 +3,7 @@
 
 %Feature QT5_SUPPORT
 
+%Import QtCore/QtCoremod.sip
 %Import QtXml/QtXmlmod.sip
 %Import QtNetwork/QtNetworkmod.sip
 %Import QtSql/QtSqlmod.sip
diff --git a/python/core/layertree/qgslayertreegroup.sip b/python/core/layertree/qgslayertreegroup.sip
index 1725c64..df05bdf 100644
--- a/python/core/layertree/qgslayertreegroup.sip
+++ b/python/core/layertree/qgslayertreegroup.sip
@@ -55,12 +55,23 @@ class QgsLayerTreeGroup : QgsLayerTreeNode
     //! Find group node with specified name. Searches recursively the whole sub-tree.
     QgsLayerTreeGroup* findGroup( const QString& name );
 
-    //! Read group (tree) from XML element <layer-tree-group> and return the newly created group (or null on error)
-    static QgsLayerTreeGroup* readXML( QDomElement& element ) /Factory/;
+    /**
+     * Read group (tree) from XML element <layer-tree-group> and return the newly
+     * created group (or null on error). If the looseMatch
+     * parameter is true then child legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeGroup* readXML( QDomElement& element, bool looseMatch = false ) /Factory/;
+
     //! Write group (tree) as XML element <layer-tree-group> and add it to the given parent element
     virtual void writeXML( QDomElement& parentElement );
-    //! Read children from XML and append them to the group.
-    void readChildrenFromXML( QDomElement& element );
+
+    /**
+     * Read children from XML and append them to the group. If the looseMatch
+     * parameter is true then legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    void readChildrenFromXML( QDomElement& element, bool looseMatch = false );
 
     //! Return text representation of the tree. For debugging purposes only.
     virtual QString dump() const;
diff --git a/python/core/layertree/qgslayertreelayer.sip b/python/core/layertree/qgslayertreelayer.sip
index 21b694b..dcf1440 100644
--- a/python/core/layertree/qgslayertreelayer.sip
+++ b/python/core/layertree/qgslayertreelayer.sip
@@ -21,10 +21,30 @@ class QgsLayerTreeLayer : QgsLayerTreeNode
 %TypeHeaderCode
 #include <qgslayertreelayer.h>
 %End
+  public:
+
+    //! Parameters for loose layer matching
+    struct LayerMatchParams
+    {
+      //! Layer public source
+      QString source;
+      //! Layer name
+      QString name;
+      //! Provider
+      QString providerKey;
+    };
 
   public:
     explicit QgsLayerTreeLayer( QgsMapLayer* layer );
 
+    /**
+     * Creates a layer node which will attach to a layer with matching
+     * parameters. This can be used for "looser" layer matching,
+     * avoiding the usual layer id check in favour of attaching to any layer
+     * with an equal source/name/provider.
+     */
+    static QgsLayerTreeLayer* createLayerFromParams( const LayerMatchParams& source ) /Factory/;
+
     explicit QgsLayerTreeLayer( const QString& layerId, const QString& name = QString() );
 
     QString layerId() const;
@@ -38,13 +58,27 @@ class QgsLayerTreeLayer : QgsLayerTreeNode
     //! @note added in 2.18.1
     void setName( const QString& n );
 
+    /**
+     * Attempts to attach this layer node to a layer with a matching
+     * QgsMapLayer::publicSource(). This can be used for "looser" layer matching,
+     * avoiding the usual layer id check in favour of attaching to any layer
+     * with an equal source.
+     */
+    void attachToSource( const LayerMatchParams &source );
+
     QString layerName() const;
     void setLayerName( const QString& n );
 
     Qt::CheckState isVisible() const;
     void setVisible( Qt::CheckState visible );
 
-    static QgsLayerTreeLayer* readXML( QDomElement& element ) /Factory/;
+    /**
+     * Creates a new layer from an XML definition. If the looseMatch
+     * parameter is true then legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeLayer* readXML( QDomElement& element, bool looseMatch = false ) /Factory/;
+
     virtual void writeXML( QDomElement& parentElement );
 
     virtual QString dump() const;
diff --git a/python/core/layertree/qgslayertreenode.sip b/python/core/layertree/qgslayertreenode.sip
index abf9cb3..813aee2 100644
--- a/python/core/layertree/qgslayertreenode.sip
+++ b/python/core/layertree/qgslayertreenode.sip
@@ -83,8 +83,13 @@ class QgsLayerTreeNode : QObject
     //! @note added in 2.18.1
     virtual void setName( const QString& name ) = 0;
 
-    //! Read layer tree from XML. Returns new instance
-    static QgsLayerTreeNode *readXML( QDomElement &element );
+    /**
+     * Read layer tree from XML. Returns new instance. If the looseMatch
+     * parameter is true then child legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeNode *readXML( QDomElement &element, bool looseMatch = false ) /Factory/;
+
     //! Write layer tree to XML
     virtual void writeXML( QDomElement &parentElement ) = 0;
 
diff --git a/python/core/qgsmaprenderercustompainterjob.sip b/python/core/qgsmaprenderercustompainterjob.sip
index a5ed198..1e3fe34 100644
--- a/python/core/qgsmaprenderercustompainterjob.sip
+++ b/python/core/qgsmaprenderercustompainterjob.sip
@@ -19,6 +19,7 @@ class QgsMapRendererCustomPainterJob : QgsMapRendererJob
 
     virtual void start();
     virtual void cancel();
+    virtual void cancelWithoutBlocking();
     virtual void waitForFinished();
     virtual bool isActive() const;
     virtual QgsLabelingResults* takeLabelingResults() /Transfer/;
diff --git a/python/core/qgsmaprendererjob.sip b/python/core/qgsmaprendererjob.sip
index 83514f7..336cd62 100644
--- a/python/core/qgsmaprendererjob.sip
+++ b/python/core/qgsmaprendererjob.sip
@@ -56,6 +56,8 @@ class QgsMapRendererJob : QObject
     //! Does nothing if the rendering is not active.
     virtual void cancel() = 0;
 
+    virtual void cancelWithoutBlocking() = 0;
+
     //! Block until the job has finished.
     virtual void waitForFinished() = 0;
 
diff --git a/python/core/qgsmaprendererparalleljob.sip b/python/core/qgsmaprendererparalleljob.sip
index 089e5ec..d87e83a 100644
--- a/python/core/qgsmaprendererparalleljob.sip
+++ b/python/core/qgsmaprendererparalleljob.sip
@@ -18,6 +18,7 @@ class QgsMapRendererParallelJob : QgsMapRendererQImageJob
 
     virtual void start();
     virtual void cancel();
+    virtual void cancelWithoutBlocking();
     virtual void waitForFinished();
     virtual bool isActive() const;
 
diff --git a/python/core/qgsmaprenderersequentialjob.sip b/python/core/qgsmaprenderersequentialjob.sip
index d511d82..5e8c95e 100644
--- a/python/core/qgsmaprenderersequentialjob.sip
+++ b/python/core/qgsmaprenderersequentialjob.sip
@@ -19,6 +19,7 @@ class QgsMapRendererSequentialJob : QgsMapRendererQImageJob
 
     virtual void start();
     virtual void cancel();
+    virtual void cancelWithoutBlocking();
     virtual void waitForFinished();
     virtual bool isActive() const;
 
diff --git a/python/core/qgsvectorlayercache.sip b/python/core/qgsvectorlayercache.sip
index 9e74539..cb758fb 100644
--- a/python/core/qgsvectorlayercache.sip
+++ b/python/core/qgsvectorlayercache.sip
@@ -33,19 +33,10 @@ class QgsVectorLayerCache : QObject
      */
     int cacheSize();
 
-    /**
-     * Enable or disable the caching of geometries
-     *
-     * @param cacheGeometry    Enable or disable the caching of geometries
-     */
     void setCacheGeometry( bool cacheGeometry );
 
+    bool cacheGeometry() const;
 
-    /**
-     * Set the subset of attributes to be cached
-     *
-     * @param attributes   The attributes to be cached
-     */
     void setCacheSubsetOfAttributes( const QgsAttributeList& attributes );
 
     /**
diff --git a/python/ext-libs/CMakeLists.txt b/python/ext-libs/CMakeLists.txt
index 6e609a9..709aaa5 100644
--- a/python/ext-libs/CMakeLists.txt
+++ b/python/ext-libs/CMakeLists.txt
@@ -33,7 +33,7 @@ MACRO(EXT_PYLIB lib)
   ENDIF(WITH_INTERNAL_${ulib})
 ENDMACRO(EXT_PYLIB lib)
 
-FOREACH(pkg httplib2 jinja2 markupsafe owslib pygments dateutil pytz yaml nose2)
+FOREACH(pkg markupsafe owslib dateutil pytz yaml nose2)
   EXT_PYLIB(${pkg})
 ENDFOREACH(pkg)
 
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/PKG-INFO b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/PKG-INFO
deleted file mode 100644
index 0e72247..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/PKG-INFO
+++ /dev/null
@@ -1,55 +0,0 @@
-Metadata-Version: 1.1
-Name: Jinja2
-Version: 2.7.2
-Summary: A small but fast and easy to use stand-alone template engine written in pure python.
-Home-page: http://jinja.pocoo.org/
-Author: Armin Ronacher
-Author-email: armin.ronacher at active-4.com
-License: BSD
-Description: 
-        Jinja2
-        ~~~~~~
-        
-        Jinja2 is a template engine written in pure Python.  It provides a
-        `Django`_ inspired non-XML syntax but supports inline expressions and
-        an optional `sandboxed`_ environment.
-        
-        Nutshell
-        --------
-        
-        Here a small example of a Jinja template::
-        
-            {% extends 'base.html' %}
-            {% block title %}Memberlist{% endblock %}
-            {% block content %}
-              <ul>
-              {% for user in users %}
-                <li><a href="{{ user.url }}">{{ user.username }}</a></li>
-              {% endfor %}
-              </ul>
-            {% endblock %}
-        
-        Philosophy
-        ----------
-        
-        Application logic is for the controller but don't try to make the life
-        for the template designer too hard by giving him too few functionality.
-        
-        For more information visit the new `Jinja2 webpage`_ and `documentation`_.
-        
-        .. _sandboxed: http://en.wikipedia.org/wiki/Sandbox_(computer_security)
-        .. _Django: http://www.djangoproject.com/
-        .. _Jinja2 webpage: http://jinja.pocoo.org/
-        .. _documentation: http://jinja.pocoo.org/2/documentation/
-        
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Web Environment
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: Text Processing :: Markup :: HTML
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/SOURCES.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/SOURCES.txt
deleted file mode 100644
index a27a9c4..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,126 +0,0 @@
-AUTHORS
-CHANGES
-LICENSE
-MANIFEST.in
-Makefile
-README.rst
-run-tests.py
-setup.cfg
-setup.py
-Jinja2.egg-info/PKG-INFO
-Jinja2.egg-info/SOURCES.txt
-Jinja2.egg-info/dependency_links.txt
-Jinja2.egg-info/entry_points.txt
-Jinja2.egg-info/not-zip-safe
-Jinja2.egg-info/requires.txt
-Jinja2.egg-info/top_level.txt
-artwork/jinjalogo.svg
-docs/Makefile
-docs/api.rst
-docs/cache_extension.py
-docs/changelog.rst
-docs/conf.py
-docs/contents.rst.inc
-docs/extensions.rst
-docs/faq.rst
-docs/index.rst
-docs/integration.rst
-docs/intro.rst
-docs/jinjaext.py
-docs/jinjastyle.sty
-docs/latexindex.rst
-docs/logo.pdf
-docs/sandbox.rst
-docs/switching.rst
-docs/templates.rst
-docs/tricks.rst
-docs/_static/.ignore
-docs/_static/jinja-small.png
-docs/_templates/sidebarintro.html
-docs/_templates/sidebarlogo.html
-docs/_themes/LICENSE
-docs/_themes/README
-docs/_themes/jinja/layout.html
-docs/_themes/jinja/relations.html
-docs/_themes/jinja/theme.conf
-docs/_themes/jinja/static/jinja.css_t
-examples/bench.py
-examples/profile.py
-examples/basic/cycle.py
-examples/basic/debugger.py
-examples/basic/inheritance.py
-examples/basic/test.py
-examples/basic/test_filter_and_linestatements.py
-examples/basic/test_loop_filter.py
-examples/basic/translate.py
-examples/basic/templates/broken.html
-examples/basic/templates/subbroken.html
-examples/rwbench/djangoext.py
-examples/rwbench/rwbench.py
-examples/rwbench/django/_form.html
-examples/rwbench/django/_input_field.html
-examples/rwbench/django/_textarea.html
-examples/rwbench/django/index.html
-examples/rwbench/django/layout.html
-examples/rwbench/genshi/helpers.html
-examples/rwbench/genshi/index.html
-examples/rwbench/genshi/layout.html
-examples/rwbench/jinja/helpers.html
-examples/rwbench/jinja/index.html
-examples/rwbench/jinja/layout.html
-examples/rwbench/mako/helpers.html
-examples/rwbench/mako/index.html
-examples/rwbench/mako/layout.html
-ext/djangojinja2.py
-ext/inlinegettext.py
-ext/jinja.el
-ext/Vim/jinja.vim
-ext/django2jinja/django2jinja.py
-ext/django2jinja/example.py
-ext/django2jinja/templates/index.html
-ext/django2jinja/templates/layout.html
-ext/django2jinja/templates/subtemplate.html
-jinja2/__init__.py
-jinja2/_compat.py
-jinja2/_stringdefs.py
-jinja2/bccache.py
-jinja2/compiler.py
-jinja2/constants.py
-jinja2/debug.py
-jinja2/defaults.py
-jinja2/environment.py
-jinja2/exceptions.py
-jinja2/ext.py
-jinja2/filters.py
-jinja2/lexer.py
-jinja2/loaders.py
-jinja2/meta.py
-jinja2/nodes.py
-jinja2/optimizer.py
-jinja2/parser.py
-jinja2/runtime.py
-jinja2/sandbox.py
-jinja2/tests.py
-jinja2/utils.py
-jinja2/visitor.py
-jinja2/testsuite/__init__.py
-jinja2/testsuite/api.py
-jinja2/testsuite/bytecode_cache.py
-jinja2/testsuite/core_tags.py
-jinja2/testsuite/debug.py
-jinja2/testsuite/doctests.py
-jinja2/testsuite/ext.py
-jinja2/testsuite/filters.py
-jinja2/testsuite/imports.py
-jinja2/testsuite/inheritance.py
-jinja2/testsuite/lexnparse.py
-jinja2/testsuite/loader.py
-jinja2/testsuite/regression.py
-jinja2/testsuite/security.py
-jinja2/testsuite/tests.py
-jinja2/testsuite/utils.py
-jinja2/testsuite/res/__init__.py
-jinja2/testsuite/res/templates/broken.html
-jinja2/testsuite/res/templates/syntaxerror.html
-jinja2/testsuite/res/templates/test.html
-jinja2/testsuite/res/templates/foo/test.html
\ No newline at end of file
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/dependency_links.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/entry_points.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/entry_points.txt
deleted file mode 100644
index 32e6b75..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/entry_points.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
-    [babel.extractors]
-    jinja2 = jinja2.ext:babel_extract[i18n]
-    
\ No newline at end of file
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/installed-files.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/installed-files.txt
deleted file mode 100644
index 2e95a00..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/installed-files.txt
+++ /dev/null
@@ -1,92 +0,0 @@
-../jinja2/utils.py
-../jinja2/lexer.py
-../jinja2/compiler.py
-../jinja2/bccache.py
-../jinja2/debug.py
-../jinja2/exceptions.py
-../jinja2/loaders.py
-../jinja2/parser.py
-../jinja2/tests.py
-../jinja2/filters.py
-../jinja2/defaults.py
-../jinja2/environment.py
-../jinja2/constants.py
-../jinja2/meta.py
-../jinja2/sandbox.py
-../jinja2/nodes.py
-../jinja2/visitor.py
-../jinja2/__init__.py
-../jinja2/_stringdefs.py
-../jinja2/ext.py
-../jinja2/optimizer.py
-../jinja2/runtime.py
-../jinja2/_compat.py
-../jinja2/testsuite/utils.py
-../jinja2/testsuite/api.py
-../jinja2/testsuite/debug.py
-../jinja2/testsuite/tests.py
-../jinja2/testsuite/loader.py
-../jinja2/testsuite/bytecode_cache.py
-../jinja2/testsuite/filters.py
-../jinja2/testsuite/imports.py
-../jinja2/testsuite/lexnparse.py
-../jinja2/testsuite/security.py
-../jinja2/testsuite/doctests.py
-../jinja2/testsuite/__init__.py
-../jinja2/testsuite/core_tags.py
-../jinja2/testsuite/regression.py
-../jinja2/testsuite/inheritance.py
-../jinja2/testsuite/ext.py
-../jinja2/testsuite/res/__init__.py
-../jinja2/testsuite/res/templates/broken.html
-../jinja2/testsuite/res/templates/syntaxerror.html
-../jinja2/testsuite/res/templates/test.html
-../jinja2/testsuite/res/templates/foo/test.html
-../jinja2/utils.pyc
-../jinja2/lexer.pyc
-../jinja2/compiler.pyc
-../jinja2/bccache.pyc
-../jinja2/debug.pyc
-../jinja2/exceptions.pyc
-../jinja2/loaders.pyc
-../jinja2/parser.pyc
-../jinja2/tests.pyc
-../jinja2/filters.pyc
-../jinja2/defaults.pyc
-../jinja2/environment.pyc
-../jinja2/constants.pyc
-../jinja2/meta.pyc
-../jinja2/sandbox.pyc
-../jinja2/nodes.pyc
-../jinja2/visitor.pyc
-../jinja2/__init__.pyc
-../jinja2/_stringdefs.pyc
-../jinja2/ext.pyc
-../jinja2/optimizer.pyc
-../jinja2/runtime.pyc
-../jinja2/_compat.pyc
-../jinja2/testsuite/utils.pyc
-../jinja2/testsuite/api.pyc
-../jinja2/testsuite/debug.pyc
-../jinja2/testsuite/tests.pyc
-../jinja2/testsuite/loader.pyc
-../jinja2/testsuite/bytecode_cache.pyc
-../jinja2/testsuite/filters.pyc
-../jinja2/testsuite/imports.pyc
-../jinja2/testsuite/lexnparse.pyc
-../jinja2/testsuite/security.pyc
-../jinja2/testsuite/doctests.pyc
-../jinja2/testsuite/__init__.pyc
-../jinja2/testsuite/core_tags.pyc
-../jinja2/testsuite/regression.pyc
-../jinja2/testsuite/inheritance.pyc
-../jinja2/testsuite/ext.pyc
-../jinja2/testsuite/res/__init__.pyc
-./
-SOURCES.txt
-entry_points.txt
-not-zip-safe
-PKG-INFO
-dependency_links.txt
-top_level.txt
-requires.txt
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/not-zip-safe b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/requires.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/requires.txt
deleted file mode 100644
index ccd0e92..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/requires.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-markupsafe
-
-[i18n]
-Babel>=0.8
\ No newline at end of file
diff --git a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/top_level.txt b/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/top_level.txt
deleted file mode 100644
index 7f7afbf..0000000
--- a/python/ext-libs/Jinja2-2.7.2-py2.7.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jinja2
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/PKG-INFO b/python/ext-libs/Pygments-1.6-py2.7.egg-info/PKG-INFO
deleted file mode 100644
index 6e9739d..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/PKG-INFO
+++ /dev/null
@@ -1,46 +0,0 @@
-Metadata-Version: 1.1
-Name: Pygments
-Version: 1.6
-Summary: Pygments is a syntax highlighting package written in Python.
-Home-page: http://pygments.org/
-Author: Georg Brandl
-Author-email: georg at python.org
-License: BSD License
-Description: 
-            Pygments
-            ~~~~~~~~
-        
-            Pygments is a syntax highlighting package written in Python.
-        
-            It is a generic syntax highlighter for general use in all kinds of software
-            such as forum systems, wikis or other applications that need to prettify
-            source code. Highlights are:
-        
-            * a wide range of common languages and markup formats is supported
-            * special attention is paid to details, increasing quality by a fair amount
-            * support for new languages and formats are added easily
-            * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image       formats that PIL supports and ANSI sequences
-            * it is usable as a command-line tool and as a library
-            * ... and it highlights even Brainfuck!
-        
-            The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
-        
-            .. _Pygments tip:
-               http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev
-        
-            :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-            :license: BSD, see LICENSE for details.
-        
-Keywords: syntax highlighting
-Platform: any
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Intended Audience :: Developers
-Classifier: Intended Audience :: End Users/Desktop
-Classifier: Intended Audience :: System Administrators
-Classifier: Development Status :: 6 - Mature
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 3
-Classifier: Operating System :: OS Independent
-Classifier: Topic :: Text Processing :: Filters
-Classifier: Topic :: Utilities
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/SOURCES.txt b/python/ext-libs/Pygments-1.6-py2.7.egg-info/SOURCES.txt
deleted file mode 100644
index dd49c40..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,416 +0,0 @@
-AUTHORS
-CHANGES
-LICENSE
-MANIFEST.in
-Makefile
-TODO
-ez_setup.py
-pygmentize
-setup.cfg
-setup.py
-Pygments.egg-info/PKG-INFO
-Pygments.egg-info/SOURCES.txt
-Pygments.egg-info/dependency_links.txt
-Pygments.egg-info/entry_points.txt
-Pygments.egg-info/not-zip-safe
-Pygments.egg-info/top_level.txt
-docs/generate.py
-docs/pygmentize.1
-docs/build/api.html
-docs/build/authors.html
-docs/build/changelog.html
-docs/build/cmdline.html
-docs/build/filterdevelopment.html
-docs/build/filters.html
-docs/build/formatterdevelopment.html
-docs/build/formatters.html
-docs/build/index.html
-docs/build/installation.html
-docs/build/integrate.html
-docs/build/java.html
-docs/build/lexerdevelopment.html
-docs/build/lexers.html
-docs/build/moinmoin.html
-docs/build/plugins.html
-docs/build/quickstart.html
-docs/build/rstdirective.html
-docs/build/styles.html
-docs/build/tokens.html
-docs/build/unicode.html
-docs/src/api.txt
-docs/src/authors.txt
-docs/src/changelog.txt
-docs/src/cmdline.txt
-docs/src/filterdevelopment.txt
-docs/src/filters.txt
-docs/src/formatterdevelopment.txt
-docs/src/formatters.txt
-docs/src/index.txt
-docs/src/installation.txt
-docs/src/integrate.txt
-docs/src/java.txt
-docs/src/lexerdevelopment.txt
-docs/src/lexers.txt
-docs/src/moinmoin.txt
-docs/src/plugins.txt
-docs/src/quickstart.txt
-docs/src/rstdirective.txt
-docs/src/styles.txt
-docs/src/tokens.txt
-docs/src/unicode.txt
-external/autopygmentize
-external/lasso-builtins-generator-9.lasso
-external/markdown-processor.py
-external/moin-parser.py
-external/pygments.bashcomp
-external/rst-directive-old.py
-external/rst-directive.py
-pygments/__init__.py
-pygments/cmdline.py
-pygments/console.py
-pygments/filter.py
-pygments/formatter.py
-pygments/lexer.py
-pygments/plugin.py
-pygments/scanner.py
-pygments/style.py
-pygments/token.py
-pygments/unistring.py
-pygments/util.py
-pygments/filters/__init__.py
-pygments/formatters/__init__.py
-pygments/formatters/_mapping.py
-pygments/formatters/bbcode.py
-pygments/formatters/html.py
-pygments/formatters/img.py
-pygments/formatters/latex.py
-pygments/formatters/other.py
-pygments/formatters/rtf.py
-pygments/formatters/svg.py
-pygments/formatters/terminal.py
-pygments/formatters/terminal256.py
-pygments/lexers/__init__.py
-pygments/lexers/_asybuiltins.py
-pygments/lexers/_clbuiltins.py
-pygments/lexers/_lassobuiltins.py
-pygments/lexers/_luabuiltins.py
-pygments/lexers/_mapping.py
-pygments/lexers/_openedgebuiltins.py
-pygments/lexers/_phpbuiltins.py
-pygments/lexers/_postgres_builtins.py
-pygments/lexers/_robotframeworklexer.py
-pygments/lexers/_scilab_builtins.py
-pygments/lexers/_sourcemodbuiltins.py
-pygments/lexers/_stan_builtins.py
-pygments/lexers/_vimbuiltins.py
-pygments/lexers/agile.py
-pygments/lexers/asm.py
-pygments/lexers/compiled.py
-pygments/lexers/dalvik.py
-pygments/lexers/dotnet.py
-pygments/lexers/foxpro.py
-pygments/lexers/functional.py
-pygments/lexers/hdl.py
-pygments/lexers/jvm.py
-pygments/lexers/math.py
-pygments/lexers/other.py
-pygments/lexers/parsers.py
-pygments/lexers/shell.py
-pygments/lexers/special.py
-pygments/lexers/sql.py
-pygments/lexers/templates.py
-pygments/lexers/text.py
-pygments/lexers/web.py
-pygments/styles/__init__.py
-pygments/styles/autumn.py
-pygments/styles/borland.py
-pygments/styles/bw.py
-pygments/styles/colorful.py
-pygments/styles/default.py
-pygments/styles/emacs.py
-pygments/styles/friendly.py
-pygments/styles/fruity.py
-pygments/styles/manni.py
-pygments/styles/monokai.py
-pygments/styles/murphy.py
-pygments/styles/native.py
-pygments/styles/pastie.py
-pygments/styles/perldoc.py
-pygments/styles/rrt.py
-pygments/styles/tango.py
-pygments/styles/trac.py
-pygments/styles/vim.py
-pygments/styles/vs.py
-scripts/check_sources.py
-scripts/detect_missing_analyse_text.py
-scripts/epydoc.css
-scripts/find_codetags.py
-scripts/find_error.py
-scripts/get_vimkw.py
-scripts/pylintrc
-scripts/reindent.py
-scripts/vim2pygments.py
-tests/old_run.py
-tests/run.py
-tests/support.py
-tests/test_basic_api.py
-tests/test_clexer.py
-tests/test_cmdline.py
-tests/test_examplefiles.py
-tests/test_html_formatter.py
-tests/test_latex_formatter.py
-tests/test_perllexer.py
-tests/test_regexlexer.py
-tests/test_token.py
-tests/test_using_api.py
-tests/test_util.py
-tests/dtds/HTML4-f.dtd
-tests/dtds/HTML4-s.dtd
-tests/dtds/HTML4.dcl
-tests/dtds/HTML4.dtd
-tests/dtds/HTML4.soc
-tests/dtds/HTMLlat1.ent
-tests/dtds/HTMLspec.ent
-tests/dtds/HTMLsym.ent
-tests/examplefiles/ANTLRv3.g
-tests/examplefiles/AcidStateAdvanced.hs
-tests/examplefiles/AlternatingGroup.mu
-tests/examplefiles/BOM.js
-tests/examplefiles/CPDictionary.j
-tests/examplefiles/Config.in.cache
-tests/examplefiles/Constants.mo
-tests/examplefiles/DancingSudoku.lhs
-tests/examplefiles/Errors.scala
-tests/examplefiles/File.hy
-tests/examplefiles/Intro.java
-tests/examplefiles/Makefile
-tests/examplefiles/Object.st
-tests/examplefiles/OrderedMap.hx
-tests/examplefiles/SmallCheck.hs
-tests/examplefiles/Sorting.mod
-tests/examplefiles/Sudoku.lhs
-tests/examplefiles/addressbook.proto
-tests/examplefiles/antlr_throws
-tests/examplefiles/apache2.conf
-tests/examplefiles/as3_test.as
-tests/examplefiles/as3_test2.as
-tests/examplefiles/as3_test3.as
-tests/examplefiles/aspx-cs_example
-tests/examplefiles/autoit_submit.au3
-tests/examplefiles/badcase.java
-tests/examplefiles/batchfile.bat
-tests/examplefiles/bigtest.nsi
-tests/examplefiles/boot-9.scm
-tests/examplefiles/ca65_example
-tests/examplefiles/cbmbas_example
-tests/examplefiles/cells.ps
-tests/examplefiles/ceval.c
-tests/examplefiles/cheetah_example.html
-tests/examplefiles/classes.dylan
-tests/examplefiles/condensed_ruby.rb
-tests/examplefiles/coq_RelationClasses
-tests/examplefiles/database.pytb
-tests/examplefiles/de.MoinMoin.po
-tests/examplefiles/demo.ahk
-tests/examplefiles/demo.cfm
-tests/examplefiles/django_sample.html+django
-tests/examplefiles/dwarf.cw
-tests/examplefiles/erl_session
-tests/examplefiles/escape_semicolon.clj
-tests/examplefiles/evil_regex.js
-tests/examplefiles/example.Rd
-tests/examplefiles/example.bug
-tests/examplefiles/example.c
-tests/examplefiles/example.ceylon
-tests/examplefiles/example.cls
-tests/examplefiles/example.cob
-tests/examplefiles/example.cpp
-tests/examplefiles/example.gs
-tests/examplefiles/example.gst
-tests/examplefiles/example.jag
-tests/examplefiles/example.kt
-tests/examplefiles/example.lua
-tests/examplefiles/example.monkey
-tests/examplefiles/example.moo
-tests/examplefiles/example.moon
-tests/examplefiles/example.msc
-tests/examplefiles/example.nim
-tests/examplefiles/example.ns2
-tests/examplefiles/example.p
-tests/examplefiles/example.pas
-tests/examplefiles/example.prg
-tests/examplefiles/example.rb
-tests/examplefiles/example.reg
-tests/examplefiles/example.rhtml
-tests/examplefiles/example.rkt
-tests/examplefiles/example.rpf
-tests/examplefiles/example.sh-session
-tests/examplefiles/example.shell-session
-tests/examplefiles/example.sml
-tests/examplefiles/example.snobol
-tests/examplefiles/example.stan
-tests/examplefiles/example.tea
-tests/examplefiles/example.ts
-tests/examplefiles/example.u
-tests/examplefiles/example.weechatlog
-tests/examplefiles/example.xhtml
-tests/examplefiles/example.xtend
-tests/examplefiles/example.yaml
-tests/examplefiles/example2.aspx
-tests/examplefiles/example2.msc
-tests/examplefiles/example_elixir.ex
-tests/examplefiles/example_file.fy
-tests/examplefiles/firefox.mak
-tests/examplefiles/flipflop.sv
-tests/examplefiles/foo.sce
-tests/examplefiles/format.ml
-tests/examplefiles/fucked_up.rb
-tests/examplefiles/function.mu
-tests/examplefiles/functional.rst
-tests/examplefiles/garcia-wachs.kk
-tests/examplefiles/genclass.clj
-tests/examplefiles/genshi_example.xml+genshi
-tests/examplefiles/genshitext_example.genshitext
-tests/examplefiles/glsl.frag
-tests/examplefiles/glsl.vert
-tests/examplefiles/hello.smali
-tests/examplefiles/hello.sp
-tests/examplefiles/html+php_faulty.php
-tests/examplefiles/http_request_example
-tests/examplefiles/http_response_example
-tests/examplefiles/import.hs
-tests/examplefiles/inet_pton6.dg
-tests/examplefiles/intro.ik
-tests/examplefiles/ints.php
-tests/examplefiles/intsyn.fun
-tests/examplefiles/intsyn.sig
-tests/examplefiles/irb_heredoc
-tests/examplefiles/irc.lsp
-tests/examplefiles/java.properties
-tests/examplefiles/jbst_example1.jbst
-tests/examplefiles/jbst_example2.jbst
-tests/examplefiles/jinjadesignerdoc.rst
-tests/examplefiles/json.lasso
-tests/examplefiles/json.lasso9
-tests/examplefiles/lighttpd_config.conf
-tests/examplefiles/linecontinuation.py
-tests/examplefiles/livescript-demo.ls
-tests/examplefiles/logos_example.xm
-tests/examplefiles/ltmain.sh
-tests/examplefiles/main.cmake
-tests/examplefiles/markdown.lsp
-tests/examplefiles/matlab_noreturn
-tests/examplefiles/matlab_sample
-tests/examplefiles/matlabsession_sample.txt
-tests/examplefiles/metagrammar.treetop
-tests/examplefiles/mg_sample.pro
-tests/examplefiles/minehunt.qml
-tests/examplefiles/minimal.ns2
-tests/examplefiles/moin_SyntaxReference.txt
-tests/examplefiles/multiline_regexes.rb
-tests/examplefiles/nanomsg.intr
-tests/examplefiles/nasm_aoutso.asm
-tests/examplefiles/nasm_objexe.asm
-tests/examplefiles/nemerle_sample.n
-tests/examplefiles/nginx_nginx.conf
-tests/examplefiles/numbers.c
-tests/examplefiles/objc_example.m
-tests/examplefiles/objc_example2.m
-tests/examplefiles/perl_misc
-tests/examplefiles/perl_perl5db
-tests/examplefiles/perl_regex-delims
-tests/examplefiles/perlfunc.1
-tests/examplefiles/phpMyAdmin.spec
-tests/examplefiles/phpcomplete.vim
-tests/examplefiles/pleac.in.rb
-tests/examplefiles/postgresql_test.txt
-tests/examplefiles/pppoe.applescript
-tests/examplefiles/psql_session.txt
-tests/examplefiles/py3_test.txt
-tests/examplefiles/pycon_test.pycon
-tests/examplefiles/pytb_test2.pytb
-tests/examplefiles/pytb_test3.pytb
-tests/examplefiles/python25-bsd.mak
-tests/examplefiles/qsort.prolog
-tests/examplefiles/r-console-transcript.Rout
-tests/examplefiles/ragel-cpp_rlscan
-tests/examplefiles/ragel-cpp_snippet
-tests/examplefiles/regex.js
-tests/examplefiles/reversi.lsp
-tests/examplefiles/robotframework.txt
-tests/examplefiles/ruby_func_def.rb
-tests/examplefiles/rust_example.rs
-tests/examplefiles/scilab.sci
-tests/examplefiles/session.dylan-console
-tests/examplefiles/sibling.prolog
-tests/examplefiles/simple.md
-tests/examplefiles/smarty_example.html
-tests/examplefiles/source.lgt
-tests/examplefiles/sources.list
-tests/examplefiles/sphere.pov
-tests/examplefiles/sqlite3.sqlite3-console
-tests/examplefiles/squid.conf
-tests/examplefiles/string.jl
-tests/examplefiles/string_delimiters.d
-tests/examplefiles/stripheredoc.sh
-tests/examplefiles/test.R
-tests/examplefiles/test.adb
-tests/examplefiles/test.asy
-tests/examplefiles/test.awk
-tests/examplefiles/test.bas
-tests/examplefiles/test.bmx
-tests/examplefiles/test.boo
-tests/examplefiles/test.bro
-tests/examplefiles/test.cs
-tests/examplefiles/test.css
-tests/examplefiles/test.cu
-tests/examplefiles/test.d
-tests/examplefiles/test.dart
-tests/examplefiles/test.dtd
-tests/examplefiles/test.ec
-tests/examplefiles/test.ecl
-tests/examplefiles/test.eh
-tests/examplefiles/test.erl
-tests/examplefiles/test.evoque
-tests/examplefiles/test.fan
-tests/examplefiles/test.flx
-tests/examplefiles/test.gdc
-tests/examplefiles/test.groovy
-tests/examplefiles/test.html
-tests/examplefiles/test.ini
-tests/examplefiles/test.java
-tests/examplefiles/test.jsp
-tests/examplefiles/test.maql
-tests/examplefiles/test.mod
-tests/examplefiles/test.moo
-tests/examplefiles/test.myt
-tests/examplefiles/test.nim
-tests/examplefiles/test.opa
-tests/examplefiles/test.pas
-tests/examplefiles/test.php
-tests/examplefiles/test.plot
-tests/examplefiles/test.ps1
-tests/examplefiles/test.pypylog
-tests/examplefiles/test.r3
-tests/examplefiles/test.rb
-tests/examplefiles/test.rhtml
-tests/examplefiles/test.scaml
-tests/examplefiles/test.ssp
-tests/examplefiles/test.tcsh
-tests/examplefiles/test.vb
-tests/examplefiles/test.vhdl
-tests/examplefiles/test.xqy
-tests/examplefiles/test.xsl
-tests/examplefiles/test2.pypylog
-tests/examplefiles/truncated.pytb
-tests/examplefiles/type.lisp
-tests/examplefiles/underscore.coffee
-tests/examplefiles/unicode.applescript
-tests/examplefiles/unicodedoc.py
-tests/examplefiles/unix-io.lid
-tests/examplefiles/webkit-transition.css
-tests/examplefiles/while.pov
-tests/examplefiles/wiki.factor
-tests/examplefiles/xml_example
-tests/examplefiles/zmlrpc.f90
-tests/support/tags
\ No newline at end of file
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/dependency_links.txt b/python/ext-libs/Pygments-1.6-py2.7.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/entry_points.txt b/python/ext-libs/Pygments-1.6-py2.7.egg-info/entry_points.txt
deleted file mode 100644
index 756d801..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/entry_points.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-[console_scripts]
-pygmentize = pygments.cmdline:main
-
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/installed-files.txt b/python/ext-libs/Pygments-1.6-py2.7.egg-info/installed-files.txt
deleted file mode 100644
index 283642e..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/installed-files.txt
+++ /dev/null
@@ -1,160 +0,0 @@
-../pygments/lexer.py
-../pygments/token.py
-../pygments/cmdline.py
-../pygments/util.py
-../pygments/style.py
-../pygments/plugin.py
-../pygments/scanner.py
-../pygments/unistring.py
-../pygments/__init__.py
-../pygments/console.py
-../pygments/filter.py
-../pygments/formatter.py
-../pygments/filters/__init__.py
-../pygments/formatters/rtf.py
-../pygments/formatters/svg.py
-../pygments/formatters/terminal.py
-../pygments/formatters/img.py
-../pygments/formatters/terminal256.py
-../pygments/formatters/latex.py
-../pygments/formatters/other.py
-../pygments/formatters/_mapping.py
-../pygments/formatters/__init__.py
-../pygments/formatters/bbcode.py
-../pygments/formatters/html.py
-../pygments/lexers/dotnet.py
-../pygments/lexers/_phpbuiltins.py
-../pygments/lexers/_robotframeworklexer.py
-../pygments/lexers/_luabuiltins.py
-../pygments/lexers/_postgres_builtins.py
-../pygments/lexers/_sourcemodbuiltins.py
-../pygments/lexers/_asybuiltins.py
-../pygments/lexers/dalvik.py
-../pygments/lexers/compiled.py
-../pygments/lexers/_openedgebuiltins.py
-../pygments/lexers/foxpro.py
-../pygments/lexers/functional.py
-../pygments/lexers/shell.py
-../pygments/lexers/_vimbuiltins.py
-../pygments/lexers/_scilab_builtins.py
-../pygments/lexers/parsers.py
-../pygments/lexers/hdl.py
-../pygments/lexers/other.py
-../pygments/lexers/web.py
-../pygments/lexers/sql.py
-../pygments/lexers/_mapping.py
-../pygments/lexers/jvm.py
-../pygments/lexers/templates.py
-../pygments/lexers/_lassobuiltins.py
-../pygments/lexers/__init__.py
-../pygments/lexers/math.py
-../pygments/lexers/asm.py
-../pygments/lexers/agile.py
-../pygments/lexers/special.py
-../pygments/lexers/_stan_builtins.py
-../pygments/lexers/text.py
-../pygments/lexers/_clbuiltins.py
-../pygments/styles/fruity.py
-../pygments/styles/trac.py
-../pygments/styles/friendly.py
-../pygments/styles/rrt.py
-../pygments/styles/bw.py
-../pygments/styles/monokai.py
-../pygments/styles/colorful.py
-../pygments/styles/default.py
-../pygments/styles/borland.py
-../pygments/styles/manni.py
-../pygments/styles/tango.py
-../pygments/styles/emacs.py
-../pygments/styles/__init__.py
-../pygments/styles/murphy.py
-../pygments/styles/vim.py
-../pygments/styles/native.py
-../pygments/styles/autumn.py
-../pygments/styles/pastie.py
-../pygments/styles/perldoc.py
-../pygments/styles/vs.py
-../pygments/lexer.pyc
-../pygments/token.pyc
-../pygments/cmdline.pyc
-../pygments/util.pyc
-../pygments/style.pyc
-../pygments/plugin.pyc
-../pygments/scanner.pyc
-../pygments/unistring.pyc
-../pygments/__init__.pyc
-../pygments/console.pyc
-../pygments/filter.pyc
-../pygments/formatter.pyc
-../pygments/filters/__init__.pyc
-../pygments/formatters/rtf.pyc
-../pygments/formatters/svg.pyc
-../pygments/formatters/terminal.pyc
-../pygments/formatters/img.pyc
-../pygments/formatters/terminal256.pyc
-../pygments/formatters/latex.pyc
-../pygments/formatters/other.pyc
-../pygments/formatters/_mapping.pyc
-../pygments/formatters/__init__.pyc
-../pygments/formatters/bbcode.pyc
-../pygments/formatters/html.pyc
-../pygments/lexers/dotnet.pyc
-../pygments/lexers/_phpbuiltins.pyc
-../pygments/lexers/_robotframeworklexer.pyc
-../pygments/lexers/_luabuiltins.pyc
-../pygments/lexers/_postgres_builtins.pyc
-../pygments/lexers/_sourcemodbuiltins.pyc
-../pygments/lexers/_asybuiltins.pyc
-../pygments/lexers/dalvik.pyc
-../pygments/lexers/compiled.pyc
-../pygments/lexers/_openedgebuiltins.pyc
-../pygments/lexers/foxpro.pyc
-../pygments/lexers/functional.pyc
-../pygments/lexers/shell.pyc
-../pygments/lexers/_vimbuiltins.pyc
-../pygments/lexers/_scilab_builtins.pyc
-../pygments/lexers/parsers.pyc
-../pygments/lexers/hdl.pyc
-../pygments/lexers/other.pyc
-../pygments/lexers/web.pyc
-../pygments/lexers/sql.pyc
-../pygments/lexers/_mapping.pyc
-../pygments/lexers/jvm.pyc
-../pygments/lexers/templates.pyc
-../pygments/lexers/_lassobuiltins.pyc
-../pygments/lexers/__init__.pyc
-../pygments/lexers/math.pyc
-../pygments/lexers/asm.pyc
-../pygments/lexers/agile.pyc
-../pygments/lexers/special.pyc
-../pygments/lexers/_stan_builtins.pyc
-../pygments/lexers/text.pyc
-../pygments/lexers/_clbuiltins.pyc
-../pygments/styles/fruity.pyc
-../pygments/styles/trac.pyc
-../pygments/styles/friendly.pyc
-../pygments/styles/rrt.pyc
-../pygments/styles/bw.pyc
-../pygments/styles/monokai.pyc
-../pygments/styles/colorful.pyc
-../pygments/styles/default.pyc
-../pygments/styles/borland.pyc
-../pygments/styles/manni.pyc
-../pygments/styles/tango.pyc
-../pygments/styles/emacs.pyc
-../pygments/styles/__init__.pyc
-../pygments/styles/murphy.pyc
-../pygments/styles/vim.pyc
-../pygments/styles/native.pyc
-../pygments/styles/autumn.pyc
-../pygments/styles/pastie.pyc
-../pygments/styles/perldoc.pyc
-../pygments/styles/vs.pyc
-./
-SOURCES.txt
-entry_points.txt
-not-zip-safe
-PKG-INFO
-dependency_links.txt
-top_level.txt
-../../../bin/pygmentize
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/not-zip-safe b/python/ext-libs/Pygments-1.6-py2.7.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python/ext-libs/Pygments-1.6-py2.7.egg-info/top_level.txt b/python/ext-libs/Pygments-1.6-py2.7.egg-info/top_level.txt
deleted file mode 100644
index a9f49e0..0000000
--- a/python/ext-libs/Pygments-1.6-py2.7.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pygments
diff --git a/python/ext-libs/httplib2/CMakeLists.txt b/python/ext-libs/httplib2/CMakeLists.txt
deleted file mode 100644
index f5f6ff6..0000000
--- a/python/ext-libs/httplib2/CMakeLists.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-FILE(GLOB PY_FILES *.py)
-
-PLUGIN_INSTALL(processing ./httplib2 ${PY_FILES})
diff --git a/python/ext-libs/httplib2/__init__.py b/python/ext-libs/httplib2/__init__.py
deleted file mode 100644
index 9ca99f9..0000000
--- a/python/ext-libs/httplib2/__init__.py
+++ /dev/null
@@ -1,1695 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-***************************************************************************
-    __init__.py
-    ---------------------
-    Date                 : November 2006
-    Copyright            : (C) 2012 by Joe Gregorio
-    Email                : joe at bitworking dot org
-***************************************************************************
-*                                                                         *
-*   This program is free software; you can redistribute it and/or modify  *
-*   it under the terms of the GNU General Public License as published by  *
-*   the Free Software Foundation; either version 2 of the License, or     *
-*   (at your option) any later version.                                   *
-*                                                                         *
-***************************************************************************
-"""
-
-from __future__ import generators
-
-"""
-httplib2
-
-A caching http interface that supports ETags and gzip
-to conserve bandwidth.
-
-Requires Python 2.3 or later
-
-Changelog:
-2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
-
-"""
-
-__author__ = "Joe Gregorio (joe at bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Thomas Broyer (t.broyer at ltgt.net)",
-    "James Antill",
-    "Xavier Verges Farrero",
-    "Jonathan Feinberg",
-    "Blair Zajac",
-    "Sam Ruby",
-    "Louis Nyffenegger"]
-__license__ = "MIT"
-__version__ = "0.7.6"
-
-import re
-import sys
-import email
-import email.Utils
-import email.Message
-import email.FeedParser
-import StringIO
-import gzip
-import zlib
-import httplib
-import urlparse
-import urllib
-import base64
-import os
-import copy
-import calendar
-import time
-import random
-import errno
-try:
-    from hashlib import sha1 as _sha, md5 as _md5
-except ImportError:
-    # prior to Python 2.5, these were separate modules
-    import sha
-    import md5
-    _sha = sha.new
-    _md5 = md5.new
-import hmac
-from gettext import gettext as _
-import socket
-
-try:
-    from httplib2 import socks
-except ImportError:
-    try:
-        import socks
-    except ImportError:
-        socks = None
-
-# Build the appropriate socket wrapper for ssl
-try:
-    import ssl # python 2.6
-    ssl_SSLError = ssl.SSLError
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if disable_validation:
-            cert_reqs = ssl.CERT_NONE
-        else:
-            cert_reqs = ssl.CERT_REQUIRED
-        # We should be specifying SSL version 3 or TLS v1, but the ssl module
-        # doesn't expose the necessary knobs. So we need to go with the default
-        # of SSLv23.
-        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
-                               cert_reqs=cert_reqs, ca_certs=ca_certs)
-except (AttributeError, ImportError):
-    ssl_SSLError = None
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if not disable_validation:
-            raise CertificateValidationUnsupported(
-                    "SSL certificate validation is not supported without "
-                    "the ssl module installed. To avoid this error, install "
-                    "the ssl module, or explicity disable validation.")
-        ssl_sock = socket.ssl(sock, key_file, cert_file)
-        return httplib.FakeSocket(sock, ssl_sock)
-
-
-if sys.version_info >= (2,3):
-    from iri2uri import iri2uri
-else:
-    def iri2uri(uri):
-        return uri
-
-def has_timeout(timeout): # python 2.6
-    if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
-        return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
-    return (timeout is not None)
-
-__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
-  'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
-  'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
-  'debuglevel', 'ProxiesUnavailableError']
-
-
-# The httplib debug level, set to a non-zero value to get debug output
-debuglevel = 0
-
-# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
-RETRIES = 2
-
-# Python 2.3 support
-if sys.version_info < (2,4):
-    def sorted(seq):
-        seq.sort()
-        return seq
-
-# Python 2.3 support
-def HTTPResponse__getheaders(self):
-    """Return list of (header, value) tuples."""
-    if self.msg is None:
-        raise httplib.ResponseNotReady()
-    return self.msg.items()
-
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
-    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
-
-# All exceptions raised here derive from HttpLib2Error
-class HttpLib2Error(Exception): pass
-
-# Some exceptions can be caught and optionally
-# be turned back into responses.
-class HttpLib2ErrorWithResponse(HttpLib2Error):
-    def __init__(self, desc, response, content):
-        self.response = response
-        self.content = content
-        HttpLib2Error.__init__(self, desc)
-
-class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
-class RedirectLimit(HttpLib2ErrorWithResponse): pass
-class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
-class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-
-class MalformedHeader(HttpLib2Error): pass
-class RelativeURIError(HttpLib2Error): pass
-class ServerNotFoundError(HttpLib2Error): pass
-class ProxiesUnavailableError(HttpLib2Error): pass
-class CertificateValidationUnsupported(HttpLib2Error): pass
-class SSLHandshakeError(HttpLib2Error): pass
-class NotSupportedOnThisPlatform(HttpLib2Error): pass
-class CertificateHostnameMismatch(SSLHandshakeError):
-  def __init__(self, desc, host, cert):
-    HttpLib2Error.__init__(self, desc)
-    self.host = host
-    self.cert = cert
-
-# Open Items:
-# -----------
-# Proxy support
-
-# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
-
-# Pluggable cache storage (supports storing the cache in
-#   flat files by default. We need a plug-in architecture
-#   that can support Berkeley DB and Squid)
-
-# == Known Issues ==
-# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
-# Does not handle Cache-Control: max-stale
-# Does not use Age: headers when calculating cache freshness.
-
-
-# The number of redirections to follow before giving up.
-# Note that only GET redirects are automatically followed.
-# Will also honor 301 requests by saving that info and never
-# requesting that URI again.
-DEFAULT_MAX_REDIRECTS = 5
-
-# Default CA certificates file bundled with httplib2.
-CA_CERTS = os.path.join(
-        os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
-
-# Which headers are hop-by-hop headers by default
-HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
-
-def _get_end2end_headers(response):
-    hopbyhop = list(HOP_BY_HOP)
-    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
-    return [header for header in response.keys() if header not in hopbyhop]
-
-URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
-
-def parse_uri(uri):
-    """Parses a URI using the regex given in Appendix B of RFC 3986.
-
-        (scheme, authority, path, query, fragment) = parse_uri(uri)
-    """
-    groups = URI.match(uri).groups()
-    return (groups[1], groups[3], groups[4], groups[6], groups[8])
-
-def urlnorm(uri):
-    (scheme, authority, path, query, fragment) = parse_uri(uri)
-    if not scheme or not authority:
-        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
-    authority = authority.lower()
-    scheme = scheme.lower()
-    if not path:
-        path = "/"
-    # Could do syntax based normalization of the URI before
-    # computing the digest. See Section 6.2.2 of Std 66.
-    request_uri = query and "?".join([path, query]) or path
-    scheme = scheme.lower()
-    defrag_uri = scheme + "://" + authority + request_uri
-    return scheme, authority, request_uri, defrag_uri
-
-
-# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
-re_url_scheme    = re.compile(r'^\w+://')
-re_slash         = re.compile(r'[?/:|]+')
-
-def safename(filename):
-    """Return a filename suitable for the cache.
-
-    Strips dangerous and common characters to create a filename we
-    can use to store the cache in.
-    """
-
-    try:
-        if re_url_scheme.match(filename):
-            if isinstance(filename,str):
-                filename = filename.decode('utf-8')
-                filename = filename.encode('idna')
-            else:
-                filename = filename.encode('idna')
-    except UnicodeError:
-        pass
-    if isinstance(filename,unicode):
-        filename=filename.encode('utf-8')
-    filemd5 = _md5(filename).hexdigest()
-    filename = re_url_scheme.sub("", filename)
-    filename = re_slash.sub(",", filename)
-
-    # limit length of filename
-    if len(filename)>200:
-        filename=filename[:200]
-    return ",".join((filename, filemd5))
-
-NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
-def _normalize_headers(headers):
-    return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip())  for (key, value) in headers.iteritems()])
-
-def _parse_cache_control(headers):
-    retval = {}
-    if headers.has_key('cache-control'):
-        parts =  headers['cache-control'].split(',')
-        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
-        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
-        retval = dict(parts_with_args + parts_wo_args)
-    return retval
-
-# Whether to use a strict mode to parse WWW-Authenticate headers
-# Might lead to bad results in case of ill-formed header value,
-# so disabled by default, falling back to relaxed parsing.
-# Set to true to turn on, useful for testing servers.
-USE_WWW_AUTH_STRICT_PARSING = 0
-
-# In regex below:
-#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
-#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
-# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
-#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
-WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
-WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
-UNQUOTE_PAIRS = re.compile(r'\\(.)')
-def _parse_www_authenticate(headers, headername='www-authenticate'):
-    """Returns a dictionary of dictionaries, one dict
-    per auth_scheme."""
-    retval = {}
-    if headers.has_key(headername):
-        try:
-          authenticate = headers[headername].strip()
-          www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
-          while authenticate:
-              # Break off the scheme at the beginning of the line
-              if headername == 'authentication-info':
-                  (auth_scheme, the_rest) = ('digest', authenticate)
-              else:
-                  (auth_scheme, the_rest) = authenticate.split(" ", 1)
-              # Now loop over all the key value pairs that come after the scheme,
-              # being careful not to roll into the next scheme
-              match = www_auth.search(the_rest)
-              auth_params = {}
-              while match:
-                  if match and len(match.groups()) == 3:
-                      (key, value, the_rest) = match.groups()
-                      auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
-                  match = www_auth.search(the_rest)
-              retval[auth_scheme.lower()] = auth_params
-              authenticate = the_rest.strip()
-        except ValueError:
-          raise MalformedHeader("WWW-Authenticate")
-    return retval
-
-
-def _entry_disposition(response_headers, request_headers):
-    """Determine freshness from the Date, Expires and Cache-Control headers.
-
-    We don't handle the following:
-
-    1. Cache-Control: max-stale
-    2. Age: headers are not used in the calculations.
-
-    Not that this algorithm is simpler than you might think
-    because we are operating as a private (non-shared) cache.
-    This lets us ignore 's-maxage'. We can also ignore
-    'proxy-invalidate' since we aren't a proxy.
-    We will never return a stale document as
-    fresh as a design decision, and thus the non-implementation
-    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
-    since we operate as if every server has sent 'must-revalidate'.
-    Since we are private we get to ignore both 'public' and
-    'private' parameters. We also ignore 'no-transform' since
-    we don't do any transformations.
-    The 'no-store' parameter is handled at a higher level.
-    So the only Cache-Control parameters we look at are:
-
-    no-cache
-    only-if-cached
-    max-age
-    min-fresh
-    """
-
-    retval = "STALE"
-    cc = _parse_cache_control(request_headers)
-    cc_response = _parse_cache_control(response_headers)
-
-    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
-        retval = "TRANSPARENT"
-        if 'cache-control' not in request_headers:
-            request_headers['cache-control'] = 'no-cache'
-    elif cc.has_key('no-cache'):
-        retval = "TRANSPARENT"
-    elif cc_response.has_key('no-cache'):
-        retval = "STALE"
-    elif cc.has_key('only-if-cached'):
-        retval = "FRESH"
-    elif response_headers.has_key('date'):
-        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
-        now = time.time()
-        current_age = max(0, now - date)
-        if cc_response.has_key('max-age'):
-            try:
-                freshness_lifetime = int(cc_response['max-age'])
-            except ValueError:
-                freshness_lifetime = 0
-        elif response_headers.has_key('expires'):
-            expires = email.Utils.parsedate_tz(response_headers['expires'])
-            if None == expires:
-                freshness_lifetime = 0
-            else:
-                freshness_lifetime = max(0, calendar.timegm(expires) - date)
-        else:
-            freshness_lifetime = 0
-        if cc.has_key('max-age'):
-            try:
-                freshness_lifetime = int(cc['max-age'])
-            except ValueError:
-                freshness_lifetime = 0
-        if cc.has_key('min-fresh'):
-            try:
-                min_fresh = int(cc['min-fresh'])
-            except ValueError:
-                min_fresh = 0
-            current_age += min_fresh
-        if freshness_lifetime > current_age:
-            retval = "FRESH"
-    return retval
-
-def _decompressContent(response, new_content):
-    content = new_content
-    try:
-        encoding = response.get('content-encoding', None)
-        if encoding in ['gzip', 'deflate']:
-            if encoding == 'gzip':
-                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
-            if encoding == 'deflate':
-                content = zlib.decompress(content)
-            response['content-length'] = str(len(content))
-            # Record the historical presence of the encoding in a way the won't interfere.
-            response['-content-encoding'] = response['content-encoding']
-            del response['content-encoding']
-    except IOError:
-        content = ""
-        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
-    return content
-
-def _updateCache(request_headers, response_headers, content, cache, cachekey):
-    if cachekey:
-        cc = _parse_cache_control(request_headers)
-        cc_response = _parse_cache_control(response_headers)
-        if cc.has_key('no-store') or cc_response.has_key('no-store'):
-            cache.delete(cachekey)
-        else:
-            info = email.Message.Message()
-            for key, value in response_headers.iteritems():
-                if key not in ['status','content-encoding','transfer-encoding']:
-                    info[key] = value
-
-            # Add annotations to the cache to indicate what headers
-            # are variant for this request.
-            vary = response_headers.get('vary', None)
-            if vary:
-                vary_headers = vary.lower().replace(' ', '').split(',')
-                for header in vary_headers:
-                    key = '-varied-%s' % header
-                    try:
-                        info[key] = request_headers[header]
-                    except KeyError:
-                        pass
-
-            status = response_headers.status
-            if status == 304:
-                status = 200
-
-            status_header = 'status: %d\r\n' % status
-
-            header_str = info.as_string()
-
-            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
-            text = "".join([status_header, header_str, content])
-
-            cache.set(cachekey, text)
-
-def _cnonce():
-    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
-    return dig[:16]
-
-def _wsse_username_token(cnonce, iso_now, password):
-    return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
-
-
-# For credentials we need two things, first
-# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
-# Then we also need a list of URIs that have already demanded authentication
-# That list is tricky since sub-URIs can take the same auth, or the
-# auth scheme may change as you descend the tree.
-# So we also need each Auth instance to be able to tell us
-# how close to the 'top' it is.
-
-class Authentication(object):
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        self.path = path
-        self.host = host
-        self.credentials = credentials
-        self.http = http
-
-    def depth(self, request_uri):
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        return request_uri[len(self.path):].count("/")
-
-    def inscope(self, host, request_uri):
-        # XXX Should we normalize the request_uri?
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        return (host == self.host) and path.startswith(self.path)
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header. Over-ride this in sub-classes."""
-        pass
-
-    def response(self, response, content):
-        """Gives us a chance to update with new nonces
-        or such returned from the last authorized response.
-        Over-rise this in sub-classes if necessary.
-
-        Return TRUE is the request is to be retried, for
-        example Digest may return stale=true.
-        """
-        return False
-
-
-
-class BasicAuthentication(Authentication):
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header."""
-        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
-
-
-class DigestAuthentication(Authentication):
-    """Only do qop='auth' and MD5, since that
-    is all Apache currently implements"""
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-        challenge = _parse_www_authenticate(response, 'www-authenticate')
-        self.challenge = challenge['digest']
-        qop = self.challenge.get('qop', 'auth')
-        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
-        if self.challenge['qop'] is None:
-            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
-        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
-        if self.challenge['algorithm'] != 'MD5':
-            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
-        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
-        self.challenge['nc'] = 1
-
-    def request(self, method, request_uri, headers, content, cnonce = None):
-        """Modify the request headers"""
-        H = lambda x: _md5(x).hexdigest()
-        KD = lambda s, d: H("%s:%s" % (s, d))
-        A2 = "".join([method, ":", request_uri])
-        self.challenge['cnonce'] = cnonce or _cnonce()
-        request_digest  = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
-                    '%08x' % self.challenge['nc'],
-                    self.challenge['cnonce'],
-                    self.challenge['qop'], H(A2)
-                    ))
-        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
-                self.credentials[0],
-                self.challenge['realm'],
-                self.challenge['nonce'],
-                request_uri,
-                self.challenge['algorithm'],
-                request_digest,
-                self.challenge['qop'],
-                self.challenge['nc'],
-                self.challenge['cnonce'],
-                )
-        if self.challenge.get('opaque'):
-            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
-        self.challenge['nc'] += 1
-
-    def response(self, response, content):
-        if not response.has_key('authentication-info'):
-            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
-            if 'true' == challenge.get('stale'):
-                self.challenge['nonce'] = challenge['nonce']
-                self.challenge['nc'] = 1
-                return True
-        else:
-            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
-
-            if updated_challenge.has_key('nextnonce'):
-                self.challenge['nonce'] = updated_challenge['nextnonce']
-                self.challenge['nc'] = 1
-        return False
-
-
class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = "Thomas Broyer (t.broyer at ltgt.net)"

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        """Parse and validate the 'hmacdigest' challenge, then precompute the HMAC key.

        Raises UnimplementedHmacDigestAuthOptionError when the challenge is
        missing a server nonce or requests an unsupported algorithm.
        """
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        # TODO: self.challenge['domain']
        # Unknown 'reason' values are coerced to 'unauthorized'.
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
        # Select the hash modules announced by the challenge (module-level
        # _md5/_sha aliases; SHA-1 is the default for both).
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = _md5
        else:
            self.hashmod = _sha
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = _md5
        else:
            self.pwhashmod = _sha
        # key = H(username ":" H(password + salt) ":" realm), hex, lowercase.
        self.key = "".join([self.credentials[0], ":",
                    self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
                    ":", self.challenge['realm']
                    ])
        self.key = self.pwhashmod.new(self.key).hexdigest().lower()

    def request(self, method, request_uri, headers, content):
        """Modify the request headers"""
        # The digest covers all end-to-end header values plus method, URI,
        # client nonce and server nonce.  Note each key in 'keylist' is
        # followed by a trailing space, matching what the digest expects.
        keys = _get_end2end_headers(headers)
        keylist = "".join(["%s " % k for k in keys])
        headers_val = "".join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
        cnonce = _cnonce()
        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
        request_digest  = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['snonce'],
                cnonce,
                request_uri,
                created,
                request_digest,
                keylist,
                )

    def response(self, response, content):
        """Return True (i.e. retry the request) when the server signals an
        integrity failure or a stale nonce in a new hmacdigest challenge."""
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ['integrity', 'stale']:
            return True
        return False
-
-
class WsseAuthentication(Authentication):
    """WSSE UsernameToken authentication (thinly tested).

    There is no known third-party server to test against: Blogger and
    TypePad once implemented this scheme, but Blogger moved to Basic over
    HTTPS and TypePad's implementation is broken — it never issues a 401
    challenge and simply expects clients to already know that its endpoint
    wants WSSE profile="UsernameToken"."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Attach the WSSE Authorization and X-WSSE headers to the request."""
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        nonce = _cnonce()
        token_digest = _wsse_username_token(nonce, timestamp, self.credentials[1])
        headers['authorization'] = 'WSSE profile="UsernameToken"'
        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
                self.credentials[0],
                token_digest,
                nonce,
                timestamp)
-
class GoogleLoginAuthentication(Authentication):
    # Implements Google's ClientLogin scheme: the constructor performs a
    # synchronous POST to www.google.com to exchange the credentials for an
    # Auth token, which is then attached to every request.
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        """Obtain a ClientLogin Auth token for the challenged service.

        On a 403 from the ClientLogin endpoint the token is left empty.
        """
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and  request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #    service = "wise"

        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        # The response body is newline-separated key=value pairs.
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            self.Auth = ""
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
-
-
# Maps a challenge scheme name (as parsed from WWW-Authenticate) to the
# Authentication subclass that implements it.
AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}

# Order in which schemes are attempted when a 401 response offers several
# challenges, most preferred first.
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
-
class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
        """cache: directory path; safe: callable mapping a cache key to a
        filesystem-safe file name."""
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)

    def get(self, key):
        """Return the cached bytes for *key*, or None if absent/unreadable."""
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            # open() instead of the deprecated (Python-2-only) file() builtin;
            # finally guarantees the handle is closed even if read() raises.
            f = open(cacheFullPath, "rb")
            try:
                retval = f.read()
            finally:
                f.close()
        except IOError:
            pass
        return retval

    def set(self, key, value):
        """Store *value* (bytes) under *key*, overwriting any previous entry."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = open(cacheFullPath, "wb")
        try:
            f.write(value)
        finally:
            f.close()

    def delete(self, key):
        """Remove the entry for *key*; a missing entry is not an error."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)
-
class Credentials(object):
    """A simple store of (domain, name, password) triples used to answer
    HTTP authentication challenges."""

    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        """Record a name/password pair, optionally restricted to *domain*
        (stored lower-cased; "" means valid for any host)."""
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        """Forget every stored credential."""
        self.credentials = []

    def iter(self, domain):
        """Yield (name, password) pairs applicable to *domain*, in the
        order they were added."""
        for entry in self.credentials:
            stored_domain, name, password = entry
            if stored_domain == "" or stored_domain == domain:
                yield (name, password)
-
class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert (i.e. each stored triple is
    (domain, key, cert))."""
    pass
-
class AllHosts(object):
  """Sentinel assigned to ProxyInfo.bypass_hosts when no_proxy='*',
  meaning every host bypasses the proxy."""
  pass
-
class ProxyInfo(object):
    """Collect information required to use a proxy."""

    # Hostnames excluded from proxying; the AllHosts sentinel means every
    # host bypasses the proxy (set when no_proxy='*').
    bypass_hosts = ()

    def __init__(self, proxy_type, proxy_host, proxy_port,
        proxy_rdns=None, proxy_user=None, proxy_pass=None):
        """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
        constants. For example:

        p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
            proxy_host='localhost', proxy_port=8000)
        """
        self.proxy_type = proxy_type
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_rdns = proxy_rdns
        self.proxy_user = proxy_user
        self.proxy_pass = proxy_pass

    def astuple(self):
        """Return the settings as a 6-tuple in the order socks expects."""
        return (self.proxy_type, self.proxy_host, self.proxy_port,
            self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        """True when both a host and a port are configured."""
        # Identity comparison with None instead of '!= None' (PEP 8).
        return (self.proxy_host is not None) and (self.proxy_port is not None)

    @classmethod
    def from_environment(cls, method='http'):
        """
        Read proxy info from the environment variables
        (http_proxy/https_proxy and no_proxy, either case).
        Returns None when *method* is unsupported or no proxy is set.
        """
        if method not in ['http', 'https']:
            return

        env_var = method + '_proxy'
        url = os.environ.get(env_var, os.environ.get(env_var.upper()))
        if not url:
            return
        pi = cls.from_url(url, method)

        no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
        bypass_hosts = []
        if no_proxy:
            bypass_hosts = no_proxy.split(',')
        # special case, no_proxy=* means all hosts bypassed
        if no_proxy == '*':
            bypass_hosts = AllHosts

        pi.bypass_hosts = bypass_hosts
        return pi

    @classmethod
    def from_url(cls, url, method='http'):
        """
        Construct a ProxyInfo from a URL (such as http_proxy env var)
        """
        url = urlparse.urlparse(url)
        username = None
        password = None
        port = None
        if '@' in url[1]:
            ident, host_port = url[1].split('@', 1)
            if ':' in ident:
                username, password = ident.split(':', 1)
            else:
                # NOTE(review): a lone token before '@' is treated as a
                # password (not a username) — preserved upstream behavior.
                password = ident
        else:
            host_port = url[1]
        if ':' in host_port:
            host, port = host_port.split(':', 1)
        else:
            host = host_port

        if port:
            port = int(port)
        else:
            # Default port from the scheme when none is given in the URL.
            port = dict(https=443, http=80)[method]

        proxy_type = 3 # socks.PROXY_TYPE_HTTP
        return cls(
            proxy_type = proxy_type,
            proxy_host = host,
            proxy_port = port,
            proxy_user = username or None,
            proxy_pass = password or None,
        )

    def applies_to(self, hostname):
        """True when *hostname* should be reached through the proxy."""
        return not self.bypass_host(hostname)

    def bypass_host(self, hostname):
        """Has this host been excluded from the proxy config"""
        if self.bypass_hosts is AllHosts:
            return True

        # Suffix match: an entry like '.example.com' (or 'example.com')
        # excludes any hostname ending with it.
        bypass = False
        for domain in self.bypass_hosts:
            if hostname.endswith(domain):
                bypass = True

        return bypass
-
-
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    """
    HTTPConnection subclass that supports timeouts

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """

    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info

    def connect(self):
        """Connect to the host and port specified in __init__."""
        # Mostly verbatim from httplib.py.
        if self.proxy_info and socks is None:
            raise ProxiesUnavailableError(
                'Proxy support missing but proxy use was requested!')
        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
        else:
            use_proxy = False
        # When the proxy resolves names remotely (rdns), resolve the proxy's
        # address here instead of the target host's.
        if use_proxy and proxy_rdns:
            host = proxy_host
            port = proxy_port
        else:
            host = self.host
            port = self.port

        # Try each resolved address in turn; keep the first socket that
        # connects, fall through to the next address on socket.error.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                if use_proxy:
                    self.sock = socks.socksocket(af, socktype, proto)
                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                # Different from httplib: support timeouts.
                if has_timeout(self.timeout):
                    self.sock.settimeout(self.timeout)
                    # End of difference from httplib.
                if self.debuglevel > 0:
                    print "connect: (%s, %s) ************" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))

                self.sock.connect((self.host, self.port) + sa[2:])
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        # Every candidate address failed: re-raise the last socket error.
        if not self.sock:
            raise socket.error, msg
-
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
    """
    This class allows communication via SSL.

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
                cert_file=cert_file, strict=strict)
        self.timeout = timeout
        self.proxy_info = proxy_info
        # Fall back to the CA bundle shipped with httplib2.
        if ca_certs is None:
          ca_certs = CA_CERTS
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

    # The following two methods were adapted from https_wrapper.py, released
    # with the Google Appengine SDK at
    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
    # under the following license:
    #
    # Copyright 2007 Google Inc.
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #

    def _GetValidHostsForCert(self, cert):
        """Returns a list of valid host globs for an SSL certificate.

        Args:
          cert: A dictionary representing an SSL certificate.
        Returns:
          list: A list of valid host globs.
        """
        # subjectAltName DNS entries take precedence; otherwise fall back to
        # the subject commonName fields.
        if 'subjectAltName' in cert:
            return [x[1] for x in cert['subjectAltName']
                    if x[0].lower() == 'dns']
        else:
            return [x[0][1] for x in cert['subject']
                    if x[0][0].lower() == 'commonname']

    def _ValidateCertificateHostname(self, cert, hostname):
        """Validates that a given hostname is valid for an SSL certificate.

        Args:
          cert: A dictionary representing an SSL certificate.
          hostname: The hostname to test.
        Returns:
          bool: Whether or not the hostname is valid for this certificate.
        """
        hosts = self._GetValidHostsForCert(cert)
        for host in hosts:
            # Turn the certificate glob into a regex: literal dots, and '*'
            # matches a single label (no dots).
            host_re = host.replace('.', '\.').replace('*', '[^.]*')
            if re.search('^%s$' % (host_re,), hostname, re.I):
                return True
        return False

    def connect(self):
        "Connect to a host on a given (SSL) port."

        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
        else:
            use_proxy = False
        # With remote DNS (rdns), resolve the proxy address, not the target.
        if use_proxy and proxy_rdns:
            host = proxy_host
            port = proxy_port
        else:
            host = self.host
            port = self.port

        # Try each resolved address; the first successful SSL handshake wins.
        for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
            host, port, 0, socket.SOCK_STREAM):
            try:
                if use_proxy:
                    sock = socks.socksocket(family, socktype, proto)

                    sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    sock = socket.socket(family, socktype, proto)
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

                if has_timeout(self.timeout):
                    sock.settimeout(self.timeout)
                sock.connect((self.host, self.port))
                self.sock =_ssl_wrap_socket(
                    sock, self.key_file, self.cert_file,
                    self.disable_ssl_certificate_validation, self.ca_certs)
                if self.debuglevel > 0:
                    print "connect: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if not self.disable_ssl_certificate_validation:
                    cert = self.sock.getpeercert()
                    # NOTE(review): maxsplit=0 means no split is performed, so
                    # hostname is self.host unchanged; httplib normally strips
                    # the port into self.port before this point — confirm.
                    hostname = self.host.split(':', 0)[0]
                    if not self._ValidateCertificateHostname(cert, hostname):
                        raise CertificateHostnameMismatch(
                            'Server presented certificate that does not match '
                            'host %s: %s' % (hostname, cert), hostname, cert)
            except ssl_SSLError, e:
                if sock:
                    sock.close()
                if self.sock:
                    self.sock.close()
                self.sock = None
                # Unfortunately the ssl module doesn't seem to provide any way
                # to get at more detailed error information, in particular
                # whether the error is due to certificate validation or
                # something else (such as SSL protocol mismatch).
                if e.errno == ssl.SSL_ERROR_SSL:
                    raise SSLHandshakeError(e)
                else:
                    raise
            except (socket.timeout, socket.gaierror):
              raise
            except socket.error, msg:
              if self.debuglevel > 0:
                  print "connect fail: (%s, %s)" % (self.host, self.port)
                  if use_proxy:
                      print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
              if self.sock:
                  self.sock.close()
              self.sock = None
              continue
            break
        # Every candidate address failed: re-raise the last socket error.
        if not self.sock:
          raise socket.error, msg
-
# Default mapping from URI scheme to the connection class used for it
# (overridden below when running on Google App Engine).
SCHEME_TO_CONNECTION = {
    'http': HTTPConnectionWithTimeout,
    'https': HTTPSConnectionWithTimeout
    }
-
# Use a different connection object for Google App Engine
try:
  from google.appengine.api import apiproxy_stub_map
  if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
    raise ImportError  # Bail out; we're not actually running on App Engine.
  from google.appengine.api.urlfetch import fetch
  from google.appengine.api.urlfetch import InvalidURLError
  from google.appengine.api.urlfetch import DownloadError
  from google.appengine.api.urlfetch import ResponseTooLargeError
  from google.appengine.api.urlfetch import SSLCertificateError


  class ResponseDict(dict):
    """Is a dictionary that also has a read() method, so
    that it can pass itself off as an httplib.HTTPResponse()."""
    def read(self):
      pass


  class AppEngineHttpConnection(object):
    """Emulates an httplib.HTTPConnection object, but actually uses the Google
    App Engine urlfetch library. This allows the timeout to be properly used on
    Google App Engine, and avoids using httplib, which on Google App Engine is
    just another wrapper around urlfetch.
    """
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None, ca_certs=None,
                 disable_ssl_certificate_validation=False):
      self.host = host
      self.port = port
      self.timeout = timeout
      # urlfetch cannot do client certs, proxies or custom CA bundles.
      if key_file or cert_file or proxy_info or ca_certs:
        raise NotSupportedOnThisPlatform()
      self.response = None
      self.scheme = 'http'
      self.validate_certificate = not disable_ssl_certificate_validation
      # Pretend to always be connected; urlfetch has no connection concept.
      self.sock = True

    def request(self, method, url, body, headers):
      # Calculate the absolute URI, which fetch requires
      netloc = self.host
      if self.port:
        netloc = '%s:%s' % (self.host, self.port)
      absolute_uri = '%s://%s%s' % (self.scheme, netloc, url)
      try:
        try: # 'body' can be a stream.
          body = body.read()
        except AttributeError:
          pass
        response = fetch(absolute_uri, payload=body, method=method,
            headers=headers, allow_truncated=False, follow_redirects=False,
            deadline=self.timeout,
            validate_certificate=self.validate_certificate)
        # Dress the urlfetch response up as an httplib-style response.
        self.response = ResponseDict(response.headers)
        self.response['status'] = str(response.status_code)
        self.response['reason'] = httplib.responses.get(response.status_code, 'Ok')
        self.response.status = response.status_code
        setattr(self.response, 'read', lambda : response.content)

      # Make sure the exceptions raised match the exceptions expected.
      except InvalidURLError:
        raise socket.gaierror('')
      except (DownloadError, ResponseTooLargeError, SSLCertificateError):
        raise httplib.HTTPException()

    def getresponse(self):
      if self.response:
        return self.response
      else:
        raise httplib.HTTPException()

    def set_debuglevel(self, level):
      pass

    def connect(self):
      pass

    def close(self):
      pass


  class AppEngineHttpsConnection(AppEngineHttpConnection):
    """Same as AppEngineHttpConnection, but for HTTPS URIs."""
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None, ca_certs=None,
                 disable_ssl_certificate_validation=False):
      AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file,
          strict, timeout, proxy_info, ca_certs, disable_ssl_certificate_validation)
      self.scheme = 'https'

  # Update the connection classes to use the Google App Engine specific ones.
  SCHEME_TO_CONNECTION = {
      'http': AppEngineHttpConnection,
      'https': AppEngineHttpsConnection
      }

except ImportError:
  pass
-
-
-class Http(object):
-    """An HTTP client that handles:
-- all methods
-- caching
-- ETags
-- compression,
-- HTTPS
-- Basic
-- Digest
-- WSSE
-
-and more.
-    """
    def __init__(self, cache=None, timeout=None,
                 proxy_info=ProxyInfo.from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
          - a callable that takes the http scheme ('http' or 'https') and
            returns a ProxyInfo instance per request. By default, uses
            ProxyInfo.from_environment.
          - a ProxyInfo instance (static proxy config).
          - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation.  By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation will
        not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, basestring):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirecs are followed, including unsafe ones.
        self.follow_all_redirects = False

        # If True, cached ETags are not sent for validation
        # (presumably; behavior implemented outside this view — confirm).
        self.ignore_etag = False

        # If True, transport errors are reported as synthetic HTTP status
        # responses instead of raised exceptions (presumably — confirm
        # against the request() implementation, not shown here).
        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
        self.forward_authorization_headers = False
-
-    def _auth_from_challenge(self, host, request_uri, headers, response, content):
-        """A generator that creates Authorization objects
-           that can be applied to requests.
-        """
-        challenges = _parse_www_authenticate(response, 'www-authenticate')
-        for cred in self.credentials.iter(host):
-            for scheme in AUTH_SCHEME_ORDER:
-                if challenges.has_key(scheme):
-                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
-
    def add_credentials(self, name, password, domain=""):
        """Add a name and password that will be used
        any time a request requires authentication.
        An empty domain means the pair applies to every host."""
        self.credentials.add(name, password, domain)
-
    def add_certificate(self, key, cert, domain):
        """Add a key and cert that will be used
        any time a request requires authentication.
        key/cert are stored in the KeyCerts store for the given domain."""
        self.certificates.add(key, cert, domain)
-
    def clear_credentials(self):
        """Remove all the names and passwords
        that are used for authentication, and drop any
        Authorization objects built from them."""
        self.credentials.clear()
        self.authorizations = []
-
    def _conn_request(self, conn, request_uri, method, body, headers):
        """Send one request on *conn* and read the response, retrying up to
        RETRIES times when the connection was dropped; returns
        (Response, content).  Timeouts, DNS failures, SSL errors and
        connection-refused are raised immediately."""
        for i in range(RETRIES):
            try:
                if conn.sock is None:
                  conn.connect()
                conn.request(method, request_uri, body, headers)
            except socket.timeout:
                raise
            except socket.gaierror:
                conn.close()
                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
            except ssl_SSLError:
                conn.close()
                raise
            except socket.error, e:
                err = 0
                if hasattr(e, 'args'):
                    err = getattr(e, 'args')[0]
                else:
                    err = e.errno
                if err == errno.ECONNREFUSED: # Connection refused
                    raise
                # Other socket errors fall through and we still attempt to
                # read a response below.
            except httplib.HTTPException:
                # Just because the server closed the connection doesn't apparently mean
                # that the server didn't send a response.
                if conn.sock is None:
                    if i < RETRIES-1:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except (socket.error, httplib.HTTPException):
                # Reading failed: reconnect and retry unless out of attempts.
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    raise
            else:
                content = ""
                # HEAD has no body, so close instead of reading; otherwise
                # read fully and decompress per the response headers.
                if method == "HEAD":
                    conn.close()
                else:
                    content = response.read()
                response = Response(response)
                if method != "HEAD":
                    content = _decompressContent(response, content)
            break
        return (response, content)
-
-
    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
        """Do the actual request using the connection object
        and also follow one level of redirects if necessary.

        Applies any in-scope Authorization object, retries once on a stale
        auth response, handles 401 challenges, and (recursively via
        self.request) follows 3xx redirects up to *redirections* deep.
        """

        # Pick the most specific (deepest-path) authorization in scope.
        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
        auth = auths and sorted(auths)[0][1] or None
        if auth:
            auth.request(method, request_uri, headers, body)

        (response, content) = self._conn_request(conn, request_uri, method, body, headers)

        if auth:
            # auth.response() returning True means "retry with refreshed
            # credentials" (e.g. stale digest nonce).
            if auth.response(response, body):
                auth.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers )
                response._stale_digest = 1

        if response.status == 401:
            # Try each credential/scheme combination until one is accepted.
            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
                authorization.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
                if response.status != 401:
                    self.authorizations.append(authorization)
                    authorization.response(response, body)
                    break

        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
                # Pick out the location header and basically start from the beginning
                # remembering first to strip the ETag header and decrement our 'depth'
                if redirections:
                    if not response.has_key('location') and response.status != 300:
                        raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
                    if response.has_key('location'):
                        location = response['location']
                        (scheme, authority, path, query, fragment) = parse_uri(location)
                        if authority == None:
                            response['location'] = urlparse.urljoin(absolute_uri, location)
                    if response.status == 301 and method in ["GET", "HEAD"]:
                        # Permanent redirects of safe methods are cacheable.
                        response['-x-permanent-redirect-url'] = response['location']
                        if not response.has_key('content-location'):
                            response['content-location'] = absolute_uri
                        _updateCache(headers, response, content, self.cache, cachekey)
                    # Strip validators (and optionally auth) before re-issuing.
                    if headers.has_key('if-none-match'):
                        del headers['if-none-match']
                    if headers.has_key('if-modified-since'):
                        del headers['if-modified-since']
                    if 'authorization' in headers and not self.forward_authorization_headers:
                        del headers['authorization']
                    if response.has_key('location'):
                        location = response['location']
                        old_response = copy.deepcopy(response)
                        if not old_response.has_key('content-location'):
                            old_response['content-location'] = absolute_uri
                        redirect_method = method
                        # 302/303 redirects are re-fetched with GET, no body.
                        if response.status in [302, 303]:
                            redirect_method = "GET"
                            body = None
                        (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
                        # Chain responses so callers can walk the redirects.
                        response.previous = old_response
                else:
                    raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
                # Don't cache 206's since we aren't going to handle byte range requests
                if not response.has_key('content-location'):
                    response['content-location'] = absolute_uri
                _updateCache(headers, response, content, self.cache, cachekey)

        return (response, content)
-
-    def _normalize_headers(self, headers):
-        return _normalize_headers(headers)
-
-# Need to catch and rebrand some exceptions
-# Then need to optionally turn all exceptions into status codes
-# including all socket.* and httplib.* exceptions.
-
-
-    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
-        """ Performs a single HTTP request.
-The 'uri' is the URI of the HTTP resource and can begin
-with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
-
-The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
-There is no restriction on the methods allowed.
-
-The 'body' is the entity body to be sent with the request. It is a string
-object.
-
-Any extra headers that are to be sent with the request should be provided in the
-'headers' dictionary.
-
-The maximum number of redirect to follow before raising an
-exception is 'redirections. The default is 5.
-
-The return value is a tuple of (response, content), the first
-being and instance of the 'Response' class, the second being
-a string that contains the response entity body.
-        """
-        try:
-            if headers is None:
-                headers = {}
-            else:
-                headers = self._normalize_headers(headers)
-
-            if not headers.has_key('user-agent'):
-                headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
-
-            uri = iri2uri(uri)
-
-            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
-            domain_port = authority.split(":")[0:2]
-            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
-                scheme = 'https'
-                authority = domain_port[0]
-
-            proxy_info = self._get_proxy_info(scheme, authority)
-
-            conn_key = scheme+":"+authority
-            if conn_key in self.connections:
-                conn = self.connections[conn_key]
-            else:
-                if not connection_type:
-                  connection_type = SCHEME_TO_CONNECTION[scheme]
-                certs = list(self.certificates.iter(authority))
-                if scheme == 'https':
-                    if certs:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, key_file=certs[0][0],
-                                cert_file=certs[0][1], timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                    else:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                else:
-                    conn = self.connections[conn_key] = connection_type(
-                            authority, timeout=self.timeout,
-                            proxy_info=proxy_info)
-                conn.set_debuglevel(debuglevel)
-
-            if 'range' not in headers and 'accept-encoding' not in headers:
-                headers['accept-encoding'] = 'gzip, deflate'
-
-            info = email.Message.Message()
-            cached_value = None
-            if self.cache:
-                cachekey = defrag_uri
-                cached_value = self.cache.get(cachekey)
-                if cached_value:
-                    # info = email.message_from_string(cached_value)
-                    #
-                    # Need to replace the line above with the kludge below
-                    # to fix the non-existent bug not fixed in this
-                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
-                    try:
-                        info, content = cached_value.split('\r\n\r\n', 1)
-                        feedparser = email.FeedParser.FeedParser()
-                        feedparser.feed(info)
-                        info = feedparser.close()
-                        feedparser._parse = None
-                    except (IndexError, ValueError):
-                        self.cache.delete(cachekey)
-                        cachekey = None
-                        cached_value = None
-            else:
-                cachekey = None
-
-            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
-                # http://www.w3.org/1999/04/Editing/
-                headers['if-match'] = info['etag']
-
-            if method not in ["GET", "HEAD"] and self.cache and cachekey:
-                # RFC 2616 Section 13.10
-                self.cache.delete(cachekey)
-
-            # Check the vary header in the cache to see if this request
-            # matches what varies in the cache.
-            if method in ['GET', 'HEAD'] and 'vary' in info:
-                vary = info['vary']
-                vary_headers = vary.lower().replace(' ', '').split(',')
-                for header in vary_headers:
-                    key = '-varied-%s' % header
-                    value = info[key]
-                    if headers.get(header, None) != value:
-                            cached_value = None
-                            break
-
-            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
-                if info.has_key('-x-permanent-redirect-url'):
-                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
-                    if redirections <= 0:
-                      raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
-                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
-                    response.previous = Response(info)
-                    response.previous.fromcache = True
-                else:
-                    # Determine our course of action:
-                    #   Is the cached entry fresh or stale?
-                    #   Has the client requested a non-cached response?
-                    #
-                    # There seems to be three possible answers:
-                    # 1. [FRESH] Return the cache entry w/o doing a GET
-                    # 2. [STALE] Do the GET (but add in cache validators if available)
-                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
-                    entry_disposition = _entry_disposition(info, headers)
-
-                    if entry_disposition == "FRESH":
-                        if not cached_value:
-                            info['status'] = '504'
-                            content = ""
-                        response = Response(info)
-                        if cached_value:
-                            response.fromcache = True
-                        return (response, content)
-
-                    if entry_disposition == "STALE":
-                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
-                            headers['if-none-match'] = info['etag']
-                        if info.has_key('last-modified') and not 'last-modified' in headers:
-                            headers['if-modified-since'] = info['last-modified']
-                    elif entry_disposition == "TRANSPARENT":
-                        pass
-
-                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-
-                if response.status == 304 and method == "GET":
-                    # Rewrite the cache entry with the new end-to-end headers
-                    # Take all headers that are in response
-                    # and overwrite their values in info.
-                    # unless they are hop-by-hop, or are listed in the connection header.
-
-                    for key in _get_end2end_headers(response):
-                        info[key] = response[key]
-                    merged_response = Response(info)
-                    if hasattr(response, "_stale_digest"):
-                        merged_response._stale_digest = response._stale_digest
-                    _updateCache(headers, merged_response, content, self.cache, cachekey)
-                    response = merged_response
-                    response.status = 200
-                    response.fromcache = True
-
-                elif response.status == 200:
-                    content = new_content
-                else:
-                    self.cache.delete(cachekey)
-                    content = new_content
-            else:
-                cc = _parse_cache_control(headers)
-                if cc.has_key('only-if-cached'):
-                    info['status'] = '504'
-                    response = Response(info)
-                    content = ""
-                else:
-                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-        except Exception, e:
-            if self.force_exception_to_status_code:
-                if isinstance(e, HttpLib2ErrorWithResponse):
-                    response = e.response
-                    content = e.content
-                    response.status = 500
-                    response.reason = str(e)
-                elif isinstance(e, socket.timeout):
-                    content = "Request Timeout"
-                    response = Response( {
-                            "content-type": "text/plain",
-                            "status": "408",
-                            "content-length": len(content)
-                            })
-                    response.reason = "Request Timeout"
-                else:
-                    content = str(e)
-                    response = Response( {
-                            "content-type": "text/plain",
-                            "status": "400",
-                            "content-length": len(content)
-                            })
-                    response.reason = "Bad Request"
-            else:
-                raise
-
-
-        return (response, content)
-
-    def _get_proxy_info(self, scheme, authority):
-        """Return a ProxyInfo instance (or None) based on the scheme
-        and authority.
-        """
-        hostname, port = urllib.splitport(authority)
-        proxy_info = self.proxy_info
-        if callable(proxy_info):
-            proxy_info = proxy_info(scheme)
-
-        if (hasattr(proxy_info, 'applies_to')
-            and not proxy_info.applies_to(hostname)):
-            proxy_info = None
-        return proxy_info
-
-
-class Response(dict):
-    """An object more like email.Message than httplib.HTTPResponse."""
-
-    """Is this response from our local cache"""
-    fromcache = False
-
-    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
-    version = 11
-
-    "Status code returned by server. "
-    status = 200
-
-    """Reason phrase returned by server."""
-    reason = "Ok"
-
-    previous = None
-
-    def __init__(self, info):
-        # info is either an email.Message or
-        # an httplib.HTTPResponse object.
-        if isinstance(info, httplib.HTTPResponse):
-            for key, value in info.getheaders():
-                self[key.lower()] = value
-            self.status = info.status
-            self['status'] = str(self.status)
-            self.reason = info.reason
-            self.version = info.version
-        elif isinstance(info, email.Message.Message):
-            for key, value in info.items():
-                self[key.lower()] = value
-            self.status = int(self['status'])
-        else:
-            for key, value in info.iteritems():
-                self[key.lower()] = value
-            self.status = int(self.get('status', self.status))
-            self.reason = self.get('reason', self.reason)
-
-
-    def __getattr__(self, name):
-        if name == 'dict':
-            return self
-        else:
-            raise AttributeError, name
diff --git a/python/ext-libs/httplib2/iri2uri.py b/python/ext-libs/httplib2/iri2uri.py
deleted file mode 100644
index b924321..0000000
--- a/python/ext-libs/httplib2/iri2uri.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-***************************************************************************
-    iri2uri.py
-    ---------------------
-    Date                 : 2006
-    Copyright            : (C) 2006 Joe Gregorio
-    Email                : joe at bitworking dot org
-***************************************************************************
-*                                                                         *
-*   This program is free software; you can redistribute it and/or modify  *
-*   it under the terms of the GNU General Public License as published by  *
-*   the Free Software Foundation; either version 2 of the License, or     *
-*   (at your option) any later version.                                   *
-*                                                                         *
-***************************************************************************
-"""
-"""
-iri2uri
-
-Converts an IRI to a URI.
-
-"""
-__author__ = "Joe Gregorio (joe at bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = []
-__version__ = "1.0.0"
-__license__ = "MIT"
-__history__ = """
-"""
-# This will get replaced with a git SHA1 when you do a git archive
-__revision__ = '$Format:%H$'
-
-import urlparse
-
-
-# Convert an IRI to a URI following the rules in RFC 3987
-#
-# The characters we need to enocde and escape are defined in the spec:
-#
-# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
-# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
-#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
-#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
-#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
-#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
-#         / %xD0000-DFFFD / %xE1000-EFFFD
-
-escape_range = [
-   (0xA0, 0xD7FF ),
-   (0xE000, 0xF8FF ),
-   (0xF900, 0xFDCF ),
-   (0xFDF0, 0xFFEF),
-   (0x10000, 0x1FFFD ),
-   (0x20000, 0x2FFFD ),
-   (0x30000, 0x3FFFD),
-   (0x40000, 0x4FFFD ),
-   (0x50000, 0x5FFFD ),
-   (0x60000, 0x6FFFD),
-   (0x70000, 0x7FFFD ),
-   (0x80000, 0x8FFFD ),
-   (0x90000, 0x9FFFD),
-   (0xA0000, 0xAFFFD ),
-   (0xB0000, 0xBFFFD ),
-   (0xC0000, 0xCFFFD),
-   (0xD0000, 0xDFFFD ),
-   (0xE1000, 0xEFFFD),
-   (0xF0000, 0xFFFFD ),
-   (0x100000, 0x10FFFD)
-]
-
-def encode(c):
-    retval = c
-    i = ord(c)
-    for low, high in escape_range:
-        if i < low:
-            break
-        if i >= low and i <= high:
-            retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
-            break
-    return retval
-
-
-def iri2uri(uri):
-    """Convert an IRI to a URI. Note that IRIs must be
-    passed in a unicode strings. That is, do not utf-8 encode
-    the IRI before passing it into the function."""
-    if isinstance(uri ,unicode):
-        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
-        authority = authority.encode('idna')
-        # For each character in 'ucschar' or 'iprivate'
-        #  1. encode as utf-8
-        #  2. then %-encode each octet of that utf-8
-        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
-        uri = "".join([encode(c) for c in uri])
-    return uri
-
-if __name__ == "__main__":
-    import unittest
-
-    class Test(unittest.TestCase):
-
-        def test_uris(self):
-            """Test that URIs are invariant under the transformation."""
-            invariant = [
-                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
-                u"http://www.ietf.org/rfc/rfc2396.txt",
-                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
-                u"mailto:John.Doe at example.com",
-                u"news:comp.infosystems.www.servers.unix",
-                u"tel:+1-816-555-1212",
-                u"telnet://192.0.2.16:80/",
-                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
-            for uri in invariant:
-                self.assertEqual(uri, iri2uri(uri))
-
-        def test_iri(self):
-            """ Test that the right type of escaping is done for each part of the URI."""
-            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
-            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
-            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
-            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
-            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
-
-    unittest.main()
-
-
diff --git a/python/ext-libs/httplib2/socks.py b/python/ext-libs/httplib2/socks.py
deleted file mode 100644
index 0991f4c..0000000
--- a/python/ext-libs/httplib2/socks.py
+++ /dev/null
@@ -1,438 +0,0 @@
-"""SocksiPy - Python SOCKS module.
-Version 1.00
-
-Copyright 2006 Dan-Haim. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-3. Neither the name of Dan Haim nor the names of his contributors may be used
-   to endorse or promote products derived from this software without specific
-   prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
-OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
-
-
-This module provides a standard socket-like interface for Python
-for tunneling connections through SOCKS proxies.
-
-"""
-
-"""
-
-Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
-for use in PyLoris (http://pyloris.sourceforge.net/)
-
-Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
-mainly to merge bug fixes found in Sourceforge
-
-"""
-
-import base64
-import socket
-import struct
-import sys
-
-if getattr(socket, 'socket', None) is None:
-    raise ImportError('socket.socket missing, proxy support unusable')
-
-PROXY_TYPE_SOCKS4 = 1
-PROXY_TYPE_SOCKS5 = 2
-PROXY_TYPE_HTTP = 3
-PROXY_TYPE_HTTP_NO_TUNNEL = 4
-
-_defaultproxy = None
-_orgsocket = socket.socket
-
-class ProxyError(Exception): pass
-class GeneralProxyError(ProxyError): pass
-class Socks5AuthError(ProxyError): pass
-class Socks5Error(ProxyError): pass
-class Socks4Error(ProxyError): pass
-class HTTPError(ProxyError): pass
-
-_generalerrors = ("success",
-    "invalid data",
-    "not connected",
-    "not available",
-    "bad proxy type",
-    "bad input")
-
-_socks5errors = ("succeeded",
-    "general SOCKS server failure",
-    "connection not allowed by ruleset",
-    "Network unreachable",
-    "Host unreachable",
-    "Connection refused",
-    "TTL expired",
-    "Command not supported",
-    "Address type not supported",
-    "Unknown error")
-
-_socks5autherrors = ("succeeded",
-    "authentication is required",
-    "all offered authentication methods were rejected",
-    "unknown username or invalid password",
-    "unknown error")
-
-_socks4errors = ("request granted",
-    "request rejected or failed",
-    "request rejected because SOCKS server cannot connect to identd on the client",
-    "request rejected because the client program and identd report different user-ids",
-    "unknown error")
-
-def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
-    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
-    Sets a default proxy which all further socksocket objects will use,
-    unless explicitly changed.
-    """
-    global _defaultproxy
-    _defaultproxy = (proxytype, addr, port, rdns, username, password)
-
-def wrapmodule(module):
-    """wrapmodule(module)
-    Attempts to replace a module's socket library with a SOCKS socket. Must set
-    a default proxy using setdefaultproxy(...) first.
-    This will only work on modules that import socket directly into the namespace;
-    most of the Python Standard Library falls into this category.
-    """
-    if _defaultproxy != None:
-        module.socket.socket = socksocket
-    else:
-        raise GeneralProxyError((4, "no proxy specified"))
-
-class socksocket(socket.socket):
-    """socksocket([family[, type[, proto]]]) -> socket object
-    Open a SOCKS enabled socket. The parameters are the same as
-    those of the standard socket init. In order for SOCKS to work,
-    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
-    """
-
-    def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
-        _orgsocket.__init__(self, family, type, proto, _sock)
-        if _defaultproxy != None:
-            self.__proxy = _defaultproxy
-        else:
-            self.__proxy = (None, None, None, None, None, None)
-        self.__proxysockname = None
-        self.__proxypeername = None
-        self.__httptunnel = True
-
-    def __recvall(self, count):
-        """__recvall(count) -> data
-        Receive EXACTLY the number of bytes requested from the socket.
-        Blocks until the required number of bytes have been received.
-        """
-        data = self.recv(count)
-        while len(data) < count:
-            d = self.recv(count-len(data))
-            if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
-            data = data + d
-        return data
-
-    def sendall(self, content, *args):
-        """ override socket.socket.sendall method to rewrite the header
-        for non-tunneling proxies if needed
-        """
-        if not self.__httptunnel:
-            content = self.__rewriteproxy(content)
-        return super(socksocket, self).sendall(content, *args)
-
-    def __rewriteproxy(self, header):
-        """ rewrite HTTP request headers to support non-tunneling proxies
-        (i.e. those which do not support the CONNECT method).
-        This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
-        """
-        host, endpt = None, None
-        hdrs = header.split("\r\n")
-        for hdr in hdrs:
-            if hdr.lower().startswith("host:"):
-                host = hdr
-            elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
-                endpt = hdr
-        if host and endpt:
-            hdrs.remove(host)
-            hdrs.remove(endpt)
-            host = host.split(" ")[1]
-            endpt = endpt.split(" ")
-            if (self.__proxy[4] != None and self.__proxy[5] != None):
-                hdrs.insert(0, self.__getauthheader())
-            hdrs.insert(0, "Host: %s" % host)
-            hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
-        return "\r\n".join(hdrs)
-
-    def __getauthheader(self):
-        auth = self.__proxy[4] + ":" + self.__proxy[5]
-        return "Proxy-Authorization: Basic " + base64.b64encode(auth)
-
-    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
-        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
-        Sets the proxy to be used.
-        proxytype -    The type of the proxy to be used. Three types
-                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
-                PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
-        addr -        The address of the server (IP or DNS).
-        port -        The port of the server. Defaults to 1080 for SOCKS
-                servers and 8080 for HTTP proxy servers.
-        rdns -        Should DNS queries be preformed on the remote side
-                (rather than the local side). The default is True.
-                Note: This has no effect with SOCKS4 servers.
-        username -    Username to authenticate with to the server.
-                The default is no authentication.
-        password -    Password to authenticate with to the server.
-                Only relevant when username is also provided.
-        """
-        self.__proxy = (proxytype, addr, port, rdns, username, password)
-
-    def __negotiatesocks5(self, destaddr, destport):
-        """__negotiatesocks5(self,destaddr,destport)
-        Negotiates a connection through a SOCKS5 server.
-        """
-        # First we'll send the authentication packages we support.
-        if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
-            # The username/password details were supplied to the
-            # setproxy method so we support the USERNAME/PASSWORD
-            # authentication (in addition to the standard none).
-            self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
-        else:
-            # No username/password were entered, therefore we
-            # only support connections with no authentication.
-            self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
-        # We'll receive the server's response to determine which
-        # method was selected
-        chosenauth = self.__recvall(2)
-        if chosenauth[0:1] != chr(0x05).encode():
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        # Check the chosen authentication method
-        if chosenauth[1:2] == chr(0x00).encode():
-            # No authentication is required
-            pass
-        elif chosenauth[1:2] == chr(0x02).encode():
-            # Okay, we need to perform a basic username/password
-            # authentication.
-            self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
-            authstat = self.__recvall(2)
-            if authstat[0:1] != chr(0x01).encode():
-                # Bad response
-                self.close()
-                raise GeneralProxyError((1, _generalerrors[1]))
-            if authstat[1:2] != chr(0x00).encode():
-                # Authentication failed
-                self.close()
-                raise Socks5AuthError((3, _socks5autherrors[3]))
-            # Authentication succeeded
-        else:
-            # Reaching here is always bad
-            self.close()
-            if chosenauth[1] == chr(0xFF).encode():
-                raise Socks5AuthError((2, _socks5autherrors[2]))
-            else:
-                raise GeneralProxyError((1, _generalerrors[1]))
-        # Now we can request the actual connection
-        req = struct.pack('BBB', 0x05, 0x01, 0x00)
-        # If the given destination address is an IP address, we'll
-        # use the IPv4 address request even if remote resolving was specified.
-        try:
-            ipaddr = socket.inet_aton(destaddr)
-            req = req + chr(0x01).encode() + ipaddr
-        except socket.error:
-            # Well it's not an IP number,  so it's probably a DNS name.
-            if self.__proxy[3]:
-                # Resolve remotely
-                ipaddr = None
-                req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
-            else:
-                # Resolve locally
-                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
-                req = req + chr(0x01).encode() + ipaddr
-        req = req + struct.pack(">H", destport)
-        self.sendall(req)
-        # Get the response
-        resp = self.__recvall(4)
-        if resp[0:1] != chr(0x05).encode():
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        elif resp[1:2] != chr(0x00).encode():
-            # Connection failed
-            self.close()
-            if ord(resp[1:2])<=8:
-                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
-            else:
-                raise Socks5Error((9, _socks5errors[9]))
-        # Get the bound address/port
-        elif resp[3:4] == chr(0x01).encode():
-            boundaddr = self.__recvall(4)
-        elif resp[3:4] == chr(0x03).encode():
-            resp = resp + self.recv(1)
-            boundaddr = self.__recvall(ord(resp[4:5]))
-        else:
-            self.close()
-            raise GeneralProxyError((1,_generalerrors[1]))
-        boundport = struct.unpack(">H", self.__recvall(2))[0]
-        self.__proxysockname = (boundaddr, boundport)
-        if ipaddr != None:
-            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
-        else:
-            self.__proxypeername = (destaddr, destport)
-
-    def getproxysockname(self):
-        """getsockname() -> address info
-        Returns the bound IP address and port number at the proxy.
-        """
-        return self.__proxysockname
-
-    def getproxypeername(self):
-        """getproxypeername() -> address info
-        Returns the IP and port number of the proxy.
-        """
-        return _orgsocket.getpeername(self)
-
-    def getpeername(self):
-        """getpeername() -> address info
-        Returns the IP address and port number of the destination
-        machine (note: getproxypeername returns the proxy)
-        """
-        return self.__proxypeername
-
-    def __negotiatesocks4(self,destaddr,destport):
-        """__negotiatesocks4(self,destaddr,destport)
-        Negotiates a connection through a SOCKS4 server.
-        """
-        # Check if the destination address provided is an IP address
-        rmtrslv = False
-        try:
-            ipaddr = socket.inet_aton(destaddr)
-        except socket.error:
-            # It's a DNS name. Check where it should be resolved.
-            if self.__proxy[3]:
-                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
-                rmtrslv = True
-            else:
-                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
-        # Construct the request packet
-        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
-        # The username parameter is considered userid for SOCKS4
-        if self.__proxy[4] != None:
-            req = req + self.__proxy[4]
-        req = req + chr(0x00).encode()
-        # DNS name if remote resolving is required
-        # NOTE: This is actually an extension to the SOCKS4 protocol
-        # called SOCKS4A and may not be supported in all cases.
-        if rmtrslv:
-            req = req + destaddr + chr(0x00).encode()
-        self.sendall(req)
-        # Get the response from the server
-        resp = self.__recvall(8)
-        if resp[0:1] != chr(0x00).encode():
-            # Bad data
-            self.close()
-            raise GeneralProxyError((1,_generalerrors[1]))
-        if resp[1:2] != chr(0x5A).encode():
-            # Server returned an error
-            self.close()
-            if ord(resp[1:2]) in (91, 92, 93):
-                self.close()
-                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
-            else:
-                raise Socks4Error((94, _socks4errors[4]))
-        # Get the bound address/port
-        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
-        if rmtrslv != None:
-            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
-        else:
-            self.__proxypeername = (destaddr, destport)
-
-    def __negotiatehttp(self, destaddr, destport):
-        """__negotiatehttp(self,destaddr,destport)
-        Negotiates a connection through an HTTP server.
-        """
-        # If we need to resolve locally, we do this now
-        if not self.__proxy[3]:
-            addr = socket.gethostbyname(destaddr)
-        else:
-            addr = destaddr
-        headers =  ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
-        headers += ["Host: ", destaddr, "\r\n"]
-        if (self.__proxy[4] != None and self.__proxy[5] != None):
-                headers += [self.__getauthheader(), "\r\n"]
-        headers.append("\r\n")
-        self.sendall("".join(headers).encode())
-        # We read the response until we get the string "\r\n\r\n"
-        resp = self.recv(1)
-        while resp.find("\r\n\r\n".encode()) == -1:
-            resp = resp + self.recv(1)
-        # We just need the first line to check if the connection
-        # was successful
-        statusline = resp.splitlines()[0].split(" ".encode(), 2)
-        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        try:
-            statuscode = int(statusline[1])
-        except ValueError:
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        if statuscode != 200:
-            self.close()
-            raise HTTPError((statuscode, statusline[2]))
-        self.__proxysockname = ("0.0.0.0", 0)
-        self.__proxypeername = (addr, destport)
-
-    def connect(self, destpair):
-        """connect(self, despair)
-        Connects to the specified destination through a proxy.
-        destpar - A tuple of the IP/DNS address and the port number.
-        (identical to socket's connect).
-        To select the proxy server use setproxy().
-        """
-        # Do a minimal input check first
-        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
-            raise GeneralProxyError((5, _generalerrors[5]))
-        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 1080
-            _orgsocket.connect(self, (self.__proxy[1], portnum))
-            self.__negotiatesocks5(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 1080
-            _orgsocket.connect(self,(self.__proxy[1], portnum))
-            self.__negotiatesocks4(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_HTTP:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 8080
-            _orgsocket.connect(self,(self.__proxy[1], portnum))
-            self.__negotiatehttp(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 8080
-            _orgsocket.connect(self,(self.__proxy[1],portnum))
-            if destpair[1] == 443:
-                self.__negotiatehttp(destpair[0],destpair[1])
-            else:
-                self.__httptunnel = False
-        elif self.__proxy[0] == None:
-            _orgsocket.connect(self, (destpair[0], destpair[1]))
-        else:
-            raise GeneralProxyError((4, _generalerrors[4]))
diff --git a/python/ext-libs/jinja2/__init__.py b/python/ext-libs/jinja2/__init__.py
deleted file mode 100644
index 6d0e988..0000000
--- a/python/ext-libs/jinja2/__init__.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2
-    ~~~~~~
-
-    Jinja2 is a template engine written in pure Python.  It provides a
-    Django inspired non-XML syntax but supports inline expressions and
-    an optional sandboxed environment.
-
-    Nutshell
-    --------
-
-    Here a small example of a Jinja2 template::
-
-        {% extends 'base.html' %}
-        {% block title %}Memberlist{% endblock %}
-        {% block content %}
-          <ul>
-          {% for user in users %}
-            <li><a href="{{ user.url }}">{{ user.username }}</a></li>
-          {% endfor %}
-          </ul>
-        {% endblock %}
-
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-__docformat__ = 'restructuredtext en'
-__version__ = '2.7.2'
-
-# high level interface
-from jinja2.environment import Environment, Template
-
-# loaders
-from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
-     DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
-     ModuleLoader
-
-# bytecode caches
-from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
-     MemcachedBytecodeCache
-
-# undefined types
-from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined
-
-# exceptions
-from jinja2.exceptions import TemplateError, UndefinedError, \
-     TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
-     TemplateAssertionError
-
-# decorators and public utilities
-from jinja2.filters import environmentfilter, contextfilter, \
-     evalcontextfilter
-from jinja2.utils import Markup, escape, clear_caches, \
-     environmentfunction, evalcontextfunction, contextfunction, \
-     is_undefined
-
-__all__ = [
-    'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
-    'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
-    'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
-    'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
-    'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
-    'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
-    'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
-    'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
-    'evalcontextfilter', 'evalcontextfunction'
-]
diff --git a/python/ext-libs/jinja2/_compat.py b/python/ext-libs/jinja2/_compat.py
deleted file mode 100644
index 8fa8a49..0000000
--- a/python/ext-libs/jinja2/_compat.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2._compat
-    ~~~~~~~~~~~~~~
-
-    Some py2/py3 compatibility support based on a stripped down
-    version of six so we don't have to depend on a specific version
-    of it.
-
-    :copyright: Copyright 2013 by the Jinja team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import sys
-
-PY2 = sys.version_info[0] == 2
-PYPY = hasattr(sys, 'pypy_translation_info')
-_identity = lambda x: x
-
-
-if not PY2:
-    unichr = chr
-    range_type = range
-    text_type = str
-    string_types = (str,)
-
-    iterkeys = lambda d: iter(d.keys())
-    itervalues = lambda d: iter(d.values())
-    iteritems = lambda d: iter(d.items())
-
-    import pickle
-    from io import BytesIO, StringIO
-    NativeStringIO = StringIO
-
-    def reraise(tp, value, tb=None):
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-
-    ifilter = filter
-    imap = map
-    izip = zip
-    intern = sys.intern
-
-    implements_iterator = _identity
-    implements_to_string = _identity
-    encode_filename = _identity
-    get_next = lambda x: x.__next__
-
-else:
-    unichr = unichr
-    text_type = unicode
-    range_type = xrange
-    string_types = (str, unicode)
-
-    iterkeys = lambda d: d.iterkeys()
-    itervalues = lambda d: d.itervalues()
-    iteritems = lambda d: d.iteritems()
-
-    import cPickle as pickle
-    from cStringIO import StringIO as BytesIO, StringIO
-    NativeStringIO = BytesIO
-
-    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
-
-    from itertools import imap, izip, ifilter
-    intern = intern
-
-    def implements_iterator(cls):
-        cls.next = cls.__next__
-        del cls.__next__
-        return cls
-
-    def implements_to_string(cls):
-        cls.__unicode__ = cls.__str__
-        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
-        return cls
-
-    get_next = lambda x: x.next
-
-    def encode_filename(filename):
-        if isinstance(filename, unicode):
-            return filename.encode('utf-8')
-        return filename
-
-try:
-    next = next
-except NameError:
-    def next(it):
-        return it.next()
-
-
-def with_metaclass(meta, *bases):
-    # This requires a bit of explanation: the basic idea is to make a
-    # dummy metaclass for one level of class instanciation that replaces
-    # itself with the actual metaclass.  Because of internal type checks
-    # we also need to make sure that we downgrade the custom metaclass
-    # for one level to something closer to type (that's why __call__ and
-    # __init__ comes back from type etc.).
-    #
-    # This has the advantage over six.with_metaclass in that it does not
-    # introduce dummy classes into the final MRO.
-    class metaclass(meta):
-        __call__ = type.__call__
-        __init__ = type.__init__
-        def __new__(cls, name, this_bases, d):
-            if this_bases is None:
-                return type.__new__(cls, name, (), d)
-            return meta(name, bases, d)
-    return metaclass('temporary_class', None, {})
-
-
-try:
-    from collections import Mapping as mapping_types
-except ImportError:
-    import UserDict
-    mapping_types = (UserDict.UserDict, UserDict.DictMixin, dict)
-
-
-# common types.  These do exist in the special types module too which however
-# does not exist in IronPython out of the box.  Also that way we don't have
-# to deal with implementation specific stuff here
-class _C(object):
-    def method(self): pass
-def _func():
-    yield None
-function_type = type(_func)
-generator_type = type(_func())
-method_type = type(_C().method)
-code_type = type(_C.method.__code__)
-try:
-    raise TypeError()
-except TypeError:
-    _tb = sys.exc_info()[2]
-    traceback_type = type(_tb)
-    frame_type = type(_tb.tb_frame)
-
-
-try:
-    from urllib.parse import quote_from_bytes as url_quote
-except ImportError:
-    from urllib import quote as url_quote
-
-
-try:
-    from thread import allocate_lock
-except ImportError:
-    try:
-        from threading import Lock as allocate_lock
-    except ImportError:
-        from dummy_thread import allocate_lock
diff --git a/python/ext-libs/jinja2/_stringdefs.py b/python/ext-libs/jinja2/_stringdefs.py
deleted file mode 100644
index da5830e..0000000
--- a/python/ext-libs/jinja2/_stringdefs.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2._stringdefs
-    ~~~~~~~~~~~~~~~~~~
-
-    Strings of all Unicode characters of a certain category.
-    Used for matching in Unicode-aware languages. Run to regenerate.
-
-    Inspired by chartypes_create.py from the MoinMoin project, original
-    implementation from Pygments.
-
-    :copyright: Copyright 2006-2009 by the Jinja team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from jinja2._compat import unichr
-
-Cc = u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f'
-
-Cf = u'\xad\u0600\u0601\u0602\u0603\u06dd\u070f\u17b4\u17b5\u200b\u200c\u200d\u200e\u200f\u202a\u202b\u202c\u202d\u202e\u2060\u2061\u2062\u2063\u206a\u206b\u206c\u206d\u206e\u206f\ufeff\ufff9\ufffa\ufffb'
-
-Cn = u'\u0242\u0243\u0244\u0245\u0246\u0247\u0248\u0249\u024a\u024b\u024c\u024d\u024e\u024f\u0370\u0371\u0372\u0373\u0376\u0377\u0378\u0379\u037b\u037c\u037d\u037f\u0380\u0381\u0382\u0383\u038b\u038d\u03a2\u03cf\u0487\u04cf\u04fa\u04fb\u04fc\u04fd\u04fe\u04ff\u0510\u0511\u0512\u0513\u0514\u0515\u0516\u0517\u0518\u0519\u051a\u051b\u051c\u051d\u051e\u051f\u0520\u0521\u0522\u0523\u0524\u0525\u0526\u0527\u0528\u0529\u052a\u052b\u052c\u052d\u052e\u052f\u0530\u0557\u0558\u0560\u0588\u058b\u058 [...]
-
-Co = u'\ue000\ue001\ue002\ue003\ue004\ue005\ue006\ue007\ue008\ue009\ue00a\ue00b\ue00c\ue00d\ue00e\ue00f\ue010\ue011\ue012\ue013\ue014\ue015\ue016\ue017\ue018\ue019\ue01a\ue01b\ue01c\ue01d\ue01e\ue01f\ue020\ue021\ue022\ue023\ue024\ue025\ue026\ue027\ue028\ue029\ue02a\ue02b\ue02c\ue02d\ue02e\ue02f\ue030\ue031\ue032\ue033\ue034\ue035\ue036\ue037\ue038\ue039\ue03a\ue03b\ue03c\ue03d\ue03e\ue03f\ue040\ue041\ue042\ue043\ue044\ue045\ue046\ue047\ue048\ue049\ue04a\ue04b\ue04c\ue04d\ue04e\ue04f\ue05 [...]
-
-try:
-    Cs = eval(r"'\ud800\ud801\ud802\ud803\ud804\ud805\ud806\ud807\ud808\ud809\ud80a\ud80b\ud80c\ud80d\ud80e\ud80f\ud810\ud811\ud812\ud813\ud814\ud815\ud816\ud817\ud818\ud819\ud81a\ud81b\ud81c\ud81d\ud81e\ud81f\ud820\ud821\ud822\ud823\ud824\ud825\ud826\ud827\ud828\ud829\ud82a\ud82b\ud82c\ud82d\ud82e\ud82f\ud830\ud831\ud832\ud833\ud834\ud835\ud836\ud837\ud838\ud839\ud83a\ud83b\ud83c\ud83d\ud83e\ud83f\ud840\ud841\ud842\ud843\ud844\ud845\ud846\ud847\ud848\ud849\ud84a\ud84b\ud84c\ud84d\ud84e\ [...]
-except UnicodeDecodeError:
-    Cs = '' # Jython can't handle isolated surrogates
-
-Ll = u'abcdefghijklmnopqrstuvwxyz\xaa\xb5\xba\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\ [...]
-
-Lm = u'\u02b0\u02b1\u02b2\u02b3\u02b4\u02b5\u02b6\u02b7\u02b8\u02b9\u02ba\u02bb\u02bc\u02bd\u02be\u02bf\u02c0\u02c1\u02c6\u02c7\u02c8\u02c9\u02ca\u02cb\u02cc\u02cd\u02ce\u02cf\u02d0\u02d1\u02e0\u02e1\u02e2\u02e3\u02e4\u02ee\u037a\u0559\u0640\u06e5\u06e6\u0e46\u0ec6\u10fc\u17d7\u1843\u1d2c\u1d2d\u1d2e\u1d2f\u1d30\u1d31\u1d32\u1d33\u1d34\u1d35\u1d36\u1d37\u1d38\u1d39\u1d3a\u1d3b\u1d3c\u1d3d\u1d3e\u1d3f\u1d40\u1d41\u1d42\u1d43\u1d44\u1d45\u1d46\u1d47\u1d48\u1d49\u1d4a\u1d4b\u1d4c\u1d4d\u1d4 [...]
-
-Lo = u'\u01bb\u01c0\u01c1\u01c2\u01c3\u05d0\u05d1\u05d2\u05d3\u05d4\u05d5\u05d6\u05d7\u05d8\u05d9\u05da\u05db\u05dc\u05dd\u05de\u05df\u05e0\u05e1\u05e2\u05e3\u05e4\u05e5\u05e6\u05e7\u05e8\u05e9\u05ea\u05f0\u05f1\u05f2\u0621\u0622\u0623\u0624\u0625\u0626\u0627\u0628\u0629\u062a\u062b\u062c\u062d\u062e\u062f\u0630\u0631\u0632\u0633\u0634\u0635\u0636\u0637\u0638\u0639\u063a\u0641\u0642\u0643\u0644\u0645\u0646\u0647\u0648\u0649\u064a\u066e\u066f\u0671\u0672\u0673\u0674\u0675\u0676\u0677\u067 [...]
-
-Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88\u1f89\u1f8a\u1f8b\u1f8c\u1f8d\u1f8e\u1f8f\u1f98\u1f99\u1f9a\u1f9b\u1f9c\u1f9d\u1f9e\u1f9f\u1fa8\u1fa9\u1faa\u1fab\u1fac\u1fad\u1fae\u1faf\u1fbc\u1fcc\u1ffc'
-
-Lu = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0 [...]
-
-Mc = u'\u0903\u093e\u093f\u0940\u0949\u094a\u094b\u094c\u0982\u0983\u09be\u09bf\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e\u0a3f\u0a40\u0a83\u0abe\u0abf\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6\u0bc7\u0bc8\u0bca\u0bcb\u0bcc\u0bd7\u0c01\u0c02\u0c03\u0c41\u0c42\u0c43\u0c44\u0c82\u0c83\u0cbe\u0cc0\u0cc1\u0cc2\u0cc3\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e\u0d3f\u0d40\u0d46\u0d47\u0d48\u0d4a\u0d4b\u0d4 [...]
-
-Me = u'\u0488\u0489\u06de\u20dd\u20de\u20df\u20e0\u20e2\u20e3\u20e4'
-
-Mn = u'\u0300\u0301\u0302\u0303\u0304\u0305\u0306\u0307\u0308\u0309\u030a\u030b\u030c\u030d\u030e\u030f\u0310\u0311\u0312\u0313\u0314\u0315\u0316\u0317\u0318\u0319\u031a\u031b\u031c\u031d\u031e\u031f\u0320\u0321\u0322\u0323\u0324\u0325\u0326\u0327\u0328\u0329\u032a\u032b\u032c\u032d\u032e\u032f\u0330\u0331\u0332\u0333\u0334\u0335\u0336\u0337\u0338\u0339\u033a\u033b\u033c\u033d\u033e\u033f\u0340\u0341\u0342\u0343\u0344\u0345\u0346\u0347\u0348\u0349\u034a\u034b\u034c\u034d\u034e\u034f\u035 [...]
-
-Nd = u'0123456789\u0660\u0661\u0662\u0663\u0664\u0665\u0666\u0667\u0668\u0669\u06f0\u06f1\u06f2\u06f3\u06f4\u06f5\u06f6\u06f7\u06f8\u06f9\u0966\u0967\u0968\u0969\u096a\u096b\u096c\u096d\u096e\u096f\u09e6\u09e7\u09e8\u09e9\u09ea\u09eb\u09ec\u09ed\u09ee\u09ef\u0a66\u0a67\u0a68\u0a69\u0a6a\u0a6b\u0a6c\u0a6d\u0a6e\u0a6f\u0ae6\u0ae7\u0ae8\u0ae9\u0aea\u0aeb\u0aec\u0aed\u0aee\u0aef\u0b66\u0b67\u0b68\u0b69\u0b6a\u0b6b\u0b6c\u0b6d\u0b6e\u0b6f\u0be6\u0be7\u0be8\u0be9\u0bea\u0beb\u0bec\u0bed\u0bee\ [...]
-
-Nl = u'\u16ee\u16ef\u16f0\u2160\u2161\u2162\u2163\u2164\u2165\u2166\u2167\u2168\u2169\u216a\u216b\u216c\u216d\u216e\u216f\u2170\u2171\u2172\u2173\u2174\u2175\u2176\u2177\u2178\u2179\u217a\u217b\u217c\u217d\u217e\u217f\u2180\u2181\u2182\u2183\u3007\u3021\u3022\u3023\u3024\u3025\u3026\u3027\u3028\u3029\u3038\u3039\u303a'
-
-No = u'\xb2\xb3\xb9\xbc\xbd\xbe\u09f4\u09f5\u09f6\u09f7\u09f8\u09f9\u0bf0\u0bf1\u0bf2\u0f2a\u0f2b\u0f2c\u0f2d\u0f2e\u0f2f\u0f30\u0f31\u0f32\u0f33\u1369\u136a\u136b\u136c\u136d\u136e\u136f\u1370\u1371\u1372\u1373\u1374\u1375\u1376\u1377\u1378\u1379\u137a\u137b\u137c\u17f0\u17f1\u17f2\u17f3\u17f4\u17f5\u17f6\u17f7\u17f8\u17f9\u2070\u2074\u2075\u2076\u2077\u2078\u2079\u2080\u2081\u2082\u2083\u2084\u2085\u2086\u2087\u2088\u2089\u2153\u2154\u2155\u2156\u2157\u2158\u2159\u215a\u215b\u215c\u215 [...]
-
-Pc = u'_\u203f\u2040\u2054\ufe33\ufe34\ufe4d\ufe4e\ufe4f\uff3f'
-
-Pd = u'-\u058a\u1806\u2010\u2011\u2012\u2013\u2014\u2015\u2e17\u301c\u3030\u30a0\ufe31\ufe32\ufe58\ufe63\uff0d'
-
-Pe = u')]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u232a\u23b5\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e\u301f\ufd3f\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-
-Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d'
-
-Pi = u'\xab\u2018\u201b\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c'
-
-Po = u'!"#%&\'*,./:;?@\\\xa1\xb7\xbf\u037e\u0387\u055a\u055b\u055c\u055d\u055e\u055f\u0589\u05be\u05c0\u05c3\u05c6\u05f3\u05f4\u060c\u060d\u061b\u061e\u061f\u066a\u066b\u066c\u066d\u06d4\u0700\u0701\u0702\u0703\u0704\u0705\u0706\u0707\u0708\u0709\u070a\u070b\u070c\u070d\u0964\u0965\u0970\u0df4\u0e4f\u0e5a\u0e5b\u0f04\u0f05\u0f06\u0f07\u0f08\u0f09\u0f0a\u0f0b\u0f0c\u0f0d\u0f0e\u0f0f\u0f10\u0f11\u0f12\u0f85\u0fd0\u0fd1\u104a\u104b\u104c\u104d\u104e\u104f\u10fb\u1361\u1362\u1363\u1364\u1365 [...]
-
-Ps = u'([{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2329\u23b4\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3e\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-
-Sc = u'$\xa2\xa3\xa4\xa5\u060b\u09f2\u09f3\u0af1\u0bf9\u0e3f\u17db\u20a0\u20a1\u20a2\u20a3\u20a4\u20a5\u20a6\u20a7\u20a8\u20a9\u20aa\u20ab\u20ac\u20ad\u20ae\u20af\u20b0\u20b1\u20b2\u20b3\u20b4\u20b5\ufdfc\ufe69\uff04\uffe0\uffe1\uffe5\uffe6'
-
-Sk = u'^`\xa8\xaf\xb4\xb8\u02c2\u02c3\u02c4\u02c5\u02d2\u02d3\u02d4\u02d5\u02d6\u02d7\u02d8\u02d9\u02da\u02db\u02dc\u02dd\u02de\u02df\u02e5\u02e6\u02e7\u02e8\u02e9\u02ea\u02eb\u02ec\u02ed\u02ef\u02f0\u02f1\u02f2\u02f3\u02f4\u02f5\u02f6\u02f7\u02f8\u02f9\u02fa\u02fb\u02fc\u02fd\u02fe\u02ff\u0374\u0375\u0384\u0385\u1fbd\u1fbf\u1fc0\u1fc1\u1fcd\u1fce\u1fcf\u1fdd\u1fde\u1fdf\u1fed\u1fee\u1fef\u1ffd\u1ffe\u309b\u309c\ua700\ua701\ua702\ua703\ua704\ua705\ua706\ua707\ua708\ua709\ua70a\ua70b\ua70 [...]
-
-Sm = u'+<=>|~\xac\xb1\xd7\xf7\u03f6\u2044\u2052\u207a\u207b\u207c\u208a\u208b\u208c\u2140\u2141\u2142\u2143\u2144\u214b\u2190\u2191\u2192\u2193\u2194\u219a\u219b\u21a0\u21a3\u21a6\u21ae\u21ce\u21cf\u21d2\u21d4\u21f4\u21f5\u21f6\u21f7\u21f8\u21f9\u21fa\u21fb\u21fc\u21fd\u21fe\u21ff\u2200\u2201\u2202\u2203\u2204\u2205\u2206\u2207\u2208\u2209\u220a\u220b\u220c\u220d\u220e\u220f\u2210\u2211\u2212\u2213\u2214\u2215\u2216\u2217\u2218\u2219\u221a\u221b\u221c\u221d\u221e\u221f\u2220\u2221\u2222\ [...]
-
-So = u'\xa6\xa7\xa9\xae\xb0\xb6\u0482\u060e\u060f\u06e9\u06fd\u06fe\u09fa\u0b70\u0bf3\u0bf4\u0bf5\u0bf6\u0bf7\u0bf8\u0bfa\u0f01\u0f02\u0f03\u0f13\u0f14\u0f15\u0f16\u0f17\u0f1a\u0f1b\u0f1c\u0f1d\u0f1e\u0f1f\u0f34\u0f36\u0f38\u0fbe\u0fbf\u0fc0\u0fc1\u0fc2\u0fc3\u0fc4\u0fc5\u0fc7\u0fc8\u0fc9\u0fca\u0fcb\u0fcc\u0fcf\u1360\u1390\u1391\u1392\u1393\u1394\u1395\u1396\u1397\u1398\u1399\u1940\u19e0\u19e1\u19e2\u19e3\u19e4\u19e5\u19e6\u19e7\u19e8\u19e9\u19ea\u19eb\u19ec\u19ed\u19ee\u19ef\u19f0\u19f [...]
-
-Zl = u'\u2028'
-
-Zp = u'\u2029'
-
-Zs = u' \xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'
-
-cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-
-def combine(*args):
-    return u''.join([globals()[cat] for cat in args])
-
-xid_start = u'\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0621-\u063A\u0640\u0641-\u064A\u066E-\u066F\u0671-\u06D3\u06D5\u06E5-\u06E6\u06EE-\u06EF\u06FA-\u06FC\u06FF\u0 [...]
-
-xid_continue = u'\u0030-\u0039\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00B7\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0300-\u036F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u0483-\u0486\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05B9\u05BB-\u05BD\u05BF\u05C1-\u05C2\u05C4-\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2 [...]
-
-def allexcept(*args):
-    newcats = cats[:]
-    for arg in args:
-        newcats.remove(arg)
-    return u''.join([globals()[cat] for cat in newcats])
-
-if __name__ == '__main__':
-    import unicodedata
-
-    categories = {}
-
-    f = open(__file__.rstrip('co'))
-    try:
-        content = f.read()
-    finally:
-        f.close()
-
-    header = content[:content.find('Cc =')]
-    footer = content[content.find("def combine("):]
-
-    for code in range(65535):
-        c = unichr(code)
-        cat = unicodedata.category(c)
-        categories.setdefault(cat, []).append(c)
-
-    f = open(__file__, 'w')
-    f.write(header)
-
-    for cat in sorted(categories):
-        val = u''.join(categories[cat])
-        if cat == 'Cs':
-            # Jython can't handle isolated surrogates
-            f.write("""\
-try:
-    Cs = eval(r"%r")
-except UnicodeDecodeError:
-    Cs = '' # Jython can't handle isolated surrogates\n\n""" % val)
-        else:
-            f.write('%s = %r\n\n' % (cat, val))
-    f.write('cats = %r\n\n' % sorted(categories.keys()))
-
-    f.write(footer)
-    f.close()
diff --git a/python/ext-libs/jinja2/bccache.py b/python/ext-libs/jinja2/bccache.py
deleted file mode 100644
index 09ff845..0000000
--- a/python/ext-libs/jinja2/bccache.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.bccache
-    ~~~~~~~~~~~~~~
-
-    This module implements the bytecode cache system Jinja is optionally
-    using.  This is useful if you have very complex template situations and
-    the compiliation of all those templates slow down your application too
-    much.
-
-    Situations where this is useful are often forking web applications that
-    are initialized on the first request.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-from os import path, listdir
-import os
-import sys
-import errno
-import marshal
-import tempfile
-import fnmatch
-from hashlib import sha1
-from jinja2.utils import open_if_exists
-from jinja2._compat import BytesIO, pickle, PY2, text_type
-
-
-# marshal works better on 3.x, one hack less required
-if not PY2:
-    marshal_dump = marshal.dump
-    marshal_load = marshal.load
-else:
-
-    def marshal_dump(code, f):
-        if isinstance(f, file):
-            marshal.dump(code, f)
-        else:
-            f.write(marshal.dumps(code))
-
-    def marshal_load(f):
-        if isinstance(f, file):
-            return marshal.load(f)
-        return marshal.loads(f.read())
-
-
-bc_version = 2
-
-# magic version used to only change with new jinja versions.  With 2.6
-# we change this to also take Python version changes into account.  The
-# reason for this is that Python tends to segfault if fed earlier bytecode
-# versions because someone thought it would be a good idea to reuse opcodes
-# or make Python incompatible with earlier versions.
-bc_magic = 'j2'.encode('ascii') + \
-    pickle.dumps(bc_version, 2) + \
-    pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
-
-
-class Bucket(object):
-    """Buckets are used to store the bytecode for one template.  It's created
-    and initialized by the bytecode cache and passed to the loading functions.
-
-    The buckets get an internal checksum from the cache assigned and use this
-    to automatically reject outdated cache material.  Individual bytecode
-    cache subclasses don't have to care about cache invalidation.
-    """
-
-    def __init__(self, environment, key, checksum):
-        self.environment = environment
-        self.key = key
-        self.checksum = checksum
-        self.reset()
-
-    def reset(self):
-        """Resets the bucket (unloads the bytecode)."""
-        self.code = None
-
-    def load_bytecode(self, f):
-        """Loads bytecode from a file or file like object."""
-        # make sure the magic header is correct
-        magic = f.read(len(bc_magic))
-        if magic != bc_magic:
-            self.reset()
-            return
-        # the source code of the file changed, we need to reload
-        checksum = pickle.load(f)
-        if self.checksum != checksum:
-            self.reset()
-            return
-        self.code = marshal_load(f)
-
-    def write_bytecode(self, f):
-        """Dump the bytecode into the file or file like object passed."""
-        if self.code is None:
-            raise TypeError('can\'t write empty bucket')
-        f.write(bc_magic)
-        pickle.dump(self.checksum, f, 2)
-        marshal_dump(self.code, f)
-
-    def bytecode_from_string(self, string):
-        """Load bytecode from a string."""
-        self.load_bytecode(BytesIO(string))
-
-    def bytecode_to_string(self):
-        """Return the bytecode as string."""
-        out = BytesIO()
-        self.write_bytecode(out)
-        return out.getvalue()
-
-
-class BytecodeCache(object):
-    """To implement your own bytecode cache you have to subclass this class
-    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
-    these methods are passed a :class:`~jinja2.bccache.Bucket`.
-
-    A very basic bytecode cache that saves the bytecode on the file system::
-
-        from os import path
-
-        class MyCache(BytecodeCache):
-
-            def __init__(self, directory):
-                self.directory = directory
-
-            def load_bytecode(self, bucket):
-                filename = path.join(self.directory, bucket.key)
-                if path.exists(filename):
-                    with open(filename, 'rb') as f:
-                        bucket.load_bytecode(f)
-
-            def dump_bytecode(self, bucket):
-                filename = path.join(self.directory, bucket.key)
-                with open(filename, 'wb') as f:
-                    bucket.write_bytecode(f)
-
-    A more advanced version of a filesystem based bytecode cache is part of
-    Jinja2.
-    """
-
-    def load_bytecode(self, bucket):
-        """Subclasses have to override this method to load bytecode into a
-        bucket.  If they are not able to find code in the cache for the
-        bucket, it must not do anything.
-        """
-        raise NotImplementedError()
-
-    def dump_bytecode(self, bucket):
-        """Subclasses have to override this method to write the bytecode
-        from a bucket back to the cache.  If it unable to do so it must not
-        fail silently but raise an exception.
-        """
-        raise NotImplementedError()
-
-    def clear(self):
-        """Clears the cache.  This method is not used by Jinja2 but should be
-        implemented to allow applications to clear the bytecode cache used
-        by a particular environment.
-        """
-
-    def get_cache_key(self, name, filename=None):
-        """Returns the unique hash key for this template name."""
-        hash = sha1(name.encode('utf-8'))
-        if filename is not None:
-            filename = '|' + filename
-            if isinstance(filename, text_type):
-                filename = filename.encode('utf-8')
-            hash.update(filename)
-        return hash.hexdigest()
-
-    def get_source_checksum(self, source):
-        """Returns a checksum for the source."""
-        return sha1(source.encode('utf-8')).hexdigest()
-
-    def get_bucket(self, environment, name, filename, source):
-        """Return a cache bucket for the given template.  All arguments are
-        mandatory but filename may be `None`.
-        """
-        key = self.get_cache_key(name, filename)
-        checksum = self.get_source_checksum(source)
-        bucket = Bucket(environment, key, checksum)
-        self.load_bytecode(bucket)
-        return bucket
-
-    def set_bucket(self, bucket):
-        """Put the bucket into the cache."""
-        self.dump_bytecode(bucket)
-
-
-class FileSystemBytecodeCache(BytecodeCache):
-    """A bytecode cache that stores bytecode on the filesystem.  It accepts
-    two arguments: The directory where the cache items are stored and a
-    pattern string that is used to build the filename.
-
-    If no directory is specified a default cache directory is selected.  On
-    Windows the user's temp directory is used, on UNIX systems a directory
-    is created for the user in the system temp directory.
-
-    The pattern can be used to have multiple separate caches operate on the
-    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
-    is replaced with the cache key.
-
-    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
-
-    This bytecode cache supports clearing of the cache using the clear method.
-    """
-
-    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
-        if directory is None:
-            directory = self._get_default_cache_dir()
-        self.directory = directory
-        self.pattern = pattern
-
-    def _get_default_cache_dir(self):
-        tmpdir = tempfile.gettempdir()
-
-        # On windows the temporary directory is used specific unless
-        # explicitly forced otherwise.  We can just use that.
-        if os.name == 'n':
-            return tmpdir
-        if not hasattr(os, 'getuid'):
-            raise RuntimeError('Cannot determine safe temp directory.  You '
-                               'need to explicitly provide one.')
-
-        dirname = '_jinja2-cache-%d' % os.getuid()
-        actual_dir = os.path.join(tmpdir, dirname)
-        try:
-            # 448 == 0700
-            os.mkdir(actual_dir, 448)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-
-        return actual_dir
-
-    def _get_cache_filename(self, bucket):
-        return path.join(self.directory, self.pattern % bucket.key)
-
-    def load_bytecode(self, bucket):
-        f = open_if_exists(self._get_cache_filename(bucket), 'rb')
-        if f is not None:
-            try:
-                bucket.load_bytecode(f)
-            finally:
-                f.close()
-
-    def dump_bytecode(self, bucket):
-        f = open(self._get_cache_filename(bucket), 'wb')
-        try:
-            bucket.write_bytecode(f)
-        finally:
-            f.close()
-
-    def clear(self):
-        # imported lazily here because google app-engine doesn't support
-        # write access on the file system and the function does not exist
-        # normally.
-        from os import remove
-        files = fnmatch.filter(listdir(self.directory), self.pattern % '*')
-        for filename in files:
-            try:
-                remove(path.join(self.directory, filename))
-            except OSError:
-                pass
-
-
-class MemcachedBytecodeCache(BytecodeCache):
-    """This class implements a bytecode cache that uses a memcache cache for
-    storing the information.  It does not enforce a specific memcache library
-    (tummy's memcache or cmemcache) but will accept any class that provides
-    the minimal interface required.
-
-    Libraries compatible with this class:
-
-    -   `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
-    -   `python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
-    -   `cmemcache <http://gijsbert.org/cmemcache/>`_
-
-    (Unfortunately the django cache interface is not compatible because it
-    does not support storing binary data, only unicode.  You can however pass
-    the underlying cache client to the bytecode cache which is available
-    as `django.core.cache.cache._client`.)
-
-    The minimal interface for the client passed to the constructor is this:
-
-    .. class:: MinimalClientInterface
-
-        .. method:: set(key, value[, timeout])
-
-            Stores the bytecode in the cache.  `value` is a string and
-            `timeout` the timeout of the key.  If timeout is not provided
-            a default timeout or no timeout should be assumed, if it's
-            provided it's an integer with the number of seconds the cache
-            item should exist.
-
-        .. method:: get(key)
-
-            Returns the value for the cache key.  If the item does not
-            exist in the cache the return value must be `None`.
-
-    The other arguments to the constructor are the prefix for all keys that
-    is added before the actual cache key and the timeout for the bytecode in
-    the cache system.  We recommend a high (or no) timeout.
-
-    This bytecode cache does not support clearing of used items in the cache.
-    The clear method is a no-operation function.
-
-    .. versionadded:: 2.7
-       Added support for ignoring memcache errors through the
-       `ignore_memcache_errors` parameter.
-    """
-
-    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
-                 ignore_memcache_errors=True):
-        self.client = client
-        self.prefix = prefix
-        self.timeout = timeout
-        self.ignore_memcache_errors = ignore_memcache_errors
-
-    def load_bytecode(self, bucket):
-        try:
-            code = self.client.get(self.prefix + bucket.key)
-        except Exception:
-            if not self.ignore_memcache_errors:
-                raise
-            code = None
-        if code is not None:
-            bucket.bytecode_from_string(code)
-
-    def dump_bytecode(self, bucket):
-        args = (self.prefix + bucket.key, bucket.bytecode_to_string())
-        if self.timeout is not None:
-            args += (self.timeout,)
-        try:
-            self.client.set(*args)
-        except Exception:
-            if not self.ignore_memcache_errors:
-                raise
diff --git a/python/ext-libs/jinja2/compiler.py b/python/ext-libs/jinja2/compiler.py
deleted file mode 100644
index 75a60b8..0000000
--- a/python/ext-libs/jinja2/compiler.py
+++ /dev/null
@@ -1,1640 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.compiler
-    ~~~~~~~~~~~~~~~
-
-    Compiles nodes into python code.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-from itertools import chain
-from copy import deepcopy
-from keyword import iskeyword as is_python_keyword
-from jinja2 import nodes
-from jinja2.nodes import EvalContext
-from jinja2.visitor import NodeVisitor
-from jinja2.exceptions import TemplateAssertionError
-from jinja2.utils import Markup, concat, escape
-from jinja2._compat import range_type, next, text_type, string_types, \
-     iteritems, NativeStringIO, imap
-
-
-operators = {
-    'eq':       '==',
-    'ne':       '!=',
-    'gt':       '>',
-    'gteq':     '>=',
-    'lt':       '<',
-    'lteq':     '<=',
-    'in':       'in',
-    'notin':    'not in'
-}
-
-# what method to iterate over items do we want to use for dict iteration
-# in generated code?  on 2.x let's go with iteritems, on 3.x with items
-if hasattr(dict, 'iteritems'):
-    dict_item_iter = 'iteritems'
-else:
-    dict_item_iter = 'items'
-
-
-# does if 0: dummy(x) get us x into the scope?
-def unoptimize_before_dead_code():
-    x = 42
-    def f():
-        if 0: dummy(x)
-    return f
-
-# The getattr is necessary for pypy which does not set this attribute if
-# no closure is on the function
-unoptimize_before_dead_code = bool(
-    getattr(unoptimize_before_dead_code(), '__closure__', None))
-
-
-def generate(node, environment, name, filename, stream=None,
-             defer_init=False):
-    """Generate the python source for a node tree."""
-    if not isinstance(node, nodes.Template):
-        raise TypeError('Can\'t compile non template nodes')
-    generator = CodeGenerator(environment, name, filename, stream, defer_init)
-    generator.visit(node)
-    if stream is None:
-        return generator.stream.getvalue()
-
-
-def has_safe_repr(value):
-    """Does the node have a safe representation?"""
-    if value is None or value is NotImplemented or value is Ellipsis:
-        return True
-    if isinstance(value, (bool, int, float, complex, range_type,
-            Markup) + string_types):
-        return True
-    if isinstance(value, (tuple, list, set, frozenset)):
-        for item in value:
-            if not has_safe_repr(item):
-                return False
-        return True
-    elif isinstance(value, dict):
-        for key, value in iteritems(value):
-            if not has_safe_repr(key):
-                return False
-            if not has_safe_repr(value):
-                return False
-        return True
-    return False
-
-
-def find_undeclared(nodes, names):
-    """Check if the names passed are accessed undeclared.  The return value
-    is a set of all the undeclared names from the sequence of names found.
-    """
-    visitor = UndeclaredNameVisitor(names)
-    try:
-        for node in nodes:
-            visitor.visit(node)
-    except VisitorExit:
-        pass
-    return visitor.undeclared
-
-
-class Identifiers(object):
-    """Tracks the status of identifiers in frames."""
-
-    def __init__(self):
-        # variables that are known to be declared (probably from outer
-        # frames or because they are special for the frame)
-        self.declared = set()
-
-        # undeclared variables from outer scopes
-        self.outer_undeclared = set()
-
-        # names that are accessed without being explicitly declared by
-        # this one or any of the outer scopes.  Names can appear both in
-        # declared and undeclared.
-        self.undeclared = set()
-
-        # names that are declared locally
-        self.declared_locally = set()
-
-        # names that are declared by parameters
-        self.declared_parameter = set()
-
-    def add_special(self, name):
-        """Register a special name like `loop`."""
-        self.undeclared.discard(name)
-        self.declared.add(name)
-
-    def is_declared(self, name):
-        """Check if a name is declared in this or an outer scope."""
-        if name in self.declared_locally or name in self.declared_parameter:
-            return True
-        return name in self.declared
-
-    def copy(self):
-        return deepcopy(self)
-
-
-class Frame(object):
-    """Holds compile time information for us."""
-
-    def __init__(self, eval_ctx, parent=None):
-        self.eval_ctx = eval_ctx
-        self.identifiers = Identifiers()
-
-        # a toplevel frame is the root + soft frames such as if conditions.
-        self.toplevel = False
-
-        # the root frame is basically just the outermost frame, so no if
-        # conditions.  This information is used to optimize inheritance
-        # situations.
-        self.rootlevel = False
-
-        # in some dynamic inheritance situations the compiler needs to add
-        # write tests around output statements.
-        self.require_output_check = parent and parent.require_output_check
-
-        # inside some tags we are using a buffer rather than yield statements.
-        # this for example affects {% filter %} or {% macro %}.  If a frame
-        # is buffered this variable points to the name of the list used as
-        # buffer.
-        self.buffer = None
-
-        # the name of the block we're in, otherwise None.
-        self.block = parent and parent.block or None
-
-        # a set of actually assigned names
-        self.assigned_names = set()
-
-        # the parent of this frame
-        self.parent = parent
-
-        if parent is not None:
-            self.identifiers.declared.update(
-                parent.identifiers.declared |
-                parent.identifiers.declared_parameter |
-                parent.assigned_names
-            )
-            self.identifiers.outer_undeclared.update(
-                parent.identifiers.undeclared -
-                self.identifiers.declared
-            )
-            self.buffer = parent.buffer
-
-    def copy(self):
-        """Create a copy of the current one."""
-        rv = object.__new__(self.__class__)
-        rv.__dict__.update(self.__dict__)
-        rv.identifiers = object.__new__(self.identifiers.__class__)
-        rv.identifiers.__dict__.update(self.identifiers.__dict__)
-        return rv
-
-    def inspect(self, nodes):
-        """Walk the node and check for identifiers.  If the scope is hard (eg:
-        enforce on a python level) overrides from outer scopes are tracked
-        differently.
-        """
-        visitor = FrameIdentifierVisitor(self.identifiers)
-        for node in nodes:
-            visitor.visit(node)
-
-    def find_shadowed(self, extra=()):
-        """Find all the shadowed names.  extra is an iterable of variables
-        that may be defined with `add_special` which may occour scoped.
-        """
-        i = self.identifiers
-        return (i.declared | i.outer_undeclared) & \
-               (i.declared_locally | i.declared_parameter) | \
-               set(x for x in extra if i.is_declared(x))
-
-    def inner(self):
-        """Return an inner frame."""
-        return Frame(self.eval_ctx, self)
-
-    def soft(self):
-        """Return a soft frame.  A soft frame may not be modified as
-        standalone thing as it shares the resources with the frame it
-        was created of, but it's not a rootlevel frame any longer.
-        """
-        rv = self.copy()
-        rv.rootlevel = False
-        return rv
-
-    __copy__ = copy
-
-
-class VisitorExit(RuntimeError):
-    """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
-
-
-class DependencyFinderVisitor(NodeVisitor):
-    """A visitor that collects filter and test calls."""
-
-    def __init__(self):
-        self.filters = set()
-        self.tests = set()
-
-    def visit_Filter(self, node):
-        self.generic_visit(node)
-        self.filters.add(node.name)
-
-    def visit_Test(self, node):
-        self.generic_visit(node)
-        self.tests.add(node.name)
-
-    def visit_Block(self, node):
-        """Stop visiting at blocks."""
-
-
-class UndeclaredNameVisitor(NodeVisitor):
-    """A visitor that checks if a name is accessed without being
-    declared.  This is different from the frame visitor as it will
-    not stop at closure frames.
-    """
-
-    def __init__(self, names):
-        self.names = set(names)
-        self.undeclared = set()
-
-    def visit_Name(self, node):
-        if node.ctx == 'load' and node.name in self.names:
-            self.undeclared.add(node.name)
-            if self.undeclared == self.names:
-                raise VisitorExit()
-        else:
-            self.names.discard(node.name)
-
-    def visit_Block(self, node):
-        """Stop visiting a blocks."""
-
-
-class FrameIdentifierVisitor(NodeVisitor):
-    """A visitor for `Frame.inspect`."""
-
-    def __init__(self, identifiers):
-        self.identifiers = identifiers
-
-    def visit_Name(self, node):
-        """All assignments to names go through this function."""
-        if node.ctx == 'store':
-            self.identifiers.declared_locally.add(node.name)
-        elif node.ctx == 'param':
-            self.identifiers.declared_parameter.add(node.name)
-        elif node.ctx == 'load' and not \
-             self.identifiers.is_declared(node.name):
-            self.identifiers.undeclared.add(node.name)
-
-    def visit_If(self, node):
-        self.visit(node.test)
-        real_identifiers = self.identifiers
-
-        old_names = real_identifiers.declared_locally | \
-                    real_identifiers.declared_parameter
-
-        def inner_visit(nodes):
-            if not nodes:
-                return set()
-            self.identifiers = real_identifiers.copy()
-            for subnode in nodes:
-                self.visit(subnode)
-            rv = self.identifiers.declared_locally - old_names
-            # we have to remember the undeclared variables of this branch
-            # because we will have to pull them.
-            real_identifiers.undeclared.update(self.identifiers.undeclared)
-            self.identifiers = real_identifiers
-            return rv
-
-        body = inner_visit(node.body)
-        else_ = inner_visit(node.else_ or ())
-
-        # the differences between the two branches are also pulled as
-        # undeclared variables
-        real_identifiers.undeclared.update(body.symmetric_difference(else_) -
-                                           real_identifiers.declared)
-
-        # remember those that are declared.
-        real_identifiers.declared_locally.update(body | else_)
-
-    def visit_Macro(self, node):
-        self.identifiers.declared_locally.add(node.name)
-
-    def visit_Import(self, node):
-        self.generic_visit(node)
-        self.identifiers.declared_locally.add(node.target)
-
-    def visit_FromImport(self, node):
-        self.generic_visit(node)
-        for name in node.names:
-            if isinstance(name, tuple):
-                self.identifiers.declared_locally.add(name[1])
-            else:
-                self.identifiers.declared_locally.add(name)
-
-    def visit_Assign(self, node):
-        """Visit assignments in the correct order."""
-        self.visit(node.node)
-        self.visit(node.target)
-
-    def visit_For(self, node):
-        """Visiting stops at for blocks.  However the block sequence
-        is visited as part of the outer scope.
-        """
-        self.visit(node.iter)
-
-    def visit_CallBlock(self, node):
-        self.visit(node.call)
-
-    def visit_FilterBlock(self, node):
-        self.visit(node.filter)
-
-    def visit_Scope(self, node):
-        """Stop visiting at scopes."""
-
-    def visit_Block(self, node):
-        """Stop visiting at blocks."""
-
-
-class CompilerExit(Exception):
-    """Raised if the compiler encountered a situation where it just
-    doesn't make sense to further process the code.  Any block that
-    raises such an exception is not further processed.
-    """
-
-
-class CodeGenerator(NodeVisitor):
-
-    def __init__(self, environment, name, filename, stream=None,
-                 defer_init=False):
-        if stream is None:
-            stream = NativeStringIO()
-        self.environment = environment
-        self.name = name
-        self.filename = filename
-        self.stream = stream
-        self.created_block_context = False
-        self.defer_init = defer_init
-
-        # aliases for imports
-        self.import_aliases = {}
-
-        # a registry for all blocks.  Because blocks are moved out
-        # into the global python scope they are registered here
-        self.blocks = {}
-
-        # the number of extends statements so far
-        self.extends_so_far = 0
-
-        # some templates have a rootlevel extends.  In this case we
-        # can safely assume that we're a child template and do some
-        # more optimizations.
-        self.has_known_extends = False
-
-        # the current line number
-        self.code_lineno = 1
-
-        # registry of all filters and tests (global, not block local)
-        self.tests = {}
-        self.filters = {}
-
-        # the debug information
-        self.debug_info = []
-        self._write_debug_info = None
-
-        # the number of new lines before the next write()
-        self._new_lines = 0
-
-        # the line number of the last written statement
-        self._last_line = 0
-
-        # true if nothing was written so far.
-        self._first_write = True
-
-        # used by the `temporary_identifier` method to get new
-        # unique, temporary identifier
-        self._last_identifier = 0
-
-        # the current indentation
-        self._indentation = 0
-
-    # -- Various compilation helpers
-
-    def fail(self, msg, lineno):
-        """Fail with a :exc:`TemplateAssertionError`."""
-        raise TemplateAssertionError(msg, lineno, self.name, self.filename)
-
-    def temporary_identifier(self):
-        """Get a new unique identifier."""
-        self._last_identifier += 1
-        return 't_%d' % self._last_identifier
-
-    def buffer(self, frame):
-        """Enable buffering for the frame from that point onwards."""
-        frame.buffer = self.temporary_identifier()
-        self.writeline('%s = []' % frame.buffer)
-
-    def return_buffer_contents(self, frame):
-        """Return the buffer contents of the frame."""
-        if frame.eval_ctx.volatile:
-            self.writeline('if context.eval_ctx.autoescape:')
-            self.indent()
-            self.writeline('return Markup(concat(%s))' % frame.buffer)
-            self.outdent()
-            self.writeline('else:')
-            self.indent()
-            self.writeline('return concat(%s)' % frame.buffer)
-            self.outdent()
-        elif frame.eval_ctx.autoescape:
-            self.writeline('return Markup(concat(%s))' % frame.buffer)
-        else:
-            self.writeline('return concat(%s)' % frame.buffer)
-
-    def indent(self):
-        """Indent by one."""
-        self._indentation += 1
-
-    def outdent(self, step=1):
-        """Outdent by step."""
-        self._indentation -= step
-
-    def start_write(self, frame, node=None):
-        """Yield or write into the frame buffer."""
-        if frame.buffer is None:
-            self.writeline('yield ', node)
-        else:
-            self.writeline('%s.append(' % frame.buffer, node)
-
-    def end_write(self, frame):
-        """End the writing process started by `start_write`."""
-        if frame.buffer is not None:
-            self.write(')')
-
-    def simple_write(self, s, frame, node=None):
-        """Simple shortcut for start_write + write + end_write."""
-        self.start_write(frame, node)
-        self.write(s)
-        self.end_write(frame)
-
-    def blockvisit(self, nodes, frame):
-        """Visit a list of nodes as block in a frame.  If the current frame
-        is no buffer a dummy ``if 0: yield None`` is written automatically
-        unless the force_generator parameter is set to False.
-        """
-        if frame.buffer is None:
-            self.writeline('if 0: yield None')
-        else:
-            self.writeline('pass')
-        try:
-            for node in nodes:
-                self.visit(node, frame)
-        except CompilerExit:
-            pass
-
-    def write(self, x):
-        """Write a string into the output stream."""
-        if self._new_lines:
-            if not self._first_write:
-                self.stream.write('\n' * self._new_lines)
-                self.code_lineno += self._new_lines
-                if self._write_debug_info is not None:
-                    self.debug_info.append((self._write_debug_info,
-                                            self.code_lineno))
-                    self._write_debug_info = None
-            self._first_write = False
-            self.stream.write('    ' * self._indentation)
-            self._new_lines = 0
-        self.stream.write(x)
-
-    def writeline(self, x, node=None, extra=0):
-        """Combination of newline and write."""
-        self.newline(node, extra)
-        self.write(x)
-
-    def newline(self, node=None, extra=0):
-        """Add one or more newlines before the next write."""
-        self._new_lines = max(self._new_lines, 1 + extra)
-        if node is not None and node.lineno != self._last_line:
-            self._write_debug_info = node.lineno
-            self._last_line = node.lineno
-
-    def signature(self, node, frame, extra_kwargs=None):
-        """Writes a function call to the stream for the current node.
-        A leading comma is added automatically.  The extra keyword
-        arguments may not include python keywords otherwise a syntax
-        error could occour.  The extra keyword arguments should be given
-        as python dict.
-        """
-        # if any of the given keyword arguments is a python keyword
-        # we have to make sure that no invalid call is created.
-        kwarg_workaround = False
-        for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
-            if is_python_keyword(kwarg):
-                kwarg_workaround = True
-                break
-
-        for arg in node.args:
-            self.write(', ')
-            self.visit(arg, frame)
-
-        if not kwarg_workaround:
-            for kwarg in node.kwargs:
-                self.write(', ')
-                self.visit(kwarg, frame)
-            if extra_kwargs is not None:
-                for key, value in iteritems(extra_kwargs):
-                    self.write(', %s=%s' % (key, value))
-        if node.dyn_args:
-            self.write(', *')
-            self.visit(node.dyn_args, frame)
-
-        if kwarg_workaround:
-            if node.dyn_kwargs is not None:
-                self.write(', **dict({')
-            else:
-                self.write(', **{')
-            for kwarg in node.kwargs:
-                self.write('%r: ' % kwarg.key)
-                self.visit(kwarg.value, frame)
-                self.write(', ')
-            if extra_kwargs is not None:
-                for key, value in iteritems(extra_kwargs):
-                    self.write('%r: %s, ' % (key, value))
-            if node.dyn_kwargs is not None:
-                self.write('}, **')
-                self.visit(node.dyn_kwargs, frame)
-                self.write(')')
-            else:
-                self.write('}')
-
-        elif node.dyn_kwargs is not None:
-            self.write(', **')
-            self.visit(node.dyn_kwargs, frame)
-
-    def pull_locals(self, frame):
-        """Pull all the references identifiers into the local scope."""
-        for name in frame.identifiers.undeclared:
-            self.writeline('l_%s = context.resolve(%r)' % (name, name))
-
-    def pull_dependencies(self, nodes):
-        """Pull all the dependencies."""
-        visitor = DependencyFinderVisitor()
-        for node in nodes:
-            visitor.visit(node)
-        for dependency in 'filters', 'tests':
-            mapping = getattr(self, dependency)
-            for name in getattr(visitor, dependency):
-                if name not in mapping:
-                    mapping[name] = self.temporary_identifier()
-                self.writeline('%s = environment.%s[%r]' %
-                               (mapping[name], dependency, name))
-
-    def unoptimize_scope(self, frame):
-        """Disable Python optimizations for the frame."""
-        # XXX: this is not that nice but it has no real overhead.  It
-        # mainly works because python finds the locals before dead code
-        # is removed.  If that breaks we have to add a dummy function
-        # that just accepts the arguments and does nothing.
-        if frame.identifiers.declared:
-            self.writeline('%sdummy(%s)' % (
-                unoptimize_before_dead_code and 'if 0: ' or '',
-                ', '.join('l_' + name for name in frame.identifiers.declared)
-            ))
-
-    def push_scope(self, frame, extra_vars=()):
-        """This function returns all the shadowed variables in a dict
-        in the form name: alias and will write the required assignments
-        into the current scope.  No indentation takes place.
-
-        This also predefines locally declared variables from the loop
-        body because under some circumstances it may be the case that
-
-        `extra_vars` is passed to `Frame.find_shadowed`.
-        """
-        aliases = {}
-        for name in frame.find_shadowed(extra_vars):
-            aliases[name] = ident = self.temporary_identifier()
-            self.writeline('%s = l_%s' % (ident, name))
-        to_declare = set()
-        for name in frame.identifiers.declared_locally:
-            if name not in aliases:
-                to_declare.add('l_' + name)
-        if to_declare:
-            self.writeline(' = '.join(to_declare) + ' = missing')
-        return aliases
-
-    def pop_scope(self, aliases, frame):
-        """Restore all aliases and delete unused variables."""
-        for name, alias in iteritems(aliases):
-            self.writeline('l_%s = %s' % (name, alias))
-        to_delete = set()
-        for name in frame.identifiers.declared_locally:
-            if name not in aliases:
-                to_delete.add('l_' + name)
-        if to_delete:
-            # we cannot use the del statement here because enclosed
-            # scopes can trigger a SyntaxError:
-            #   a = 42; b = lambda: a; del a
-            self.writeline(' = '.join(to_delete) + ' = missing')
-
-    def function_scoping(self, node, frame, children=None,
-                         find_special=True):
-        """In Jinja a few statements require the help of anonymous
-        functions.  Those are currently macros and call blocks and in
-        the future also recursive loops.  As there is currently
-        technical limitation that doesn't allow reading and writing a
-        variable in a scope where the initial value is coming from an
-        outer scope, this function tries to fall back with a common
-        error message.  Additionally the frame passed is modified so
-        that the argumetns are collected and callers are looked up.
-
-        This will return the modified frame.
-        """
-        # we have to iterate twice over it, make sure that works
-        if children is None:
-            children = node.iter_child_nodes()
-        children = list(children)
-        func_frame = frame.inner()
-        func_frame.inspect(children)
-
-        # variables that are undeclared (accessed before declaration) and
-        # declared locally *and* part of an outside scope raise a template
-        # assertion error. Reason: we can't generate reasonable code from
-        # it without aliasing all the variables.
-        # this could be fixed in Python 3 where we have the nonlocal
-        # keyword or if we switch to bytecode generation
-        overridden_closure_vars = (
-            func_frame.identifiers.undeclared &
-            func_frame.identifiers.declared &
-            (func_frame.identifiers.declared_locally |
-             func_frame.identifiers.declared_parameter)
-        )
-        if overridden_closure_vars:
-            self.fail('It\'s not possible to set and access variables '
-                      'derived from an outer scope! (affects: %s)' %
-                      ', '.join(sorted(overridden_closure_vars)), node.lineno)
-
-        # remove variables from a closure from the frame's undeclared
-        # identifiers.
-        func_frame.identifiers.undeclared -= (
-            func_frame.identifiers.undeclared &
-            func_frame.identifiers.declared
-        )
-
-        # no special variables for this scope, abort early
-        if not find_special:
-            return func_frame
-
-        func_frame.accesses_kwargs = False
-        func_frame.accesses_varargs = False
-        func_frame.accesses_caller = False
-        func_frame.arguments = args = ['l_' + x.name for x in node.args]
-
-        undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
-
-        if 'caller' in undeclared:
-            func_frame.accesses_caller = True
-            func_frame.identifiers.add_special('caller')
-            args.append('l_caller')
-        if 'kwargs' in undeclared:
-            func_frame.accesses_kwargs = True
-            func_frame.identifiers.add_special('kwargs')
-            args.append('l_kwargs')
-        if 'varargs' in undeclared:
-            func_frame.accesses_varargs = True
-            func_frame.identifiers.add_special('varargs')
-            args.append('l_varargs')
-        return func_frame
-
-    def macro_body(self, node, frame, children=None):
-        """Dump the function def of a macro or call block."""
-        frame = self.function_scoping(node, frame, children)
-        # macros are delayed, they never require output checks
-        frame.require_output_check = False
-        args = frame.arguments
-        # XXX: this is an ugly fix for the loop nesting bug
-        # (tests.test_old_bugs.test_loop_call_bug).  This works around
-        # a identifier nesting problem we have in general.  It's just more
-        # likely to happen in loops which is why we work around it.  The
-        # real solution would be "nonlocal" all the identifiers that are
-        # leaking into a new python frame and might be used both unassigned
-        # and assigned.
-        if 'loop' in frame.identifiers.declared:
-            args = args + ['l_loop=l_loop']
-        self.writeline('def macro(%s):' % ', '.join(args), node)
-        self.indent()
-        self.buffer(frame)
-        self.pull_locals(frame)
-        self.blockvisit(node.body, frame)
-        self.return_buffer_contents(frame)
-        self.outdent()
-        return frame
-
-    def macro_def(self, node, frame):
-        """Dump the macro definition for the def created by macro_body."""
-        arg_tuple = ', '.join(repr(x.name) for x in node.args)
-        name = getattr(node, 'name', None)
-        if len(node.args) == 1:
-            arg_tuple += ','
-        self.write('Macro(environment, macro, %r, (%s), (' %
-                   (name, arg_tuple))
-        for arg in node.defaults:
-            self.visit(arg, frame)
-            self.write(', ')
-        self.write('), %r, %r, %r)' % (
-            bool(frame.accesses_kwargs),
-            bool(frame.accesses_varargs),
-            bool(frame.accesses_caller)
-        ))
-
-    def position(self, node):
-        """Return a human readable position for the node."""
-        rv = 'line %d' % node.lineno
-        if self.name is not None:
-            rv += ' in ' + repr(self.name)
-        return rv
-
-    # -- Statement Visitors
-
-    def visit_Template(self, node, frame=None):
-        assert frame is None, 'no root frame allowed'
-        eval_ctx = EvalContext(self.environment, self.name)
-
-        from jinja2.runtime import __all__ as exported
-        self.writeline('from __future__ import division')
-        self.writeline('from jinja2.runtime import ' + ', '.join(exported))
-        if not unoptimize_before_dead_code:
-            self.writeline('dummy = lambda *x: None')
-
-        # if we want a deferred initialization we cannot move the
-        # environment into a local name
-        envenv = not self.defer_init and ', environment=environment' or ''
-
-        # do we have an extends tag at all?  If not, we can save some
-        # overhead by just not processing any inheritance code.
-        have_extends = node.find(nodes.Extends) is not None
-
-        # find all blocks
-        for block in node.find_all(nodes.Block):
-            if block.name in self.blocks:
-                self.fail('block %r defined twice' % block.name, block.lineno)
-            self.blocks[block.name] = block
-
-        # find all imports and import them
-        for import_ in node.find_all(nodes.ImportedName):
-            if import_.importname not in self.import_aliases:
-                imp = import_.importname
-                self.import_aliases[imp] = alias = self.temporary_identifier()
-                if '.' in imp:
-                    module, obj = imp.rsplit('.', 1)
-                    self.writeline('from %s import %s as %s' %
-                                   (module, obj, alias))
-                else:
-                    self.writeline('import %s as %s' % (imp, alias))
-
-        # add the load name
-        self.writeline('name = %r' % self.name)
-
-        # generate the root render function.
-        self.writeline('def root(context%s):' % envenv, extra=1)
-
-        # process the root
-        frame = Frame(eval_ctx)
-        frame.inspect(node.body)
-        frame.toplevel = frame.rootlevel = True
-        frame.require_output_check = have_extends and not self.has_known_extends
-        self.indent()
-        if have_extends:
-            self.writeline('parent_template = None')
-        if 'self' in find_undeclared(node.body, ('self',)):
-            frame.identifiers.add_special('self')
-            self.writeline('l_self = TemplateReference(context)')
-        self.pull_locals(frame)
-        self.pull_dependencies(node.body)
-        self.blockvisit(node.body, frame)
-        self.outdent()
-
-        # make sure that the parent root is called.
-        if have_extends:
-            if not self.has_known_extends:
-                self.indent()
-                self.writeline('if parent_template is not None:')
-            self.indent()
-            self.writeline('for event in parent_template.'
-                           'root_render_func(context):')
-            self.indent()
-            self.writeline('yield event')
-            self.outdent(2 + (not self.has_known_extends))
-
-        # at this point we now have the blocks collected and can visit them too.
-        for name, block in iteritems(self.blocks):
-            block_frame = Frame(eval_ctx)
-            block_frame.inspect(block.body)
-            block_frame.block = name
-            self.writeline('def block_%s(context%s):' % (name, envenv),
-                           block, 1)
-            self.indent()
-            undeclared = find_undeclared(block.body, ('self', 'super'))
-            if 'self' in undeclared:
-                block_frame.identifiers.add_special('self')
-                self.writeline('l_self = TemplateReference(context)')
-            if 'super' in undeclared:
-                block_frame.identifiers.add_special('super')
-                self.writeline('l_super = context.super(%r, '
-                               'block_%s)' % (name, name))
-            self.pull_locals(block_frame)
-            self.pull_dependencies(block.body)
-            self.blockvisit(block.body, block_frame)
-            self.outdent()
-
-        self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
-                                                   for x in self.blocks),
-                       extra=1)
-
-        # add a function that returns the debug info
-        self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
-                                                    in self.debug_info))
-
-    def visit_Block(self, node, frame):
-        """Call a block and register it for the template."""
-        level = 1
-        if frame.toplevel:
-            # if we know that we are a child template, there is no need to
-            # check if we are one
-            if self.has_known_extends:
-                return
-            if self.extends_so_far > 0:
-                self.writeline('if parent_template is None:')
-                self.indent()
-                level += 1
-        context = node.scoped and 'context.derived(locals())' or 'context'
-        self.writeline('for event in context.blocks[%r][0](%s):' % (
-                       node.name, context), node)
-        self.indent()
-        self.simple_write('event', frame)
-        self.outdent(level)
-
-    def visit_Extends(self, node, frame):
-        """Calls the extender."""
-        if not frame.toplevel:
-            self.fail('cannot use extend from a non top-level scope',
-                      node.lineno)
-
-        # if the number of extends statements in general is zero so
-        # far, we don't have to add a check if something extended
-        # the template before this one.
-        if self.extends_so_far > 0:
-
-            # if we have a known extends we just add a template runtime
-            # error into the generated code.  We could catch that at compile
-            # time too, but i welcome it not to confuse users by throwing the
-            # same error at different times just "because we can".
-            if not self.has_known_extends:
-                self.writeline('if parent_template is not None:')
-                self.indent()
-            self.writeline('raise TemplateRuntimeError(%r)' %
-                           'extended multiple times')
-
-            # if we have a known extends already we don't need that code here
-            # as we know that the template execution will end here.
-            if self.has_known_extends:
-                raise CompilerExit()
-            else:
-                self.outdent()
-
-        self.writeline('parent_template = environment.get_template(', node)
-        self.visit(node.template, frame)
-        self.write(', %r)' % self.name)
-        self.writeline('for name, parent_block in parent_template.'
-                       'blocks.%s():' % dict_item_iter)
-        self.indent()
-        self.writeline('context.blocks.setdefault(name, []).'
-                       'append(parent_block)')
-        self.outdent()
-
-        # if this extends statement was in the root level we can take
-        # advantage of that information and simplify the generated code
-        # in the top level from this point onwards
-        if frame.rootlevel:
-            self.has_known_extends = True
-
-        # and now we have one more
-        self.extends_so_far += 1
-
-    def visit_Include(self, node, frame):
-        """Handles includes."""
-        if node.with_context:
-            self.unoptimize_scope(frame)
-        if node.ignore_missing:
-            self.writeline('try:')
-            self.indent()
-
-        func_name = 'get_or_select_template'
-        if isinstance(node.template, nodes.Const):
-            if isinstance(node.template.value, string_types):
-                func_name = 'get_template'
-            elif isinstance(node.template.value, (tuple, list)):
-                func_name = 'select_template'
-        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
-            func_name = 'select_template'
-
-        self.writeline('template = environment.%s(' % func_name, node)
-        self.visit(node.template, frame)
-        self.write(', %r)' % self.name)
-        if node.ignore_missing:
-            self.outdent()
-            self.writeline('except TemplateNotFound:')
-            self.indent()
-            self.writeline('pass')
-            self.outdent()
-            self.writeline('else:')
-            self.indent()
-
-        if node.with_context:
-            self.writeline('for event in template.root_render_func('
-                           'template.new_context(context.parent, True, '
-                           'locals())):')
-        else:
-            self.writeline('for event in template.module._body_stream:')
-
-        self.indent()
-        self.simple_write('event', frame)
-        self.outdent()
-
-        if node.ignore_missing:
-            self.outdent()
-
-    def visit_Import(self, node, frame):
-        """Visit regular imports."""
-        if node.with_context:
-            self.unoptimize_scope(frame)
-        self.writeline('l_%s = ' % node.target, node)
-        if frame.toplevel:
-            self.write('context.vars[%r] = ' % node.target)
-        self.write('environment.get_template(')
-        self.visit(node.template, frame)
-        self.write(', %r).' % self.name)
-        if node.with_context:
-            self.write('make_module(context.parent, True, locals())')
-        else:
-            self.write('module')
-        if frame.toplevel and not node.target.startswith('_'):
-            self.writeline('context.exported_vars.discard(%r)' % node.target)
-        frame.assigned_names.add(node.target)
-
-    def visit_FromImport(self, node, frame):
-        """Visit named imports."""
-        self.newline(node)
-        self.write('included_template = environment.get_template(')
-        self.visit(node.template, frame)
-        self.write(', %r).' % self.name)
-        if node.with_context:
-            self.write('make_module(context.parent, True)')
-        else:
-            self.write('module')
-
-        var_names = []
-        discarded_names = []
-        for name in node.names:
-            if isinstance(name, tuple):
-                name, alias = name
-            else:
-                alias = name
-            self.writeline('l_%s = getattr(included_template, '
-                           '%r, missing)' % (alias, name))
-            self.writeline('if l_%s is missing:' % alias)
-            self.indent()
-            self.writeline('l_%s = environment.undefined(%r %% '
-                           'included_template.__name__, '
-                           'name=%r)' %
-                           (alias, 'the template %%r (imported on %s) does '
-                           'not export the requested name %s' % (
-                                self.position(node),
-                                repr(name)
-                           ), name))
-            self.outdent()
-            if frame.toplevel:
-                var_names.append(alias)
-                if not alias.startswith('_'):
-                    discarded_names.append(alias)
-            frame.assigned_names.add(alias)
-
-        if var_names:
-            if len(var_names) == 1:
-                name = var_names[0]
-                self.writeline('context.vars[%r] = l_%s' % (name, name))
-            else:
-                self.writeline('context.vars.update({%s})' % ', '.join(
-                    '%r: l_%s' % (name, name) for name in var_names
-                ))
-        if discarded_names:
-            if len(discarded_names) == 1:
-                self.writeline('context.exported_vars.discard(%r)' %
-                               discarded_names[0])
-            else:
-                self.writeline('context.exported_vars.difference_'
-                               'update((%s))' % ', '.join(imap(repr, discarded_names)))
-
-    def visit_For(self, node, frame):
-        # when calculating the nodes for the inner frame we have to exclude
-        # the iterator contents from it
-        children = node.iter_child_nodes(exclude=('iter',))
-        if node.recursive:
-            loop_frame = self.function_scoping(node, frame, children,
-                                               find_special=False)
-        else:
-            loop_frame = frame.inner()
-            loop_frame.inspect(children)
-
-        # try to figure out if we have an extended loop.  An extended loop
-        # is necessary if the loop is in recursive mode if the special loop
-        # variable is accessed in the body.
-        extended_loop = node.recursive or 'loop' in \
-                        find_undeclared(node.iter_child_nodes(
-                            only=('body',)), ('loop',))
-
-        # if we don't have an recursive loop we have to find the shadowed
-        # variables at that point.  Because loops can be nested but the loop
-        # variable is a special one we have to enforce aliasing for it.
-        if not node.recursive:
-            aliases = self.push_scope(loop_frame, ('loop',))
-
-        # otherwise we set up a buffer and add a function def
-        else:
-            self.writeline('def loop(reciter, loop_render_func, depth=0):', node)
-            self.indent()
-            self.buffer(loop_frame)
-            aliases = {}
-
-        # make sure the loop variable is a special one and raise a template
-        # assertion error if a loop tries to write to loop
-        if extended_loop:
-            self.writeline('l_loop = missing')
-            loop_frame.identifiers.add_special('loop')
-        for name in node.find_all(nodes.Name):
-            if name.ctx == 'store' and name.name == 'loop':
-                self.fail('Can\'t assign to special loop variable '
-                          'in for-loop target', name.lineno)
-
-        self.pull_locals(loop_frame)
-        if node.else_:
-            iteration_indicator = self.temporary_identifier()
-            self.writeline('%s = 1' % iteration_indicator)
-
-        # Create a fake parent loop if the else or test section of a
-        # loop is accessing the special loop variable and no parent loop
-        # exists.
-        if 'loop' not in aliases and 'loop' in find_undeclared(
-           node.iter_child_nodes(only=('else_', 'test')), ('loop',)):
-            self.writeline("l_loop = environment.undefined(%r, name='loop')" %
-                ("'loop' is undefined. the filter section of a loop as well "
-                 "as the else block don't have access to the special 'loop'"
-                 " variable of the current loop.  Because there is no parent "
-                 "loop it's undefined.  Happened in loop on %s" %
-                 self.position(node)))
-
-        self.writeline('for ', node)
-        self.visit(node.target, loop_frame)
-        self.write(extended_loop and ', l_loop in LoopContext(' or ' in ')
-
-        # if we have an extened loop and a node test, we filter in the
-        # "outer frame".
-        if extended_loop and node.test is not None:
-            self.write('(')
-            self.visit(node.target, loop_frame)
-            self.write(' for ')
-            self.visit(node.target, loop_frame)
-            self.write(' in ')
-            if node.recursive:
-                self.write('reciter')
-            else:
-                self.visit(node.iter, loop_frame)
-            self.write(' if (')
-            test_frame = loop_frame.copy()
-            self.visit(node.test, test_frame)
-            self.write('))')
-
-        elif node.recursive:
-            self.write('reciter')
-        else:
-            self.visit(node.iter, loop_frame)
-
-        if node.recursive:
-            self.write(', loop_render_func, depth):')
-        else:
-            self.write(extended_loop and '):' or ':')
-
-        # tests in not extended loops become a continue
-        if not extended_loop and node.test is not None:
-            self.indent()
-            self.writeline('if not ')
-            self.visit(node.test, loop_frame)
-            self.write(':')
-            self.indent()
-            self.writeline('continue')
-            self.outdent(2)
-
-        self.indent()
-        self.blockvisit(node.body, loop_frame)
-        if node.else_:
-            self.writeline('%s = 0' % iteration_indicator)
-        self.outdent()
-
-        if node.else_:
-            self.writeline('if %s:' % iteration_indicator)
-            self.indent()
-            self.blockvisit(node.else_, loop_frame)
-            self.outdent()
-
-        # reset the aliases if there are any.
-        if not node.recursive:
-            self.pop_scope(aliases, loop_frame)
-
-        # if the node was recursive we have to return the buffer contents
-        # and start the iteration code
-        if node.recursive:
-            self.return_buffer_contents(loop_frame)
-            self.outdent()
-            self.start_write(frame, node)
-            self.write('loop(')
-            self.visit(node.iter, frame)
-            self.write(', loop)')
-            self.end_write(frame)
-
-    def visit_If(self, node, frame):
-        if_frame = frame.soft()
-        self.writeline('if ', node)
-        self.visit(node.test, if_frame)
-        self.write(':')
-        self.indent()
-        self.blockvisit(node.body, if_frame)
-        self.outdent()
-        if node.else_:
-            self.writeline('else:')
-            self.indent()
-            self.blockvisit(node.else_, if_frame)
-            self.outdent()
-
-    def visit_Macro(self, node, frame):
-        macro_frame = self.macro_body(node, frame)
-        self.newline()
-        if frame.toplevel:
-            if not node.name.startswith('_'):
-                self.write('context.exported_vars.add(%r)' % node.name)
-            self.writeline('context.vars[%r] = ' % node.name)
-        self.write('l_%s = ' % node.name)
-        self.macro_def(node, macro_frame)
-        frame.assigned_names.add(node.name)
-
-    def visit_CallBlock(self, node, frame):
-        children = node.iter_child_nodes(exclude=('call',))
-        call_frame = self.macro_body(node, frame, children)
-        self.writeline('caller = ')
-        self.macro_def(node, call_frame)
-        self.start_write(frame, node)
-        self.visit_Call(node.call, call_frame, forward_caller=True)
-        self.end_write(frame)
-
-    def visit_FilterBlock(self, node, frame):
-        filter_frame = frame.inner()
-        filter_frame.inspect(node.iter_child_nodes())
-        aliases = self.push_scope(filter_frame)
-        self.pull_locals(filter_frame)
-        self.buffer(filter_frame)
-        self.blockvisit(node.body, filter_frame)
-        self.start_write(frame, node)
-        self.visit_Filter(node.filter, filter_frame)
-        self.end_write(frame)
-        self.pop_scope(aliases, filter_frame)
-
-    def visit_ExprStmt(self, node, frame):
-        self.newline(node)
-        self.visit(node.node, frame)
-
-    def visit_Output(self, node, frame):
-        # if we have a known extends statement, we don't output anything
-        # if we are in a require_output_check section
-        if self.has_known_extends and frame.require_output_check:
-            return
-
-        if self.environment.finalize:
-            finalize = lambda x: text_type(self.environment.finalize(x))
-        else:
-            finalize = text_type
-
-        # if we are inside a frame that requires output checking, we do so
-        outdent_later = False
-        if frame.require_output_check:
-            self.writeline('if parent_template is None:')
-            self.indent()
-            outdent_later = True
-
-        # try to evaluate as many chunks as possible into a static
-        # string at compile time.
-        body = []
-        for child in node.nodes:
-            try:
-                const = child.as_const(frame.eval_ctx)
-            except nodes.Impossible:
-                body.append(child)
-                continue
-            # the frame can't be volatile here, becaus otherwise the
-            # as_const() function would raise an Impossible exception
-            # at that point.
-            try:
-                if frame.eval_ctx.autoescape:
-                    if hasattr(const, '__html__'):
-                        const = const.__html__()
-                    else:
-                        const = escape(const)
-                const = finalize(const)
-            except Exception:
-                # if something goes wrong here we evaluate the node
-                # at runtime for easier debugging
-                body.append(child)
-                continue
-            if body and isinstance(body[-1], list):
-                body[-1].append(const)
-            else:
-                body.append([const])
-
-        # if we have less than 3 nodes or a buffer we yield or extend/append
-        if len(body) < 3 or frame.buffer is not None:
-            if frame.buffer is not None:
-                # for one item we append, for more we extend
-                if len(body) == 1:
-                    self.writeline('%s.append(' % frame.buffer)
-                else:
-                    self.writeline('%s.extend((' % frame.buffer)
-                self.indent()
-            for item in body:
-                if isinstance(item, list):
-                    val = repr(concat(item))
-                    if frame.buffer is None:
-                        self.writeline('yield ' + val)
-                    else:
-                        self.writeline(val + ', ')
-                else:
-                    if frame.buffer is None:
-                        self.writeline('yield ', item)
-                    else:
-                        self.newline(item)
-                    close = 1
-                    if frame.eval_ctx.volatile:
-                        self.write('(context.eval_ctx.autoescape and'
-                                   ' escape or to_string)(')
-                    elif frame.eval_ctx.autoescape:
-                        self.write('escape(')
-                    else:
-                        self.write('to_string(')
-                    if self.environment.finalize is not None:
-                        self.write('environment.finalize(')
-                        close += 1
-                    self.visit(item, frame)
-                    self.write(')' * close)
-                    if frame.buffer is not None:
-                        self.write(', ')
-            if frame.buffer is not None:
-                # close the open parentheses
-                self.outdent()
-                self.writeline(len(body) == 1 and ')' or '))')
-
-        # otherwise we create a format string as this is faster in that case
-        else:
-            format = []
-            arguments = []
-            for item in body:
-                if isinstance(item, list):
-                    format.append(concat(item).replace('%', '%%'))
-                else:
-                    format.append('%s')
-                    arguments.append(item)
-            self.writeline('yield ')
-            self.write(repr(concat(format)) + ' % (')
-            idx = -1
-            self.indent()
-            for argument in arguments:
-                self.newline(argument)
-                close = 0
-                if frame.eval_ctx.volatile:
-                    self.write('(context.eval_ctx.autoescape and'
-                               ' escape or to_string)(')
-                    close += 1
-                elif frame.eval_ctx.autoescape:
-                    self.write('escape(')
-                    close += 1
-                if self.environment.finalize is not None:
-                    self.write('environment.finalize(')
-                    close += 1
-                self.visit(argument, frame)
-                self.write(')' * close + ', ')
-            self.outdent()
-            self.writeline(')')
-
-        if outdent_later:
-            self.outdent()
-
-    def visit_Assign(self, node, frame):
-        self.newline(node)
-        # toplevel assignments however go into the local namespace and
-        # the current template's context.  We create a copy of the frame
-        # here and add a set so that the Name visitor can add the assigned
-        # names here.
-        if frame.toplevel:
-            assignment_frame = frame.copy()
-            assignment_frame.toplevel_assignments = set()
-        else:
-            assignment_frame = frame
-        self.visit(node.target, assignment_frame)
-        self.write(' = ')
-        self.visit(node.node, frame)
-
-        # make sure toplevel assignments are added to the context.
-        if frame.toplevel:
-            public_names = [x for x in assignment_frame.toplevel_assignments
-                            if not x.startswith('_')]
-            if len(assignment_frame.toplevel_assignments) == 1:
-                name = next(iter(assignment_frame.toplevel_assignments))
-                self.writeline('context.vars[%r] = l_%s' % (name, name))
-            else:
-                self.writeline('context.vars.update({')
-                for idx, name in enumerate(assignment_frame.toplevel_assignments):
-                    if idx:
-                        self.write(', ')
-                    self.write('%r: l_%s' % (name, name))
-                self.write('})')
-            if public_names:
-                if len(public_names) == 1:
-                    self.writeline('context.exported_vars.add(%r)' %
-                                   public_names[0])
-                else:
-                    self.writeline('context.exported_vars.update((%s))' %
-                                   ', '.join(imap(repr, public_names)))
-
-    # -- Expression Visitors
-
-    def visit_Name(self, node, frame):
-        if node.ctx == 'store' and frame.toplevel:
-            frame.toplevel_assignments.add(node.name)
-        self.write('l_' + node.name)
-        frame.assigned_names.add(node.name)
-
-    def visit_Const(self, node, frame):
-        val = node.value
-        if isinstance(val, float):
-            self.write(str(val))
-        else:
-            self.write(repr(val))
-
-    def visit_TemplateData(self, node, frame):
-        try:
-            self.write(repr(node.as_const(frame.eval_ctx)))
-        except nodes.Impossible:
-            self.write('(context.eval_ctx.autoescape and Markup or identity)(%r)'
-                       % node.data)
-
-    def visit_Tuple(self, node, frame):
-        self.write('(')
-        idx = -1
-        for idx, item in enumerate(node.items):
-            if idx:
-                self.write(', ')
-            self.visit(item, frame)
-        self.write(idx == 0 and ',)' or ')')
-
-    def visit_List(self, node, frame):
-        self.write('[')
-        for idx, item in enumerate(node.items):
-            if idx:
-                self.write(', ')
-            self.visit(item, frame)
-        self.write(']')
-
-    def visit_Dict(self, node, frame):
-        self.write('{')
-        for idx, item in enumerate(node.items):
-            if idx:
-                self.write(', ')
-            self.visit(item.key, frame)
-            self.write(': ')
-            self.visit(item.value, frame)
-        self.write('}')
-
-    def binop(operator, interceptable=True):
-        def visitor(self, node, frame):
-            if self.environment.sandboxed and \
-               operator in self.environment.intercepted_binops:
-                self.write('environment.call_binop(context, %r, ' % operator)
-                self.visit(node.left, frame)
-                self.write(', ')
-                self.visit(node.right, frame)
-            else:
-                self.write('(')
-                self.visit(node.left, frame)
-                self.write(' %s ' % operator)
-                self.visit(node.right, frame)
-            self.write(')')
-        return visitor
-
-    def uaop(operator, interceptable=True):
-        def visitor(self, node, frame):
-            if self.environment.sandboxed and \
-               operator in self.environment.intercepted_unops:
-                self.write('environment.call_unop(context, %r, ' % operator)
-                self.visit(node.node, frame)
-            else:
-                self.write('(' + operator)
-                self.visit(node.node, frame)
-            self.write(')')
-        return visitor
-
-    visit_Add = binop('+')
-    visit_Sub = binop('-')
-    visit_Mul = binop('*')
-    visit_Div = binop('/')
-    visit_FloorDiv = binop('//')
-    visit_Pow = binop('**')
-    visit_Mod = binop('%')
-    visit_And = binop('and', interceptable=False)
-    visit_Or = binop('or', interceptable=False)
-    visit_Pos = uaop('+')
-    visit_Neg = uaop('-')
-    visit_Not = uaop('not ', interceptable=False)
-    del binop, uaop
-
-    def visit_Concat(self, node, frame):
-        if frame.eval_ctx.volatile:
-            func_name = '(context.eval_ctx.volatile and' \
-                        ' markup_join or unicode_join)'
-        elif frame.eval_ctx.autoescape:
-            func_name = 'markup_join'
-        else:
-            func_name = 'unicode_join'
-        self.write('%s((' % func_name)
-        for arg in node.nodes:
-            self.visit(arg, frame)
-            self.write(', ')
-        self.write('))')
-
-    def visit_Compare(self, node, frame):
-        self.visit(node.expr, frame)
-        for op in node.ops:
-            self.visit(op, frame)
-
-    def visit_Operand(self, node, frame):
-        self.write(' %s ' % operators[node.op])
-        self.visit(node.expr, frame)
-
-    def visit_Getattr(self, node, frame):
-        self.write('environment.getattr(')
-        self.visit(node.node, frame)
-        self.write(', %r)' % node.attr)
-
-    def visit_Getitem(self, node, frame):
-        # slices bypass the environment getitem method.
-        if isinstance(node.arg, nodes.Slice):
-            self.visit(node.node, frame)
-            self.write('[')
-            self.visit(node.arg, frame)
-            self.write(']')
-        else:
-            self.write('environment.getitem(')
-            self.visit(node.node, frame)
-            self.write(', ')
-            self.visit(node.arg, frame)
-            self.write(')')
-
-    def visit_Slice(self, node, frame):
-        if node.start is not None:
-            self.visit(node.start, frame)
-        self.write(':')
-        if node.stop is not None:
-            self.visit(node.stop, frame)
-        if node.step is not None:
-            self.write(':')
-            self.visit(node.step, frame)
-
-    def visit_Filter(self, node, frame):
-        self.write(self.filters[node.name] + '(')
-        func = self.environment.filters.get(node.name)
-        if func is None:
-            self.fail('no filter named %r' % node.name, node.lineno)
-        if getattr(func, 'contextfilter', False):
-            self.write('context, ')
-        elif getattr(func, 'evalcontextfilter', False):
-            self.write('context.eval_ctx, ')
-        elif getattr(func, 'environmentfilter', False):
-            self.write('environment, ')
-
-        # if the filter node is None we are inside a filter block
-        # and want to write to the current buffer
-        if node.node is not None:
-            self.visit(node.node, frame)
-        elif frame.eval_ctx.volatile:
-            self.write('(context.eval_ctx.autoescape and'
-                       ' Markup(concat(%s)) or concat(%s))' %
-                       (frame.buffer, frame.buffer))
-        elif frame.eval_ctx.autoescape:
-            self.write('Markup(concat(%s))' % frame.buffer)
-        else:
-            self.write('concat(%s)' % frame.buffer)
-        self.signature(node, frame)
-        self.write(')')
-
-    def visit_Test(self, node, frame):
-        self.write(self.tests[node.name] + '(')
-        if node.name not in self.environment.tests:
-            self.fail('no test named %r' % node.name, node.lineno)
-        self.visit(node.node, frame)
-        self.signature(node, frame)
-        self.write(')')
-
-    def visit_CondExpr(self, node, frame):
-        def write_expr2():
-            if node.expr2 is not None:
-                return self.visit(node.expr2, frame)
-            self.write('environment.undefined(%r)' % ('the inline if-'
-                       'expression on %s evaluated to false and '
-                       'no else section was defined.' % self.position(node)))
-
-        self.write('(')
-        self.visit(node.expr1, frame)
-        self.write(' if ')
-        self.visit(node.test, frame)
-        self.write(' else ')
-        write_expr2()
-        self.write(')')
-
-    def visit_Call(self, node, frame, forward_caller=False):
-        if self.environment.sandboxed:
-            self.write('environment.call(context, ')
-        else:
-            self.write('context.call(')
-        self.visit(node.node, frame)
-        extra_kwargs = forward_caller and {'caller': 'caller'} or None
-        self.signature(node, frame, extra_kwargs)
-        self.write(')')
-
-    def visit_Keyword(self, node, frame):
-        self.write(node.key + '=')
-        self.visit(node.value, frame)
-
-    # -- Unused nodes for extensions
-
-    def visit_MarkSafe(self, node, frame):
-        self.write('Markup(')
-        self.visit(node.expr, frame)
-        self.write(')')
-
-    def visit_MarkSafeIfAutoescape(self, node, frame):
-        self.write('(context.eval_ctx.autoescape and Markup or identity)(')
-        self.visit(node.expr, frame)
-        self.write(')')
-
-    def visit_EnvironmentAttribute(self, node, frame):
-        self.write('environment.' + node.name)
-
-    def visit_ExtensionAttribute(self, node, frame):
-        self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
-
-    def visit_ImportedName(self, node, frame):
-        self.write(self.import_aliases[node.importname])
-
-    def visit_InternalName(self, node, frame):
-        self.write(node.name)
-
-    def visit_ContextReference(self, node, frame):
-        self.write('context')
-
-    def visit_Continue(self, node, frame):
-        self.writeline('continue', node)
-
-    def visit_Break(self, node, frame):
-        self.writeline('break', node)
-
-    def visit_Scope(self, node, frame):
-        scope_frame = frame.inner()
-        scope_frame.inspect(node.iter_child_nodes())
-        aliases = self.push_scope(scope_frame)
-        self.pull_locals(scope_frame)
-        self.blockvisit(node.body, scope_frame)
-        self.pop_scope(aliases, scope_frame)
-
-    def visit_EvalContextModifier(self, node, frame):
-        for keyword in node.options:
-            self.writeline('context.eval_ctx.%s = ' % keyword.key)
-            self.visit(keyword.value, frame)
-            try:
-                val = keyword.value.as_const(frame.eval_ctx)
-            except nodes.Impossible:
-                frame.eval_ctx.volatile = True
-            else:
-                setattr(frame.eval_ctx, keyword.key, val)
-
-    def visit_ScopedEvalContextModifier(self, node, frame):
-        old_ctx_name = self.temporary_identifier()
-        safed_ctx = frame.eval_ctx.save()
-        self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
-        self.visit_EvalContextModifier(node, frame)
-        for child in node.body:
-            self.visit(child, frame)
-        frame.eval_ctx.revert(safed_ctx)
-        self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
diff --git a/python/ext-libs/jinja2/constants.py b/python/ext-libs/jinja2/constants.py
deleted file mode 100644
index cab203c..0000000
--- a/python/ext-libs/jinja2/constants.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja.constants
-    ~~~~~~~~~~~~~~~
-
-    Various constants.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-
-
-#: list of lorem ipsum words used by the lipsum() helper function
-LOREM_IPSUM_WORDS = u'''\
-a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
-auctor augue bibendum blandit class commodo condimentum congue consectetuer
-consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
-diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
-elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
-faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
-hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
-justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
-luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
-nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
-penatibus per pharetra phasellus placerat platea porta porttitor posuere
-potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
-ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
-sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
-tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
-ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
-viverra volutpat vulputate'''
diff --git a/python/ext-libs/jinja2/debug.py b/python/ext-libs/jinja2/debug.py
deleted file mode 100644
index 815cc18..0000000
--- a/python/ext-libs/jinja2/debug.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.debug
-    ~~~~~~~~~~~~
-
-    Implements the debug interface for Jinja.  This module does some pretty
-    ugly stuff with the Python traceback system in order to achieve tracebacks
-    with correct line numbers, locals and contents.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import sys
-import traceback
-from types import TracebackType
-from jinja2.utils import missing, internal_code
-from jinja2.exceptions import TemplateSyntaxError
-from jinja2._compat import iteritems, reraise, code_type
-
-# on pypy we can take advantage of transparent proxies
-try:
-    from __pypy__ import tproxy
-except ImportError:
-    tproxy = None
-
-
-# how does the raise helper look like?
-try:
-    exec("raise TypeError, 'foo'")
-except SyntaxError:
-    raise_helper = 'raise __jinja_exception__[1]'
-except TypeError:
-    raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
-
-
-class TracebackFrameProxy(object):
-    """Proxies a traceback frame."""
-
-    def __init__(self, tb):
-        self.tb = tb
-        self._tb_next = None
-
-    @property
-    def tb_next(self):
-        return self._tb_next
-
-    def set_next(self, next):
-        if tb_set_next is not None:
-            try:
-                tb_set_next(self.tb, next and next.tb or None)
-            except Exception:
-                # this function can fail due to all the hackery it does
-                # on various python implementations.  We just catch errors
-                # down and ignore them if necessary.
-                pass
-        self._tb_next = next
-
-    @property
-    def is_jinja_frame(self):
-        return '__jinja_template__' in self.tb.tb_frame.f_globals
-
-    def __getattr__(self, name):
-        return getattr(self.tb, name)
-
-
-def make_frame_proxy(frame):
-    proxy = TracebackFrameProxy(frame)
-    if tproxy is None:
-        return proxy
-    def operation_handler(operation, *args, **kwargs):
-        if operation in ('__getattribute__', '__getattr__'):
-            return getattr(proxy, args[0])
-        elif operation == '__setattr__':
-            proxy.__setattr__(*args, **kwargs)
-        else:
-            return getattr(proxy, operation)(*args, **kwargs)
-    return tproxy(TracebackType, operation_handler)
-
-
-class ProcessedTraceback(object):
-    """Holds a Jinja preprocessed traceback for printing or reraising."""
-
-    def __init__(self, exc_type, exc_value, frames):
-        assert frames, 'no frames for this traceback?'
-        self.exc_type = exc_type
-        self.exc_value = exc_value
-        self.frames = frames
-
-        # newly concatenate the frames (which are proxies)
-        prev_tb = None
-        for tb in self.frames:
-            if prev_tb is not None:
-                prev_tb.set_next(tb)
-            prev_tb = tb
-        prev_tb.set_next(None)
-
-    def render_as_text(self, limit=None):
-        """Return a string with the traceback."""
-        lines = traceback.format_exception(self.exc_type, self.exc_value,
-                                           self.frames[0], limit=limit)
-        return ''.join(lines).rstrip()
-
-    def render_as_html(self, full=False):
-        """Return a unicode string with the traceback as rendered HTML."""
-        from jinja2.debugrenderer import render_traceback
-        return u'%s\n\n<!--\n%s\n-->' % (
-            render_traceback(self, full=full),
-            self.render_as_text().decode('utf-8', 'replace')
-        )
-
-    @property
-    def is_template_syntax_error(self):
-        """`True` if this is a template syntax error."""
-        return isinstance(self.exc_value, TemplateSyntaxError)
-
-    @property
-    def exc_info(self):
-        """Exception info tuple with a proxy around the frame objects."""
-        return self.exc_type, self.exc_value, self.frames[0]
-
-    @property
-    def standard_exc_info(self):
-        """Standard python exc_info for re-raising"""
-        tb = self.frames[0]
-        # the frame will be an actual traceback (or transparent proxy) if
-        # we are on pypy or a python implementation with support for tproxy
-        if type(tb) is not TracebackType:
-            tb = tb.tb
-        return self.exc_type, self.exc_value, tb
-
-
-def make_traceback(exc_info, source_hint=None):
-    """Creates a processed traceback object from the exc_info."""
-    exc_type, exc_value, tb = exc_info
-    if isinstance(exc_value, TemplateSyntaxError):
-        exc_info = translate_syntax_error(exc_value, source_hint)
-        initial_skip = 0
-    else:
-        initial_skip = 1
-    return translate_exception(exc_info, initial_skip)
-
-
-def translate_syntax_error(error, source=None):
-    """Rewrites a syntax error to please traceback systems."""
-    error.source = source
-    error.translated = True
-    exc_info = (error.__class__, error, None)
-    filename = error.filename
-    if filename is None:
-        filename = '<unknown>'
-    return fake_exc_info(exc_info, filename, error.lineno)
-
-
-def translate_exception(exc_info, initial_skip=0):
-    """If passed an exc_info it will automatically rewrite the exceptions
-    all the way down to the correct line numbers and frames.
-    """
-    tb = exc_info[2]
-    frames = []
-
-    # skip some internal frames if wanted
-    for x in range(initial_skip):
-        if tb is not None:
-            tb = tb.tb_next
-    initial_tb = tb
-
-    while tb is not None:
-        # skip frames decorated with @internalcode.  These are internal
-        # calls we can't avoid and that are useless in template debugging
-        # output.
-        if tb.tb_frame.f_code in internal_code:
-            tb = tb.tb_next
-            continue
-
-        # save a reference to the next frame if we override the current
-        # one with a faked one.
-        next = tb.tb_next
-
-        # fake template exceptions
-        template = tb.tb_frame.f_globals.get('__jinja_template__')
-        if template is not None:
-            lineno = template.get_corresponding_lineno(tb.tb_lineno)
-            tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
-                               lineno)[2]
-
-        frames.append(make_frame_proxy(tb))
-        tb = next
-
-    # if we don't have any exceptions in the frames left, we have to
-    # reraise it unchanged.
-    # XXX: can we backup here?  when could this happen?
-    if not frames:
-        reraise(exc_info[0], exc_info[1], exc_info[2])
-
-    return ProcessedTraceback(exc_info[0], exc_info[1], frames)
-
-
-def fake_exc_info(exc_info, filename, lineno):
-    """Helper for `translate_exception`."""
-    exc_type, exc_value, tb = exc_info
-
-    # figure the real context out
-    if tb is not None:
-        real_locals = tb.tb_frame.f_locals.copy()
-        ctx = real_locals.get('context')
-        if ctx:
-            locals = ctx.get_all()
-        else:
-            locals = {}
-        for name, value in iteritems(real_locals):
-            if name.startswith('l_') and value is not missing:
-                locals[name[2:]] = value
-
-        # if there is a local called __jinja_exception__, we get
-        # rid of it to not break the debug functionality.
-        locals.pop('__jinja_exception__', None)
-    else:
-        locals = {}
-
-    # assamble fake globals we need
-    globals = {
-        '__name__':             filename,
-        '__file__':             filename,
-        '__jinja_exception__':  exc_info[:2],
-
-        # we don't want to keep the reference to the template around
-        # to not cause circular dependencies, but we mark it as Jinja
-        # frame for the ProcessedTraceback
-        '__jinja_template__':   None
-    }
-
-    # and fake the exception
-    code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
-
-    # if it's possible, change the name of the code.  This won't work
-    # on some python environments such as google appengine
-    try:
-        if tb is None:
-            location = 'template'
-        else:
-            function = tb.tb_frame.f_code.co_name
-            if function == 'root':
-                location = 'top-level template code'
-            elif function.startswith('block_'):
-                location = 'block "%s"' % function[6:]
-            else:
-                location = 'template'
-        code = code_type(0, code.co_nlocals, code.co_stacksize,
-                         code.co_flags, code.co_code, code.co_consts,
-                         code.co_names, code.co_varnames, filename,
-                         location, code.co_firstlineno,
-                         code.co_lnotab, (), ())
-    except:
-        pass
-
-    # execute the code and catch the new traceback
-    try:
-        exec(code, globals, locals)
-    except:
-        exc_info = sys.exc_info()
-        new_tb = exc_info[2].tb_next
-
-    # return without this frame
-    return exc_info[:2] + (new_tb,)
-
-
-def _init_ugly_crap():
-    """This function implements a few ugly things so that we can patch the
-    traceback objects.  The function returned allows resetting `tb_next` on
-    any python traceback object.  Do not attempt to use this on non cpython
-    interpreters
-    """
-    import ctypes
-    from types import TracebackType
-
-    # figure out side of _Py_ssize_t
-    if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
-        _Py_ssize_t = ctypes.c_int64
-    else:
-        _Py_ssize_t = ctypes.c_int
-
-    # regular python
-    class _PyObject(ctypes.Structure):
-        pass
-    _PyObject._fields_ = [
-        ('ob_refcnt', _Py_ssize_t),
-        ('ob_type', ctypes.POINTER(_PyObject))
-    ]
-
-    # python with trace
-    if hasattr(sys, 'getobjects'):
-        class _PyObject(ctypes.Structure):
-            pass
-        _PyObject._fields_ = [
-            ('_ob_next', ctypes.POINTER(_PyObject)),
-            ('_ob_prev', ctypes.POINTER(_PyObject)),
-            ('ob_refcnt', _Py_ssize_t),
-            ('ob_type', ctypes.POINTER(_PyObject))
-        ]
-
-    class _Traceback(_PyObject):
-        pass
-    _Traceback._fields_ = [
-        ('tb_next', ctypes.POINTER(_Traceback)),
-        ('tb_frame', ctypes.POINTER(_PyObject)),
-        ('tb_lasti', ctypes.c_int),
-        ('tb_lineno', ctypes.c_int)
-    ]
-
-    def tb_set_next(tb, next):
-        """Set the tb_next attribute of a traceback object."""
-        if not (isinstance(tb, TracebackType) and
-                (next is None or isinstance(next, TracebackType))):
-            raise TypeError('tb_set_next arguments must be traceback objects')
-        obj = _Traceback.from_address(id(tb))
-        if tb.tb_next is not None:
-            old = _Traceback.from_address(id(tb.tb_next))
-            old.ob_refcnt -= 1
-        if next is None:
-            obj.tb_next = ctypes.POINTER(_Traceback)()
-        else:
-            next = _Traceback.from_address(id(next))
-            next.ob_refcnt += 1
-            obj.tb_next = ctypes.pointer(next)
-
-    return tb_set_next
-
-
-# try to get a tb_set_next implementation if we don't have transparent
-# proxies.
-tb_set_next = None
-if tproxy is None:
-    try:
-        tb_set_next = _init_ugly_crap()
-    except:
-        pass
-    del _init_ugly_crap
diff --git a/python/ext-libs/jinja2/defaults.py b/python/ext-libs/jinja2/defaults.py
deleted file mode 100644
index a27cb80..0000000
--- a/python/ext-libs/jinja2/defaults.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.defaults
-    ~~~~~~~~~~~~~~~
-
-    Jinja default filters and tags.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-from jinja2._compat import range_type
-from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
-
-
-# defaults for the parser / lexer
-BLOCK_START_STRING = '{%'
-BLOCK_END_STRING = '%}'
-VARIABLE_START_STRING = '{{'
-VARIABLE_END_STRING = '}}'
-COMMENT_START_STRING = '{#'
-COMMENT_END_STRING = '#}'
-LINE_STATEMENT_PREFIX = None
-LINE_COMMENT_PREFIX = None
-TRIM_BLOCKS = False
-LSTRIP_BLOCKS = False
-NEWLINE_SEQUENCE = '\n'
-KEEP_TRAILING_NEWLINE = False
-
-
-# default filters, tests and namespace
-from jinja2.filters import FILTERS as DEFAULT_FILTERS
-from jinja2.tests import TESTS as DEFAULT_TESTS
-DEFAULT_NAMESPACE = {
-    'range':        range_type,
-    'dict':         lambda **kw: kw,
-    'lipsum':       generate_lorem_ipsum,
-    'cycler':       Cycler,
-    'joiner':       Joiner
-}
-
-
-# export all constants
-__all__ = tuple(x for x in locals().keys() if x.isupper())
diff --git a/python/ext-libs/jinja2/environment.py b/python/ext-libs/jinja2/environment.py
deleted file mode 100644
index 45fabad..0000000
--- a/python/ext-libs/jinja2/environment.py
+++ /dev/null
@@ -1,1191 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.environment
-    ~~~~~~~~~~~~~~~~~~
-
-    Provides a class that holds runtime and parsing time options.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import os
-import sys
-from jinja2 import nodes
-from jinja2.defaults import BLOCK_START_STRING, \
-     BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
-     COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
-     LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
-     DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \
-     KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
-from jinja2.lexer import get_lexer, TokenStream
-from jinja2.parser import Parser
-from jinja2.nodes import EvalContext
-from jinja2.optimizer import optimize
-from jinja2.compiler import generate
-from jinja2.runtime import Undefined, new_context
-from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \
-     TemplatesNotFound, TemplateRuntimeError
-from jinja2.utils import import_string, LRUCache, Markup, missing, \
-     concat, consume, internalcode
-from jinja2._compat import imap, ifilter, string_types, iteritems, \
-     text_type, reraise, implements_iterator, implements_to_string, \
-     get_next, encode_filename, PY2, PYPY
-from functools import reduce
-
-
-# for direct template usage we have up to ten living environments
-_spontaneous_environments = LRUCache(10)
-
-# the function to create jinja traceback objects.  This is dynamically
-# imported on the first exception in the exception handler.
-_make_traceback = None
-
-
-def get_spontaneous_environment(*args):
-    """Return a new spontaneous environment.  A spontaneous environment is an
-    unnamed and unaccessible (in theory) environment that is used for
-    templates generated from a string and not from the file system.
-    """
-    try:
-        env = _spontaneous_environments.get(args)
-    except TypeError:
-        return Environment(*args)
-    if env is not None:
-        return env
-    _spontaneous_environments[args] = env = Environment(*args)
-    env.shared = True
-    return env
-
-
-def create_cache(size):
-    """Return the cache class for the given size."""
-    if size == 0:
-        return None
-    if size < 0:
-        return {}
-    return LRUCache(size)
-
-
-def copy_cache(cache):
-    """Create an empty copy of the given cache."""
-    if cache is None:
-        return None
-    elif type(cache) is dict:
-        return {}
-    return LRUCache(cache.capacity)
-
-
-def load_extensions(environment, extensions):
-    """Load the extensions from the list and bind it to the environment.
-    Returns a dict of instantiated environments.
-    """
-    result = {}
-    for extension in extensions:
-        if isinstance(extension, string_types):
-            extension = import_string(extension)
-        result[extension.identifier] = extension(environment)
-    return result
-
-
-def _environment_sanity_check(environment):
-    """Perform a sanity check on the environment."""
-    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
-           'be a subclass of undefined because filters depend on it.'
-    assert environment.block_start_string != \
-           environment.variable_start_string != \
-           environment.comment_start_string, 'block, variable and comment ' \
-           'start strings must be different'
-    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
-           'newline_sequence set to unknown line ending string.'
-    return environment
-
-
-class Environment(object):
-    r"""The core component of Jinja is the `Environment`.  It contains
-    important shared variables like configuration, filters, tests,
-    globals and others.  Instances of this class may be modified if
-    they are not shared and if no template was loaded so far.
-    Modifications on environments after the first template was loaded
-    will lead to surprising effects and undefined behavior.
-
-    Here the possible initialization parameters:
-
-        `block_start_string`
-            The string marking the begin of a block.  Defaults to ``'{%'``.
-
-        `block_end_string`
-            The string marking the end of a block.  Defaults to ``'%}'``.
-
-        `variable_start_string`
-            The string marking the begin of a print statement.
-            Defaults to ``'{{'``.
-
-        `variable_end_string`
-            The string marking the end of a print statement.  Defaults to
-            ``'}}'``.
-
-        `comment_start_string`
-            The string marking the begin of a comment.  Defaults to ``'{#'``.
-
-        `comment_end_string`
-            The string marking the end of a comment.  Defaults to ``'#}'``.
-
-        `line_statement_prefix`
-            If given and a string, this will be used as prefix for line based
-            statements.  See also :ref:`line-statements`.
-
-        `line_comment_prefix`
-            If given and a string, this will be used as prefix for line based
-            based comments.  See also :ref:`line-statements`.
-
-            .. versionadded:: 2.2
-
-        `trim_blocks`
-            If this is set to ``True`` the first newline after a block is
-            removed (block, not variable tag!).  Defaults to `False`.
-
-        `lstrip_blocks`
-            If this is set to ``True`` leading spaces and tabs are stripped
-            from the start of a line to a block.  Defaults to `False`.
-
-        `newline_sequence`
-            The sequence that starts a newline.  Must be one of ``'\r'``,
-            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
-            useful default for Linux and OS X systems as well as web
-            applications.
-
-        `keep_trailing_newline`
-            Preserve the trailing newline when rendering templates.
-            The default is ``False``, which causes a single newline,
-            if present, to be stripped from the end of the template.
-
-            .. versionadded:: 2.7
-
-        `extensions`
-            List of Jinja extensions to use.  This can either be import paths
-            as strings or extension classes.  For more information have a
-            look at :ref:`the extensions documentation <jinja-extensions>`.
-
-        `optimized`
-            should the optimizer be enabled?  Default is `True`.
-
-        `undefined`
-            :class:`Undefined` or a subclass of it that is used to represent
-            undefined values in the template.
-
-        `finalize`
-            A callable that can be used to process the result of a variable
-            expression before it is output.  For example one can convert
-            `None` implicitly into an empty string here.
-
-        `autoescape`
-            If set to true the XML/HTML autoescaping feature is enabled by
-            default.  For more details about auto escaping see
-            :class:`~jinja2.utils.Markup`.  As of Jinja 2.4 this can also
-            be a callable that is passed the template name and has to
-            return `True` or `False` depending on autoescape should be
-            enabled by default.
-
-            .. versionchanged:: 2.4
-               `autoescape` can now be a function
-
-        `loader`
-            The template loader for this environment.
-
-        `cache_size`
-            The size of the cache.  Per default this is ``50`` which means
-            that if more than 50 templates are loaded the loader will clean
-            out the least recently used template.  If the cache size is set to
-            ``0`` templates are recompiled all the time, if the cache size is
-            ``-1`` the cache will not be cleaned.
-
-        `auto_reload`
-            Some loaders load templates from locations where the template
-            sources may change (ie: file system or database).  If
-            `auto_reload` is set to `True` (default) every time a template is
-            requested the loader checks if the source changed and if yes, it
-            will reload the template.  For higher performance it's possible to
-            disable that.
-
-        `bytecode_cache`
-            If set to a bytecode cache object, this object will provide a
-            cache for the internal Jinja bytecode so that templates don't
-            have to be parsed if they were not changed.
-
-            See :ref:`bytecode-cache` for more information.
-    """
-
-    #: if this environment is sandboxed.  Modifying this variable won't make
-    #: the environment sandboxed though.  For a real sandboxed environment
-    #: have a look at jinja2.sandbox.  This flag alone controls the code
-    #: generation by the compiler.
-    sandboxed = False
-
-    #: True if the environment is just an overlay
-    overlayed = False
-
-    #: the environment this environment is linked to if it is an overlay
-    linked_to = None
-
-    #: shared environments have this set to `True`.  A shared environment
-    #: must not be modified
-    shared = False
-
-    #: these are currently EXPERIMENTAL undocumented features.
-    exception_handler = None
-    exception_formatter = None
-
-    def __init__(self,
-                 block_start_string=BLOCK_START_STRING,
-                 block_end_string=BLOCK_END_STRING,
-                 variable_start_string=VARIABLE_START_STRING,
-                 variable_end_string=VARIABLE_END_STRING,
-                 comment_start_string=COMMENT_START_STRING,
-                 comment_end_string=COMMENT_END_STRING,
-                 line_statement_prefix=LINE_STATEMENT_PREFIX,
-                 line_comment_prefix=LINE_COMMENT_PREFIX,
-                 trim_blocks=TRIM_BLOCKS,
-                 lstrip_blocks=LSTRIP_BLOCKS,
-                 newline_sequence=NEWLINE_SEQUENCE,
-                 keep_trailing_newline=KEEP_TRAILING_NEWLINE,
-                 extensions=(),
-                 optimized=True,
-                 undefined=Undefined,
-                 finalize=None,
-                 autoescape=False,
-                 loader=None,
-                 cache_size=50,
-                 auto_reload=True,
-                 bytecode_cache=None):
-        # !!Important notice!!
-        #   The constructor accepts quite a few arguments that should be
-        #   passed by keyword rather than position.  However it's important to
-        #   not change the order of arguments because it's used at least
-        #   internally in those cases:
-        #       -   spontaneous environments (i18n extension and Template)
-        #       -   unittests
-        #   If parameter changes are required only add parameters at the end
-        #   and don't change the arguments (or the defaults!) of the arguments
-        #   existing already.
-
-        # lexer / parser information
-        self.block_start_string = block_start_string
-        self.block_end_string = block_end_string
-        self.variable_start_string = variable_start_string
-        self.variable_end_string = variable_end_string
-        self.comment_start_string = comment_start_string
-        self.comment_end_string = comment_end_string
-        self.line_statement_prefix = line_statement_prefix
-        self.line_comment_prefix = line_comment_prefix
-        self.trim_blocks = trim_blocks
-        self.lstrip_blocks = lstrip_blocks
-        self.newline_sequence = newline_sequence
-        self.keep_trailing_newline = keep_trailing_newline
-
-        # runtime information
-        self.undefined = undefined
-        self.optimized = optimized
-        self.finalize = finalize
-        self.autoescape = autoescape
-
-        # defaults
-        self.filters = DEFAULT_FILTERS.copy()
-        self.tests = DEFAULT_TESTS.copy()
-        self.globals = DEFAULT_NAMESPACE.copy()
-
-        # set the loader provided
-        self.loader = loader
-        self.cache = create_cache(cache_size)
-        self.bytecode_cache = bytecode_cache
-        self.auto_reload = auto_reload
-
-        # load extensions
-        self.extensions = load_extensions(self, extensions)
-
-        _environment_sanity_check(self)
-
-    def add_extension(self, extension):
-        """Adds an extension after the environment was created.
-
-        .. versionadded:: 2.5
-        """
-        self.extensions.update(load_extensions(self, [extension]))
-
-    def extend(self, **attributes):
-        """Add the items to the instance of the environment if they do not exist
-        yet.  This is used by :ref:`extensions <writing-extensions>` to register
-        callbacks and configuration values without breaking inheritance.
-        """
-        for key, value in iteritems(attributes):
-            if not hasattr(self, key):
-                setattr(self, key, value)
-
-    def overlay(self, block_start_string=missing, block_end_string=missing,
-                variable_start_string=missing, variable_end_string=missing,
-                comment_start_string=missing, comment_end_string=missing,
-                line_statement_prefix=missing, line_comment_prefix=missing,
-                trim_blocks=missing, lstrip_blocks=missing,
-                extensions=missing, optimized=missing,
-                undefined=missing, finalize=missing, autoescape=missing,
-                loader=missing, cache_size=missing, auto_reload=missing,
-                bytecode_cache=missing):
-        """Create a new overlay environment that shares all the data with the
-        current environment except of cache and the overridden attributes.
-        Extensions cannot be removed for an overlayed environment.  An overlayed
-        environment automatically gets all the extensions of the environment it
-        is linked to plus optional extra extensions.
-
-        Creating overlays should happen after the initial environment was set
-        up completely.  Not all attributes are truly linked, some are just
-        copied over so modifications on the original environment may not shine
-        through.
-        """
-        args = dict(locals())
-        del args['self'], args['cache_size'], args['extensions']
-
-        rv = object.__new__(self.__class__)
-        rv.__dict__.update(self.__dict__)
-        rv.overlayed = True
-        rv.linked_to = self
-
-        for key, value in iteritems(args):
-            if value is not missing:
-                setattr(rv, key, value)
-
-        if cache_size is not missing:
-            rv.cache = create_cache(cache_size)
-        else:
-            rv.cache = copy_cache(self.cache)
-
-        rv.extensions = {}
-        for key, value in iteritems(self.extensions):
-            rv.extensions[key] = value.bind(rv)
-        if extensions is not missing:
-            rv.extensions.update(load_extensions(rv, extensions))
-
-        return _environment_sanity_check(rv)
-
-    lexer = property(get_lexer, doc="The lexer for this environment.")
-
-    def iter_extensions(self):
-        """Iterates over the extensions by priority."""
-        return iter(sorted(self.extensions.values(),
-                           key=lambda x: x.priority))
-
-    def getitem(self, obj, argument):
-        """Get an item or attribute of an object but prefer the item."""
-        try:
-            return obj[argument]
-        except (TypeError, LookupError):
-            if isinstance(argument, string_types):
-                try:
-                    attr = str(argument)
-                except Exception:
-                    pass
-                else:
-                    try:
-                        return getattr(obj, attr)
-                    except AttributeError:
-                        pass
-            return self.undefined(obj=obj, name=argument)
-
-    def getattr(self, obj, attribute):
-        """Get an item or attribute of an object but prefer the attribute.
-        Unlike :meth:`getitem` the attribute *must* be a bytestring.
-        """
-        try:
-            return getattr(obj, attribute)
-        except AttributeError:
-            pass
-        try:
-            return obj[attribute]
-        except (TypeError, LookupError, AttributeError):
-            return self.undefined(obj=obj, name=attribute)
-
-    def call_filter(self, name, value, args=None, kwargs=None,
-                    context=None, eval_ctx=None):
-        """Invokes a filter on a value the same way the compiler does it.
-
-        .. versionadded:: 2.7
-        """
-        func = self.filters.get(name)
-        if func is None:
-            raise TemplateRuntimeError('no filter named %r' % name)
-        args = [value] + list(args or ())
-        if getattr(func, 'contextfilter', False):
-            if context is None:
-                raise TemplateRuntimeError('Attempted to invoke context '
-                                           'filter without context')
-            args.insert(0, context)
-        elif getattr(func, 'evalcontextfilter', False):
-            if eval_ctx is None:
-                if context is not None:
-                    eval_ctx = context.eval_ctx
-                else:
-                    eval_ctx = EvalContext(self)
-            args.insert(0, eval_ctx)
-        elif getattr(func, 'environmentfilter', False):
-            args.insert(0, self)
-        return func(*args, **(kwargs or {}))
-
-    def call_test(self, name, value, args=None, kwargs=None):
-        """Invokes a test on a value the same way the compiler does it.
-
-        .. versionadded:: 2.7
-        """
-        func = self.tests.get(name)
-        if func is None:
-            raise TemplateRuntimeError('no test named %r' % name)
-        return func(value, *(args or ()), **(kwargs or {}))
-
-    @internalcode
-    def parse(self, source, name=None, filename=None):
-        """Parse the sourcecode and return the abstract syntax tree.  This
-        tree of nodes is used by the compiler to convert the template into
-        executable source- or bytecode.  This is useful for debugging or to
-        extract information from templates.
-
-        If you are :ref:`developing Jinja2 extensions <writing-extensions>`
-        this gives you a good overview of the node tree generated.
-        """
-        try:
-            return self._parse(source, name, filename)
-        except TemplateSyntaxError:
-            exc_info = sys.exc_info()
-        self.handle_exception(exc_info, source_hint=source)
-
-    def _parse(self, source, name, filename):
-        """Internal parsing function used by `parse` and `compile`."""
-        return Parser(self, source, name, encode_filename(filename)).parse()
-
-    def lex(self, source, name=None, filename=None):
-        """Lex the given sourcecode and return a generator that yields
-        tokens as tuples in the form ``(lineno, token_type, value)``.
-        This can be useful for :ref:`extension development <writing-extensions>`
-        and debugging templates.
-
-        This does not perform preprocessing.  If you want the preprocessing
-        of the extensions to be applied you have to filter source through
-        the :meth:`preprocess` method.
-        """
-        source = text_type(source)
-        try:
-            return self.lexer.tokeniter(source, name, filename)
-        except TemplateSyntaxError:
-            exc_info = sys.exc_info()
-        self.handle_exception(exc_info, source_hint=source)
-
-    def preprocess(self, source, name=None, filename=None):
-        """Preprocesses the source with all extensions.  This is automatically
-        called for all parsing and compiling methods but *not* for :meth:`lex`
-        because there you usually only want the actual source tokenized.
-        """
-        return reduce(lambda s, e: e.preprocess(s, name, filename),
-                      self.iter_extensions(), text_type(source))
-
-    def _tokenize(self, source, name, filename=None, state=None):
-        """Called by the parser to do the preprocessing and filtering
-        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
-        """
-        source = self.preprocess(source, name, filename)
-        stream = self.lexer.tokenize(source, name, filename, state)
-        for ext in self.iter_extensions():
-            stream = ext.filter_stream(stream)
-            if not isinstance(stream, TokenStream):
-                stream = TokenStream(stream, name, filename)
-        return stream
-
-    def _generate(self, source, name, filename, defer_init=False):
-        """Internal hook that can be overridden to hook a different generate
-        method in.
-
-        .. versionadded:: 2.5
-        """
-        return generate(source, self, name, filename, defer_init=defer_init)
-
-    def _compile(self, source, filename):
-        """Internal hook that can be overridden to hook a different compile
-        method in.
-
-        .. versionadded:: 2.5
-        """
-        return compile(source, filename, 'exec')
-
-    @internalcode
-    def compile(self, source, name=None, filename=None, raw=False,
-                defer_init=False):
-        """Compile a node or template source code.  The `name` parameter is
-        the load name of the template after it was joined using
-        :meth:`join_path` if necessary, not the filename on the file system.
-        the `filename` parameter is the estimated filename of the template on
-        the file system.  If the template came from a database or memory this
-        can be omitted.
-
-        The return value of this method is a python code object.  If the `raw`
-        parameter is `True` the return value will be a string with python
-        code equivalent to the bytecode returned otherwise.  This method is
-        mainly used internally.
-
-        `defer_init` is use internally to aid the module code generator.  This
-        causes the generated code to be able to import without the global
-        environment variable to be set.
-
-        .. versionadded:: 2.4
-           `defer_init` parameter added.
-        """
-        source_hint = None
-        try:
-            if isinstance(source, string_types):
-                source_hint = source
-                source = self._parse(source, name, filename)
-            if self.optimized:
-                source = optimize(source, self)
-            source = self._generate(source, name, filename,
-                                    defer_init=defer_init)
-            if raw:
-                return source
-            if filename is None:
-                filename = '<template>'
-            else:
-                filename = encode_filename(filename)
-            return self._compile(source, filename)
-        except TemplateSyntaxError:
-            exc_info = sys.exc_info()
-        self.handle_exception(exc_info, source_hint=source)
-
-    def compile_expression(self, source, undefined_to_none=True):
-        """A handy helper method that returns a callable that accepts keyword
-        arguments that appear as variables in the expression.  If called it
-        returns the result of the expression.
-
-        This is useful if applications want to use the same rules as Jinja
-        in template "configuration files" or similar situations.
-
-        Example usage:
-
-        >>> env = Environment()
-        >>> expr = env.compile_expression('foo == 42')
-        >>> expr(foo=23)
-        False
-        >>> expr(foo=42)
-        True
-
-        Per default the return value is converted to `None` if the
-        expression returns an undefined value.  This can be changed
-        by setting `undefined_to_none` to `False`.
-
-        >>> env.compile_expression('var')() is None
-        True
-        >>> env.compile_expression('var', undefined_to_none=False)()
-        Undefined
-
-        .. versionadded:: 2.1
-        """
-        parser = Parser(self, source, state='variable')
-        exc_info = None
-        try:
-            expr = parser.parse_expression()
-            if not parser.stream.eos:
-                raise TemplateSyntaxError('chunk after expression',
-                                          parser.stream.current.lineno,
-                                          None, None)
-            expr.set_environment(self)
-        except TemplateSyntaxError:
-            exc_info = sys.exc_info()
-        if exc_info is not None:
-            self.handle_exception(exc_info, source_hint=source)
-        body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
-        template = self.from_string(nodes.Template(body, lineno=1))
-        return TemplateExpression(template, undefined_to_none)
-
-    def compile_templates(self, target, extensions=None, filter_func=None,
-                          zip='deflated', log_function=None,
-                          ignore_errors=True, py_compile=False):
-        """Finds all the templates the loader can find, compiles them
-        and stores them in `target`.  If `zip` is `None`, instead of in a
-        zipfile, the templates will be will be stored in a directory.
-        By default a deflate zip algorithm is used, to switch to
-        the stored algorithm, `zip` can be set to ``'stored'``.
-
-        `extensions` and `filter_func` are passed to :meth:`list_templates`.
-        Each template returned will be compiled to the target folder or
-        zipfile.
-
-        By default template compilation errors are ignored.  In case a
-        log function is provided, errors are logged.  If you want template
-        syntax errors to abort the compilation you can set `ignore_errors`
-        to `False` and you will get an exception on syntax errors.
-
-        If `py_compile` is set to `True` .pyc files will be written to the
-        target instead of standard .py files.  This flag does not do anything
-        on pypy and Python 3 where pyc files are not picked up by itself and
-        don't give much benefit.
-
-        .. versionadded:: 2.4
-        """
-        from jinja2.loaders import ModuleLoader
-
-        if log_function is None:
-            log_function = lambda x: None
-
-        if py_compile:
-            if not PY2 or PYPY:
-                from warnings import warn
-                warn(Warning('py_compile has no effect on pypy or Python 3'))
-                py_compile = False
-            else:
-                import imp, marshal
-                py_header = imp.get_magic() + \
-                    u'\xff\xff\xff\xff'.encode('iso-8859-15')
-
-                # Python 3.3 added a source filesize to the header
-                if sys.version_info >= (3, 3):
-                    py_header += u'\x00\x00\x00\x00'.encode('iso-8859-15')
-
-        def write_file(filename, data, mode):
-            if zip:
-                info = ZipInfo(filename)
-                info.external_attr = 0o755 << 16
-                zip_file.writestr(info, data)
-            else:
-                f = open(os.path.join(target, filename), mode)
-                try:
-                    f.write(data)
-                finally:
-                    f.close()
-
-        if zip is not None:
-            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
-            zip_file = ZipFile(target, 'w', dict(deflated=ZIP_DEFLATED,
-                                                 stored=ZIP_STORED)[zip])
-            log_function('Compiling into Zip archive "%s"' % target)
-        else:
-            if not os.path.isdir(target):
-                os.makedirs(target)
-            log_function('Compiling into folder "%s"' % target)
-
-        try:
-            for name in self.list_templates(extensions, filter_func):
-                source, filename, _ = self.loader.get_source(self, name)
-                try:
-                    code = self.compile(source, name, filename, True, True)
-                except TemplateSyntaxError as e:
-                    if not ignore_errors:
-                        raise
-                    log_function('Could not compile "%s": %s' % (name, e))
-                    continue
-
-                filename = ModuleLoader.get_module_filename(name)
-
-                if py_compile:
-                    c = self._compile(code, encode_filename(filename))
-                    write_file(filename + 'c', py_header +
-                               marshal.dumps(c), 'wb')
-                    log_function('Byte-compiled "%s" as %s' %
-                                 (name, filename + 'c'))
-                else:
-                    write_file(filename, code, 'w')
-                    log_function('Compiled "%s" as %s' % (name, filename))
-        finally:
-            if zip:
-                zip_file.close()
-
-        log_function('Finished compiling templates')
-
-    def list_templates(self, extensions=None, filter_func=None):
-        """Returns a list of templates for this environment.  This requires
-        that the loader supports the loader's
-        :meth:`~BaseLoader.list_templates` method.
-
-        If there are other files in the template folder besides the
-        actual templates, the returned list can be filtered.  There are two
-        ways: either `extensions` is set to a list of file extensions for
-        templates, or a `filter_func` can be provided which is a callable that
-        is passed a template name and should return `True` if it should end up
-        in the result list.
-
-        If the loader does not support that, a :exc:`TypeError` is raised.
-
-        .. versionadded:: 2.4
-        """
-        x = self.loader.list_templates()
-        if extensions is not None:
-            if filter_func is not None:
-                raise TypeError('either extensions or filter_func '
-                                'can be passed, but not both')
-            filter_func = lambda x: '.' in x and \
-                                    x.rsplit('.', 1)[1] in extensions
-        if filter_func is not None:
-            x = ifilter(filter_func, x)
-        return x
-
-    def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
-        """Exception handling helper.  This is used internally to either raise
-        rewritten exceptions or return a rendered traceback for the template.
-        """
-        global _make_traceback
-        if exc_info is None:
-            exc_info = sys.exc_info()
-
-        # the debugging module is imported when it's used for the first time.
-        # we're doing a lot of stuff there and for applications that do not
-        # get any exceptions in template rendering there is no need to load
-        # all of that.
-        if _make_traceback is None:
-            from jinja2.debug import make_traceback as _make_traceback
-        traceback = _make_traceback(exc_info, source_hint)
-        if rendered and self.exception_formatter is not None:
-            return self.exception_formatter(traceback)
-        if self.exception_handler is not None:
-            self.exception_handler(traceback)
-        exc_type, exc_value, tb = traceback.standard_exc_info
-        reraise(exc_type, exc_value, tb)
-
-    def join_path(self, template, parent):
-        """Join a template with the parent.  By default all the lookups are
-        relative to the loader root so this method returns the `template`
-        parameter unchanged, but if the paths should be relative to the
-        parent template, this function can be used to calculate the real
-        template name.
-
-        Subclasses may override this method and implement template path
-        joining here.
-        """
-        return template
-
-    @internalcode
-    def _load_template(self, name, globals):
-        if self.loader is None:
-            raise TypeError('no loader for this environment specified')
-        if self.cache is not None:
-            template = self.cache.get(name)
-            if template is not None and (not self.auto_reload or \
-                                         template.is_up_to_date):
-                return template
-        template = self.loader.load(self, name, globals)
-        if self.cache is not None:
-            self.cache[name] = template
-        return template
-
-    @internalcode
-    def get_template(self, name, parent=None, globals=None):
-        """Load a template from the loader.  If a loader is configured this
-        method ask the loader for the template and returns a :class:`Template`.
-        If the `parent` parameter is not `None`, :meth:`join_path` is called
-        to get the real template name before loading.
-
-        The `globals` parameter can be used to provide template wide globals.
-        These variables are available in the context at render time.
-
-        If the template does not exist a :exc:`TemplateNotFound` exception is
-        raised.
-
-        .. versionchanged:: 2.4
-           If `name` is a :class:`Template` object it is returned from the
-           function unchanged.
-        """
-        if isinstance(name, Template):
-            return name
-        if parent is not None:
-            name = self.join_path(name, parent)
-        return self._load_template(name, self.make_globals(globals))
-
-    @internalcode
-    def select_template(self, names, parent=None, globals=None):
-        """Works like :meth:`get_template` but tries a number of templates
-        before it fails.  If it cannot find any of the templates, it will
-        raise a :exc:`TemplatesNotFound` exception.
-
-        .. versionadded:: 2.3
-
-        .. versionchanged:: 2.4
-           If `names` contains a :class:`Template` object it is returned
-           from the function unchanged.
-        """
-        if not names:
-            raise TemplatesNotFound(message=u'Tried to select from an empty list '
-                                            u'of templates.')
-        globals = self.make_globals(globals)
-        for name in names:
-            if isinstance(name, Template):
-                return name
-            if parent is not None:
-                name = self.join_path(name, parent)
-            try:
-                return self._load_template(name, globals)
-            except TemplateNotFound:
-                pass
-        raise TemplatesNotFound(names)
-
-    @internalcode
-    def get_or_select_template(self, template_name_or_list,
-                               parent=None, globals=None):
-        """Does a typecheck and dispatches to :meth:`select_template`
-        if an iterable of template names is given, otherwise to
-        :meth:`get_template`.
-
-        .. versionadded:: 2.3
-        """
-        if isinstance(template_name_or_list, string_types):
-            return self.get_template(template_name_or_list, parent, globals)
-        elif isinstance(template_name_or_list, Template):
-            return template_name_or_list
-        return self.select_template(template_name_or_list, parent, globals)
-
-    def from_string(self, source, globals=None, template_class=None):
-        """Load a template from a string.  This parses the source given and
-        returns a :class:`Template` object.
-        """
-        globals = self.make_globals(globals)
-        cls = template_class or self.template_class
-        return cls.from_code(self, self.compile(source), globals, None)
-
-    def make_globals(self, d):
-        """Return a dict for the globals."""
-        if not d:
-            return self.globals
-        return dict(self.globals, **d)
-
-
-class Template(object):
-    """The central template object.  This class represents a compiled template
-    and is used to evaluate it.
-
-    Normally the template object is generated from an :class:`Environment` but
-    it also has a constructor that makes it possible to create a template
-    instance directly using the constructor.  It takes the same arguments as
-    the environment constructor but it's not possible to specify a loader.
-
-    Every template object has a few methods and members that are guaranteed
-    to exist.  However it's important that a template object should be
-    considered immutable.  Modifications on the object are not supported.
-
-    Template objects created from the constructor rather than an environment
-    do have an `environment` attribute that points to a temporary environment
-    that is probably shared with other templates created with the constructor
-    and compatible settings.
-
-    >>> template = Template('Hello {{ name }}!')
-    >>> template.render(name='John Doe')
-    u'Hello John Doe!'
-
-    >>> stream = template.stream(name='John Doe')
-    >>> stream.next()
-    u'Hello John Doe!'
-    >>> stream.next()
-    Traceback (most recent call last):
-        ...
-    StopIteration
-    """
-
-    def __new__(cls, source,
-                block_start_string=BLOCK_START_STRING,
-                block_end_string=BLOCK_END_STRING,
-                variable_start_string=VARIABLE_START_STRING,
-                variable_end_string=VARIABLE_END_STRING,
-                comment_start_string=COMMENT_START_STRING,
-                comment_end_string=COMMENT_END_STRING,
-                line_statement_prefix=LINE_STATEMENT_PREFIX,
-                line_comment_prefix=LINE_COMMENT_PREFIX,
-                trim_blocks=TRIM_BLOCKS,
-                lstrip_blocks=LSTRIP_BLOCKS,
-                newline_sequence=NEWLINE_SEQUENCE,
-                keep_trailing_newline=KEEP_TRAILING_NEWLINE,
-                extensions=(),
-                optimized=True,
-                undefined=Undefined,
-                finalize=None,
-                autoescape=False):
-        env = get_spontaneous_environment(
-            block_start_string, block_end_string, variable_start_string,
-            variable_end_string, comment_start_string, comment_end_string,
-            line_statement_prefix, line_comment_prefix, trim_blocks,
-            lstrip_blocks, newline_sequence, keep_trailing_newline,
-            frozenset(extensions), optimized, undefined, finalize, autoescape,
-            None, 0, False, None)
-        return env.from_string(source, template_class=cls)
-
-    @classmethod
-    def from_code(cls, environment, code, globals, uptodate=None):
-        """Creates a template object from compiled code and the globals.  This
-        is used by the loaders and environment to create a template object.
-        """
-        namespace = {
-            'environment':  environment,
-            '__file__':     code.co_filename
-        }
-        exec(code, namespace)
-        rv = cls._from_namespace(environment, namespace, globals)
-        rv._uptodate = uptodate
-        return rv
-
-    @classmethod
-    def from_module_dict(cls, environment, module_dict, globals):
-        """Creates a template object from a module.  This is used by the
-        module loader to create a template object.
-
-        .. versionadded:: 2.4
-        """
-        return cls._from_namespace(environment, module_dict, globals)
-
-    @classmethod
-    def _from_namespace(cls, environment, namespace, globals):
-        t = object.__new__(cls)
-        t.environment = environment
-        t.globals = globals
-        t.name = namespace['name']
-        t.filename = namespace['__file__']
-        t.blocks = namespace['blocks']
-
-        # render function and module
-        t.root_render_func = namespace['root']
-        t._module = None
-
-        # debug and loader helpers
-        t._debug_info = namespace['debug_info']
-        t._uptodate = None
-
-        # store the reference
-        namespace['environment'] = environment
-        namespace['__jinja_template__'] = t
-
-        return t
-
-    def render(self, *args, **kwargs):
-        """This method accepts the same arguments as the `dict` constructor:
-        A dict, a dict subclass or some keyword arguments.  If no arguments
-        are given the context will be empty.  These two calls do the same::
-
-            template.render(knights='that say nih')
-            template.render({'knights': 'that say nih'})
-
-        This will return the rendered template as unicode string.
-        """
-        vars = dict(*args, **kwargs)
-        try:
-            return concat(self.root_render_func(self.new_context(vars)))
-        except Exception:
-            exc_info = sys.exc_info()
-        return self.environment.handle_exception(exc_info, True)
-
-    def stream(self, *args, **kwargs):
-        """Works exactly like :meth:`generate` but returns a
-        :class:`TemplateStream`.
-        """
-        return TemplateStream(self.generate(*args, **kwargs))
-
-    def generate(self, *args, **kwargs):
-        """For very large templates it can be useful to not render the whole
-        template at once but evaluate each statement after another and yield
-        piece for piece.  This method basically does exactly that and returns
-        a generator that yields one item after another as unicode strings.
-
-        It accepts the same arguments as :meth:`render`.
-        """
-        vars = dict(*args, **kwargs)
-        try:
-            for event in self.root_render_func(self.new_context(vars)):
-                yield event
-        except Exception:
-            exc_info = sys.exc_info()
-        else:
-            return
-        yield self.environment.handle_exception(exc_info, True)
-
-    def new_context(self, vars=None, shared=False, locals=None):
-        """Create a new :class:`Context` for this template.  The vars
-        provided will be passed to the template.  Per default the globals
-        are added to the context.  If shared is set to `True` the data
-        is passed as it to the context without adding the globals.
-
-        `locals` can be a dict of local variables for internal usage.
-        """
-        return new_context(self.environment, self.name, self.blocks,
-                           vars, shared, self.globals, locals)
-
-    def make_module(self, vars=None, shared=False, locals=None):
-        """This method works like the :attr:`module` attribute when called
-        without arguments but it will evaluate the template on every call
-        rather than caching it.  It's also possible to provide
-        a dict which is then used as context.  The arguments are the same
-        as for the :meth:`new_context` method.
-        """
-        return TemplateModule(self, self.new_context(vars, shared, locals))
-
-    @property
-    def module(self):
-        """The template as module.  This is used for imports in the
-        template runtime but is also useful if one wants to access
-        exported template variables from the Python layer:
-
-        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
-        >>> unicode(t.module)
-        u'23'
-        >>> t.module.foo()
-        u'42'
-        """
-        if self._module is not None:
-            return self._module
-        self._module = rv = self.make_module()
-        return rv
-
-    def get_corresponding_lineno(self, lineno):
-        """Return the source line number of a line number in the
-        generated bytecode as they are not in sync.
-        """
-        for template_line, code_line in reversed(self.debug_info):
-            if code_line <= lineno:
-                return template_line
-        return 1
-
-    @property
-    def is_up_to_date(self):
-        """If this variable is `False` there is a newer version available."""
-        if self._uptodate is None:
-            return True
-        return self._uptodate()
-
-    @property
-    def debug_info(self):
-        """The debug info mapping."""
-        return [tuple(imap(int, x.split('='))) for x in
-                self._debug_info.split('&')]
-
-    def __repr__(self):
-        if self.name is None:
-            name = 'memory:%x' % id(self)
-        else:
-            name = repr(self.name)
-        return '<%s %s>' % (self.__class__.__name__, name)
-
-
- at implements_to_string
-class TemplateModule(object):
-    """Represents an imported template.  All the exported names of the
-    template are available as attributes on this object.  Additionally
-    converting it into an unicode- or bytestrings renders the contents.
-    """
-
-    def __init__(self, template, context):
-        self._body_stream = list(template.root_render_func(context))
-        self.__dict__.update(context.get_exported())
-        self.__name__ = template.name
-
-    def __html__(self):
-        return Markup(concat(self._body_stream))
-
-    def __str__(self):
-        return concat(self._body_stream)
-
-    def __repr__(self):
-        if self.__name__ is None:
-            name = 'memory:%x' % id(self)
-        else:
-            name = repr(self.__name__)
-        return '<%s %s>' % (self.__class__.__name__, name)
-
-
-class TemplateExpression(object):
-    """The :meth:`jinja2.Environment.compile_expression` method returns an
-    instance of this object.  It encapsulates the expression-like access
-    to the template with an expression it wraps.
-    """
-
-    def __init__(self, template, undefined_to_none):
-        self._template = template
-        self._undefined_to_none = undefined_to_none
-
-    def __call__(self, *args, **kwargs):
-        context = self._template.new_context(dict(*args, **kwargs))
-        consume(self._template.root_render_func(context))
-        rv = context.vars['result']
-        if self._undefined_to_none and isinstance(rv, Undefined):
-            rv = None
-        return rv
-
-
- at implements_iterator
-class TemplateStream(object):
-    """A template stream works pretty much like an ordinary python generator
-    but it can buffer multiple items to reduce the number of total iterations.
-    Per default the output is unbuffered which means that for every unbuffered
-    instruction in the template one unicode string is yielded.
-
-    If buffering is enabled with a buffer size of 5, five items are combined
-    into a new unicode string.  This is mainly useful if you are streaming
-    big templates to a client via WSGI which flushes after each iteration.
-    """
-
-    def __init__(self, gen):
-        self._gen = gen
-        self.disable_buffering()
-
-    def dump(self, fp, encoding=None, errors='strict'):
-        """Dump the complete stream into a file or file-like object.
-        Per default unicode strings are written, if you want to encode
-        before writing specify an `encoding`.
-
-        Example usage::
-
-            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
-        """
-        close = False
-        if isinstance(fp, string_types):
-            fp = open(fp, encoding is None and 'w' or 'wb')
-            close = True
-        try:
-            if encoding is not None:
-                iterable = (x.encode(encoding, errors) for x in self)
-            else:
-                iterable = self
-            if hasattr(fp, 'writelines'):
-                fp.writelines(iterable)
-            else:
-                for item in iterable:
-                    fp.write(item)
-        finally:
-            if close:
-                fp.close()
-
-    def disable_buffering(self):
-        """Disable the output buffering."""
-        self._next = get_next(self._gen)
-        self.buffered = False
-
-    def enable_buffering(self, size=5):
-        """Enable buffering.  Buffer `size` items before yielding them."""
-        if size <= 1:
-            raise ValueError('buffer size too small')
-
-        def generator(next):
-            buf = []
-            c_size = 0
-            push = buf.append
-
-            while 1:
-                try:
-                    while c_size < size:
-                        c = next()
-                        push(c)
-                        if c:
-                            c_size += 1
-                except StopIteration:
-                    if not c_size:
-                        return
-                yield concat(buf)
-                del buf[:]
-                c_size = 0
-
-        self.buffered = True
-        self._next = get_next(generator(get_next(self._gen)))
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        return self._next()
-
-
-# hook in default template class.  if anyone reads this comment: ignore that
-# it's possible to use custom templates ;-)
-Environment.template_class = Template
diff --git a/python/ext-libs/jinja2/exceptions.py b/python/ext-libs/jinja2/exceptions.py
deleted file mode 100644
index c9df6dc..0000000
--- a/python/ext-libs/jinja2/exceptions.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.exceptions
-    ~~~~~~~~~~~~~~~~~
-
-    Jinja exceptions.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-from jinja2._compat import imap, text_type, PY2, implements_to_string
-
-
-class TemplateError(Exception):
-    """Baseclass for all template errors."""
-
-    if PY2:
-        def __init__(self, message=None):
-            if message is not None:
-                message = text_type(message).encode('utf-8')
-            Exception.__init__(self, message)
-
-        @property
-        def message(self):
-            if self.args:
-                message = self.args[0]
-                if message is not None:
-                    return message.decode('utf-8', 'replace')
-
-        def __unicode__(self):
-            return self.message or u''
-    else:
-        def __init__(self, message=None):
-            Exception.__init__(self, message)
-
-        @property
-        def message(self):
-            if self.args:
-                message = self.args[0]
-                if message is not None:
-                    return message
-
-
- at implements_to_string
-class TemplateNotFound(IOError, LookupError, TemplateError):
-    """Raised if a template does not exist."""
-
-    # looks weird, but removes the warning descriptor that just
-    # bogusly warns us about message being deprecated
-    message = None
-
-    def __init__(self, name, message=None):
-        IOError.__init__(self)
-        if message is None:
-            message = name
-        self.message = message
-        self.name = name
-        self.templates = [name]
-
-    def __str__(self):
-        return self.message
-
-
-class TemplatesNotFound(TemplateNotFound):
-    """Like :class:`TemplateNotFound` but raised if multiple templates
-    are selected.  This is a subclass of :class:`TemplateNotFound`
-    exception, so just catching the base exception will catch both.
-
-    .. versionadded:: 2.2
-    """
-
-    def __init__(self, names=(), message=None):
-        if message is None:
-            message = u'none of the templates given were found: ' + \
-                      u', '.join(imap(text_type, names))
-        TemplateNotFound.__init__(self, names and names[-1] or None, message)
-        self.templates = list(names)
-
-
- at implements_to_string
-class TemplateSyntaxError(TemplateError):
-    """Raised to tell the user that there is a problem with the template."""
-
-    def __init__(self, message, lineno, name=None, filename=None):
-        TemplateError.__init__(self, message)
-        self.lineno = lineno
-        self.name = name
-        self.filename = filename
-        self.source = None
-
-        # this is set to True if the debug.translate_syntax_error
-        # function translated the syntax error into a new traceback
-        self.translated = False
-
-    def __str__(self):
-        # for translated errors we only return the message
-        if self.translated:
-            return self.message
-
-        # otherwise attach some stuff
-        location = 'line %d' % self.lineno
-        name = self.filename or self.name
-        if name:
-            location = 'File "%s", %s' % (name, location)
-        lines = [self.message, '  ' + location]
-
-        # if the source is set, add the line to the output
-        if self.source is not None:
-            try:
-                line = self.source.splitlines()[self.lineno - 1]
-            except IndexError:
-                line = None
-            if line:
-                lines.append('    ' + line.strip())
-
-        return u'\n'.join(lines)
-
-
-class TemplateAssertionError(TemplateSyntaxError):
-    """Like a template syntax error, but covers cases where something in the
-    template caused an error at compile time that wasn't necessarily caused
-    by a syntax error.  However it's a direct subclass of
-    :exc:`TemplateSyntaxError` and has the same attributes.
-    """
-
-
-class TemplateRuntimeError(TemplateError):
-    """A generic runtime error in the template engine.  Under some situations
-    Jinja may raise this exception.
-    """
-
-
-class UndefinedError(TemplateRuntimeError):
-    """Raised if a template tries to operate on :class:`Undefined`."""
-
-
-class SecurityError(TemplateRuntimeError):
-    """Raised if a template tries to do something insecure if the
-    sandbox is enabled.
-    """
-
-
-class FilterArgumentError(TemplateRuntimeError):
-    """This error is raised if a filter was called with inappropriate
-    arguments
-    """
diff --git a/python/ext-libs/jinja2/ext.py b/python/ext-libs/jinja2/ext.py
deleted file mode 100644
index 11693fb..0000000
--- a/python/ext-libs/jinja2/ext.py
+++ /dev/null
@@ -1,636 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.ext
-    ~~~~~~~~~~
-
-    Jinja extensions allow to add custom tags similar to the way django custom
-    tags work.  By default two example extensions exist: an i18n and a cache
-    extension.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-from jinja2 import nodes
-from jinja2.defaults import BLOCK_START_STRING, \
-     BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
-     COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
-     LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
-     KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
-from jinja2.environment import Environment
-from jinja2.runtime import concat
-from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError
-from jinja2.utils import contextfunction, import_string, Markup
-from jinja2._compat import next, with_metaclass, string_types, iteritems
-
-
-# the only real useful gettext functions for a Jinja template.  Note
-# that ugettext must be assigned to gettext as Jinja doesn't support
-# non unicode strings.
-GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext')
-
-
-class ExtensionRegistry(type):
-    """Gives the extension an unique identifier."""
-
-    def __new__(cls, name, bases, d):
-        rv = type.__new__(cls, name, bases, d)
-        rv.identifier = rv.__module__ + '.' + rv.__name__
-        return rv
-
-
-class Extension(with_metaclass(ExtensionRegistry, object)):
-    """Extensions can be used to add extra functionality to the Jinja template
-    system at the parser level.  Custom extensions are bound to an environment
-    but may not store environment specific data on `self`.  The reason for
-    this is that an extension can be bound to another environment (for
-    overlays) by creating a copy and reassigning the `environment` attribute.
-
-    As extensions are created by the environment they cannot accept any
-    arguments for configuration.  One may want to work around that by using
-    a factory function, but that is not possible as extensions are identified
-    by their import name.  The correct way to configure the extension is
-    storing the configuration values on the environment.  Because this way the
-    environment ends up acting as central configuration storage the
-    attributes may clash which is why extensions have to ensure that the names
-    they choose for configuration are not too generic.  ``prefix`` for example
-    is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
-    name as includes the name of the extension (fragment cache).
-    """
-
-    #: if this extension parses this is the list of tags it's listening to.
-    tags = set()
-
-    #: the priority of that extension.  This is especially useful for
-    #: extensions that preprocess values.  A lower value means higher
-    #: priority.
-    #:
-    #: .. versionadded:: 2.4
-    priority = 100
-
-    def __init__(self, environment):
-        self.environment = environment
-
-    def bind(self, environment):
-        """Create a copy of this extension bound to another environment."""
-        rv = object.__new__(self.__class__)
-        rv.__dict__.update(self.__dict__)
-        rv.environment = environment
-        return rv
-
-    def preprocess(self, source, name, filename=None):
-        """This method is called before the actual lexing and can be used to
-        preprocess the source.  The `filename` is optional.  The return value
-        must be the preprocessed source.
-        """
-        return source
-
-    def filter_stream(self, stream):
-        """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
-        to filter tokens returned.  This method has to return an iterable of
-        :class:`~jinja2.lexer.Token`\s, but it doesn't have to return a
-        :class:`~jinja2.lexer.TokenStream`.
-
-        In the `ext` folder of the Jinja2 source distribution there is a file
-        called `inlinegettext.py` which implements a filter that utilizes this
-        method.
-        """
-        return stream
-
-    def parse(self, parser):
-        """If any of the :attr:`tags` matched this method is called with the
-        parser as first argument.  The token the parser stream is pointing at
-        is the name token that matched.  This method has to return one or a
-        list of multiple nodes.
-        """
-        raise NotImplementedError()
-
-    def attr(self, name, lineno=None):
-        """Return an attribute node for the current extension.  This is useful
-        to pass constants on extensions to generated template code.
-
-        ::
-
-            self.attr('_my_attribute', lineno=lineno)
-        """
-        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
-
-    def call_method(self, name, args=None, kwargs=None, dyn_args=None,
-                    dyn_kwargs=None, lineno=None):
-        """Call a method of the extension.  This is a shortcut for
-        :meth:`attr` + :class:`jinja2.nodes.Call`.
-        """
-        if args is None:
-            args = []
-        if kwargs is None:
-            kwargs = []
-        return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
-                          dyn_args, dyn_kwargs, lineno=lineno)
-
-
- at contextfunction
-def _gettext_alias(__context, *args, **kwargs):
-    return __context.call(__context.resolve('gettext'), *args, **kwargs)
-
-
-def _make_new_gettext(func):
-    @contextfunction
-    def gettext(__context, __string, **variables):
-        rv = __context.call(func, __string)
-        if __context.eval_ctx.autoescape:
-            rv = Markup(rv)
-        return rv % variables
-    return gettext
-
-
-def _make_new_ngettext(func):
-    @contextfunction
-    def ngettext(__context, __singular, __plural, __num, **variables):
-        variables.setdefault('num', __num)
-        rv = __context.call(func, __singular, __plural, __num)
-        if __context.eval_ctx.autoescape:
-            rv = Markup(rv)
-        return rv % variables
-    return ngettext
-
-
-class InternationalizationExtension(Extension):
-    """This extension adds gettext support to Jinja2."""
-    tags = set(['trans'])
-
-    # TODO: the i18n extension is currently reevaluating values in a few
-    # situations.  Take this example:
-    #   {% trans count=something() %}{{ count }} foo{% pluralize
-    #     %}{{ count }} fooss{% endtrans %}
-    # something is called twice here.  One time for the gettext value and
-    # the other time for the n-parameter of the ngettext function.
-
-    def __init__(self, environment):
-        Extension.__init__(self, environment)
-        environment.globals['_'] = _gettext_alias
-        environment.extend(
-            install_gettext_translations=self._install,
-            install_null_translations=self._install_null,
-            install_gettext_callables=self._install_callables,
-            uninstall_gettext_translations=self._uninstall,
-            extract_translations=self._extract,
-            newstyle_gettext=False
-        )
-
-    def _install(self, translations, newstyle=None):
-        gettext = getattr(translations, 'ugettext', None)
-        if gettext is None:
-            gettext = translations.gettext
-        ngettext = getattr(translations, 'ungettext', None)
-        if ngettext is None:
-            ngettext = translations.ngettext
-        self._install_callables(gettext, ngettext, newstyle)
-
-    def _install_null(self, newstyle=None):
-        self._install_callables(
-            lambda x: x,
-            lambda s, p, n: (n != 1 and (p,) or (s,))[0],
-            newstyle
-        )
-
-    def _install_callables(self, gettext, ngettext, newstyle=None):
-        if newstyle is not None:
-            self.environment.newstyle_gettext = newstyle
-        if self.environment.newstyle_gettext:
-            gettext = _make_new_gettext(gettext)
-            ngettext = _make_new_ngettext(ngettext)
-        self.environment.globals.update(
-            gettext=gettext,
-            ngettext=ngettext
-        )
-
-    def _uninstall(self, translations):
-        for key in 'gettext', 'ngettext':
-            self.environment.globals.pop(key, None)
-
-    def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
-        if isinstance(source, string_types):
-            source = self.environment.parse(source)
-        return extract_from_ast(source, gettext_functions)
-
-    def parse(self, parser):
-        """Parse a translatable tag."""
-        lineno = next(parser.stream).lineno
-        num_called_num = False
-
-        # find all the variables referenced.  Additionally a variable can be
-        # defined in the body of the trans block too, but this is checked at
-        # a later state.
-        plural_expr = None
-        plural_expr_assignment = None
-        variables = {}
-        while parser.stream.current.type != 'block_end':
-            if variables:
-                parser.stream.expect('comma')
-
-            # skip colon for python compatibility
-            if parser.stream.skip_if('colon'):
-                break
-
-            name = parser.stream.expect('name')
-            if name.value in variables:
-                parser.fail('translatable variable %r defined twice.' %
-                            name.value, name.lineno,
-                            exc=TemplateAssertionError)
-
-            # expressions
-            if parser.stream.current.type == 'assign':
-                next(parser.stream)
-                variables[name.value] = var = parser.parse_expression()
-            else:
-                variables[name.value] = var = nodes.Name(name.value, 'load')
-
-            if plural_expr is None:
-                if isinstance(var, nodes.Call):
-                    plural_expr = nodes.Name('_trans', 'load')
-                    variables[name.value] = plural_expr
-                    plural_expr_assignment = nodes.Assign(
-                        nodes.Name('_trans', 'store'), var)
-                else:
-                    plural_expr = var
-                num_called_num = name.value == 'num'
-
-        parser.stream.expect('block_end')
-
-        plural = plural_names = None
-        have_plural = False
-        referenced = set()
-
-        # now parse until endtrans or pluralize
-        singular_names, singular = self._parse_block(parser, True)
-        if singular_names:
-            referenced.update(singular_names)
-            if plural_expr is None:
-                plural_expr = nodes.Name(singular_names[0], 'load')
-                num_called_num = singular_names[0] == 'num'
-
-        # if we have a pluralize block, we parse that too
-        if parser.stream.current.test('name:pluralize'):
-            have_plural = True
-            next(parser.stream)
-            if parser.stream.current.type != 'block_end':
-                name = parser.stream.expect('name')
-                if name.value not in variables:
-                    parser.fail('unknown variable %r for pluralization' %
-                                name.value, name.lineno,
-                                exc=TemplateAssertionError)
-                plural_expr = variables[name.value]
-                num_called_num = name.value == 'num'
-            parser.stream.expect('block_end')
-            plural_names, plural = self._parse_block(parser, False)
-            next(parser.stream)
-            referenced.update(plural_names)
-        else:
-            next(parser.stream)
-
-        # register free names as simple name expressions
-        for var in referenced:
-            if var not in variables:
-                variables[var] = nodes.Name(var, 'load')
-
-        if not have_plural:
-            plural_expr = None
-        elif plural_expr is None:
-            parser.fail('pluralize without variables', lineno)
-
-        node = self._make_node(singular, plural, variables, plural_expr,
-                               bool(referenced),
-                               num_called_num and have_plural)
-        node.set_lineno(lineno)
-        if plural_expr_assignment is not None:
-            return [plural_expr_assignment, node]
-        else:
-            return node
-
-    def _parse_block(self, parser, allow_pluralize):
-        """Parse until the next block tag with a given name."""
-        referenced = []
-        buf = []
-        while 1:
-            if parser.stream.current.type == 'data':
-                buf.append(parser.stream.current.value.replace('%', '%%'))
-                next(parser.stream)
-            elif parser.stream.current.type == 'variable_begin':
-                next(parser.stream)
-                name = parser.stream.expect('name').value
-                referenced.append(name)
-                buf.append('%%(%s)s' % name)
-                parser.stream.expect('variable_end')
-            elif parser.stream.current.type == 'block_begin':
-                next(parser.stream)
-                if parser.stream.current.test('name:endtrans'):
-                    break
-                elif parser.stream.current.test('name:pluralize'):
-                    if allow_pluralize:
-                        break
-                    parser.fail('a translatable section can have only one '
-                                'pluralize section')
-                parser.fail('control structures in translatable sections are '
-                            'not allowed')
-            elif parser.stream.eos:
-                parser.fail('unclosed translation block')
-            else:
-                assert False, 'internal parser error'
-
-        return referenced, concat(buf)
-
-    def _make_node(self, singular, plural, variables, plural_expr,
-                   vars_referenced, num_called_num):
-        """Generates a useful node from the data provided."""
-        # no variables referenced?  no need to escape for old style
-        # gettext invocations only if there are vars.
-        if not vars_referenced and not self.environment.newstyle_gettext:
-            singular = singular.replace('%%', '%')
-            if plural:
-                plural = plural.replace('%%', '%')
-
-        # singular only:
-        if plural_expr is None:
-            gettext = nodes.Name('gettext', 'load')
-            node = nodes.Call(gettext, [nodes.Const(singular)],
-                              [], None, None)
-
-        # singular and plural
-        else:
-            ngettext = nodes.Name('ngettext', 'load')
-            node = nodes.Call(ngettext, [
-                nodes.Const(singular),
-                nodes.Const(plural),
-                plural_expr
-            ], [], None, None)
-
-        # in case newstyle gettext is used, the method is powerful
-        # enough to handle the variable expansion and autoescape
-        # handling itself
-        if self.environment.newstyle_gettext:
-            for key, value in iteritems(variables):
-                # the function adds that later anyways in case num was
-                # called num, so just skip it.
-                if num_called_num and key == 'num':
-                    continue
-                node.kwargs.append(nodes.Keyword(key, value))
-
-        # otherwise do that here
-        else:
-            # mark the return value as safe if we are in an
-            # environment with autoescaping turned on
-            node = nodes.MarkSafeIfAutoescape(node)
-            if variables:
-                node = nodes.Mod(node, nodes.Dict([
-                    nodes.Pair(nodes.Const(key), value)
-                    for key, value in variables.items()
-                ]))
-        return nodes.Output([node])
-
-
-class ExprStmtExtension(Extension):
-    """Adds a `do` tag to Jinja2 that works like the print statement just
-    that it doesn't print the return value.
-    """
-    tags = set(['do'])
-
-    def parse(self, parser):
-        node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
-        node.node = parser.parse_tuple()
-        return node
-
-
-class LoopControlExtension(Extension):
-    """Adds break and continue to the template engine."""
-    tags = set(['break', 'continue'])
-
-    def parse(self, parser):
-        token = next(parser.stream)
-        if token.value == 'break':
-            return nodes.Break(lineno=token.lineno)
-        return nodes.Continue(lineno=token.lineno)
-
-
-class WithExtension(Extension):
-    """Adds support for a django-like with block."""
-    tags = set(['with'])
-
-    def parse(self, parser):
-        node = nodes.Scope(lineno=next(parser.stream).lineno)
-        assignments = []
-        while parser.stream.current.type != 'block_end':
-            lineno = parser.stream.current.lineno
-            if assignments:
-                parser.stream.expect('comma')
-            target = parser.parse_assign_target()
-            parser.stream.expect('assign')
-            expr = parser.parse_expression()
-            assignments.append(nodes.Assign(target, expr, lineno=lineno))
-        node.body = assignments + \
-            list(parser.parse_statements(('name:endwith',),
-                                         drop_needle=True))
-        return node
-
-
-class AutoEscapeExtension(Extension):
-    """Changes auto escape rules for a scope."""
-    tags = set(['autoescape'])
-
-    def parse(self, parser):
-        node = nodes.ScopedEvalContextModifier(lineno=next(parser.stream).lineno)
-        node.options = [
-            nodes.Keyword('autoescape', parser.parse_expression())
-        ]
-        node.body = parser.parse_statements(('name:endautoescape',),
-                                            drop_needle=True)
-        return nodes.Scope([node])
-
-
-def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
-                     babel_style=True):
-    """Extract localizable strings from the given template node.  Per
-    default this function returns matches in babel style that means non string
-    parameters as well as keyword arguments are returned as `None`.  This
-    allows Babel to figure out what you really meant if you are using
-    gettext functions that allow keyword arguments for placeholder expansion.
-    If you don't want that behavior set the `babel_style` parameter to `False`
-    which causes only strings to be returned and parameters are always stored
-    in tuples.  As a consequence invalid gettext calls (calls without a single
-    string parameter or string parameters after non-string parameters) are
-    skipped.
-
-    This example explains the behavior:
-
-    >>> from jinja2 import Environment
-    >>> env = Environment()
-    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
-    >>> list(extract_from_ast(node))
-    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
-    >>> list(extract_from_ast(node, babel_style=False))
-    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
-
-    For every string found this function yields a ``(lineno, function,
-    message)`` tuple, where:
-
-    * ``lineno`` is the number of the line on which the string was found,
-    * ``function`` is the name of the ``gettext`` function used (if the
-      string was extracted from embedded Python code), and
-    *  ``message`` is the string itself (a ``unicode`` object, or a tuple
-       of ``unicode`` objects for functions with multiple string arguments).
-
-    This extraction function operates on the AST and is because of that unable
-    to extract any comments.  For comment support you have to use the babel
-    extraction interface or extract comments yourself.
-    """
-    for node in node.find_all(nodes.Call):
-        if not isinstance(node.node, nodes.Name) or \
-           node.node.name not in gettext_functions:
-            continue
-
-        strings = []
-        for arg in node.args:
-            if isinstance(arg, nodes.Const) and \
-               isinstance(arg.value, string_types):
-                strings.append(arg.value)
-            else:
-                strings.append(None)
-
-        for arg in node.kwargs:
-            strings.append(None)
-        if node.dyn_args is not None:
-            strings.append(None)
-        if node.dyn_kwargs is not None:
-            strings.append(None)
-
-        if not babel_style:
-            strings = tuple(x for x in strings if x is not None)
-            if not strings:
-                continue
-        else:
-            if len(strings) == 1:
-                strings = strings[0]
-            else:
-                strings = tuple(strings)
-        yield node.lineno, node.node.name, strings
-
-
-class _CommentFinder(object):
-    """Helper class to find comments in a token stream.  Can only
-    find comments for gettext calls forwards.  Once the comment
-    from line 4 is found, a comment for line 1 will not return a
-    usable value.
-    """
-
-    def __init__(self, tokens, comment_tags):
-        self.tokens = tokens
-        self.comment_tags = comment_tags
-        self.offset = 0
-        self.last_lineno = 0
-
-    def find_backwards(self, offset):
-        try:
-            for _, token_type, token_value in \
-                    reversed(self.tokens[self.offset:offset]):
-                if token_type in ('comment', 'linecomment'):
-                    try:
-                        prefix, comment = token_value.split(None, 1)
-                    except ValueError:
-                        continue
-                    if prefix in self.comment_tags:
-                        return [comment.rstrip()]
-            return []
-        finally:
-            self.offset = offset
-
-    def find_comments(self, lineno):
-        if not self.comment_tags or self.last_lineno > lineno:
-            return []
-        for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]):
-            if token_lineno > lineno:
-                return self.find_backwards(self.offset + idx)
-        return self.find_backwards(len(self.tokens))
-
-
-def babel_extract(fileobj, keywords, comment_tags, options):
-    """Babel extraction method for Jinja templates.
-
-    .. versionchanged:: 2.3
-       Basic support for translation comments was added.  If `comment_tags`
-       is now set to a list of keywords for extraction, the extractor will
-       try to find the best preceding comment that begins with one of the
-       keywords.  For best results, make sure to not have more than one
-       gettext call in one line of code and the matching comment in the
-       same line or the line before.
-
-    .. versionchanged:: 2.5.1
-       The `newstyle_gettext` flag can be set to `True` to enable newstyle
-       gettext calls.
-
-    .. versionchanged:: 2.7
-       A `silent` option can now be provided.  If set to `False` template
-       syntax errors are propagated instead of being ignored.
-
-    :param fileobj: the file-like object the messages should be extracted from
-    :param keywords: a list of keywords (i.e. function names) that should be
-                     recognized as translation functions
-    :param comment_tags: a list of translator tags to search for and include
-                         in the results.
-    :param options: a dictionary of additional options (optional)
-    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
-             (comments will be empty currently)
-    """
-    extensions = set()
-    for extension in options.get('extensions', '').split(','):
-        extension = extension.strip()
-        if not extension:
-            continue
-        extensions.add(import_string(extension))
-    if InternationalizationExtension not in extensions:
-        extensions.add(InternationalizationExtension)
-
-    def getbool(options, key, default=False):
-        return options.get(key, str(default)).lower() in \
-            ('1', 'on', 'yes', 'true')
-
-    silent = getbool(options, 'silent', True)
-    environment = Environment(
-        options.get('block_start_string', BLOCK_START_STRING),
-        options.get('block_end_string', BLOCK_END_STRING),
-        options.get('variable_start_string', VARIABLE_START_STRING),
-        options.get('variable_end_string', VARIABLE_END_STRING),
-        options.get('comment_start_string', COMMENT_START_STRING),
-        options.get('comment_end_string', COMMENT_END_STRING),
-        options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
-        options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
-        getbool(options, 'trim_blocks', TRIM_BLOCKS),
-        getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
-        NEWLINE_SEQUENCE,
-        getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
-        frozenset(extensions),
-        cache_size=0,
-        auto_reload=False
-    )
-
-    if getbool(options, 'newstyle_gettext'):
-        environment.newstyle_gettext = True
-
-    source = fileobj.read().decode(options.get('encoding', 'utf-8'))
-    try:
-        node = environment.parse(source)
-        tokens = list(environment.lex(environment.preprocess(source)))
-    except TemplateSyntaxError as e:
-        if not silent:
-            raise
-        # skip templates with syntax errors
-        return
-
-    finder = _CommentFinder(tokens, comment_tags)
-    for lineno, func, message in extract_from_ast(node, keywords):
-        yield lineno, func, message, finder.find_comments(lineno)
-
-
-#: nicer import names
-i18n = InternationalizationExtension
-do = ExprStmtExtension
-loopcontrols = LoopControlExtension
-with_ = WithExtension
-autoescape = AutoEscapeExtension
diff --git a/python/ext-libs/jinja2/filters.py b/python/ext-libs/jinja2/filters.py
deleted file mode 100644
index fd0db04..0000000
--- a/python/ext-libs/jinja2/filters.py
+++ /dev/null
@@ -1,987 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.filters
-    ~~~~~~~~~~~~~~
-
-    Bundled jinja filters.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import re
-import math
-
-from random import choice
-from operator import itemgetter
-from itertools import groupby
-from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
-     unicode_urlencode
-from jinja2.runtime import Undefined
-from jinja2.exceptions import FilterArgumentError
-from jinja2._compat import next, imap, string_types, text_type, iteritems
-
-
-_word_re = re.compile(r'\w+(?u)')
-
-
-def contextfilter(f):
-    """Decorator for marking context dependent filters. The current
-    :class:`Context` will be passed as first argument.
-    """
-    f.contextfilter = True
-    return f
-
-
-def evalcontextfilter(f):
-    """Decorator for marking eval-context dependent filters.  An eval
-    context object is passed as first argument.  For more information
-    about the eval context, see :ref:`eval-context`.
-
-    .. versionadded:: 2.4
-    """
-    f.evalcontextfilter = True
-    return f
-
-
-def environmentfilter(f):
-    """Decorator for marking evironment dependent filters.  The current
-    :class:`Environment` is passed to the filter as first argument.
-    """
-    f.environmentfilter = True
-    return f
-
-
-def make_attrgetter(environment, attribute):
-    """Returns a callable that looks up the given attribute from a
-    passed object with the rules of the environment.  Dots are allowed
-    to access attributes of attributes.  Integer parts in paths are
-    looked up as integers.
-    """
-    if not isinstance(attribute, string_types) \
-       or ('.' not in attribute and not attribute.isdigit()):
-        return lambda x: environment.getitem(x, attribute)
-    attribute = attribute.split('.')
-    def attrgetter(item):
-        for part in attribute:
-            if part.isdigit():
-                part = int(part)
-            item = environment.getitem(item, part)
-        return item
-    return attrgetter
-
-
-def do_forceescape(value):
-    """Enforce HTML escaping.  This will probably double escape variables."""
-    if hasattr(value, '__html__'):
-        value = value.__html__()
-    return escape(text_type(value))
-
-
-def do_urlencode(value):
-    """Escape strings for use in URLs (uses UTF-8 encoding).  It accepts both
-    dictionaries and regular strings as well as pairwise iterables.
-
-    .. versionadded:: 2.7
-    """
-    itemiter = None
-    if isinstance(value, dict):
-        itemiter = iteritems(value)
-    elif not isinstance(value, string_types):
-        try:
-            itemiter = iter(value)
-        except TypeError:
-            pass
-    if itemiter is None:
-        return unicode_urlencode(value)
-    return u'&'.join(unicode_urlencode(k) + '=' +
-                     unicode_urlencode(v) for k, v in itemiter)
-
-
- at evalcontextfilter
-def do_replace(eval_ctx, s, old, new, count=None):
-    """Return a copy of the value with all occurrences of a substring
-    replaced with a new one. The first argument is the substring
-    that should be replaced, the second is the replacement string.
-    If the optional third argument ``count`` is given, only the first
-    ``count`` occurrences are replaced:
-
-    .. sourcecode:: jinja
-
-        {{ "Hello World"|replace("Hello", "Goodbye") }}
-            -> Goodbye World
-
-        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
-            -> d'oh, d'oh, aaargh
-    """
-    if count is None:
-        count = -1
-    if not eval_ctx.autoescape:
-        return text_type(s).replace(text_type(old), text_type(new), count)
-    if hasattr(old, '__html__') or hasattr(new, '__html__') and \
-       not hasattr(s, '__html__'):
-        s = escape(s)
-    else:
-        s = soft_unicode(s)
-    return s.replace(soft_unicode(old), soft_unicode(new), count)
-
-
-def do_upper(s):
-    """Convert a value to uppercase."""
-    return soft_unicode(s).upper()
-
-
-def do_lower(s):
-    """Convert a value to lowercase."""
-    return soft_unicode(s).lower()
-
-
- at evalcontextfilter
-def do_xmlattr(_eval_ctx, d, autospace=True):
-    """Create an SGML/XML attribute string based on the items in a dict.
-    All values that are neither `none` nor `undefined` are automatically
-    escaped:
-
-    .. sourcecode:: html+jinja
-
-        <ul{{ {'class': 'my_list', 'missing': none,
-                'id': 'list-%d'|format(variable)}|xmlattr }}>
-        ...
-        </ul>
-
-    Results in something like this:
-
-    .. sourcecode:: html
-
-        <ul class="my_list" id="list-42">
-        ...
-        </ul>
-
-    As you can see it automatically prepends a space in front of the item
-    if the filter returned something unless the second parameter is false.
-    """
-    rv = u' '.join(
-        u'%s="%s"' % (escape(key), escape(value))
-        for key, value in iteritems(d)
-        if value is not None and not isinstance(value, Undefined)
-    )
-    if autospace and rv:
-        rv = u' ' + rv
-    if _eval_ctx.autoescape:
-        rv = Markup(rv)
-    return rv
-
-
-def do_capitalize(s):
-    """Capitalize a value. The first character will be uppercase, all others
-    lowercase.
-    """
-    return soft_unicode(s).capitalize()
-
-
-def do_title(s):
-    """Return a titlecased version of the value. I.e. words will start with
-    uppercase letters, all remaining characters are lowercase.
-    """
-    rv = []
-    for item in re.compile(r'([-\s]+)(?u)').split(s):
-        if not item:
-            continue
-        rv.append(item[0].upper() + item[1:].lower())
-    return ''.join(rv)
-
-
-def do_dictsort(value, case_sensitive=False, by='key'):
-    """Sort a dict and yield (key, value) pairs. Because python dicts are
-    unsorted you may want to use this function to order them by either
-    key or value:
-
-    .. sourcecode:: jinja
-
-        {% for item in mydict|dictsort %}
-            sort the dict by key, case insensitive
-
-        {% for item in mydict|dictsort(true) %}
-            sort the dict by key, case sensitive
-
-        {% for item in mydict|dictsort(false, 'value') %}
-            sort the dict by key, case insensitive, sorted
-            normally and ordered by value.
-    """
-    if by == 'key':
-        pos = 0
-    elif by == 'value':
-        pos = 1
-    else:
-        raise FilterArgumentError('You can only sort by either '
-                                  '"key" or "value"')
-    def sort_func(item):
-        value = item[pos]
-        if isinstance(value, string_types) and not case_sensitive:
-            value = value.lower()
-        return value
-
-    return sorted(value.items(), key=sort_func)
-
-
- at environmentfilter
-def do_sort(environment, value, reverse=False, case_sensitive=False,
-            attribute=None):
-    """Sort an iterable.  Per default it sorts ascending, if you pass it
-    true as first argument it will reverse the sorting.
-
-    If the iterable is made of strings the third parameter can be used to
-    control the case sensitiveness of the comparison which is disabled by
-    default.
-
-    .. sourcecode:: jinja
-
-        {% for item in iterable|sort %}
-            ...
-        {% endfor %}
-
-    It is also possible to sort by an attribute (for example to sort
-    by the date of an object) by specifying the `attribute` parameter:
-
-    .. sourcecode:: jinja
-
-        {% for item in iterable|sort(attribute='date') %}
-            ...
-        {% endfor %}
-
-    .. versionchanged:: 2.6
-       The `attribute` parameter was added.
-    """
-    if not case_sensitive:
-        def sort_func(item):
-            if isinstance(item, string_types):
-                item = item.lower()
-            return item
-    else:
-        sort_func = None
-    if attribute is not None:
-        getter = make_attrgetter(environment, attribute)
-        def sort_func(item, processor=sort_func or (lambda x: x)):
-            return processor(getter(item))
-    return sorted(value, key=sort_func, reverse=reverse)
-
-
-def do_default(value, default_value=u'', boolean=False):
-    """If the value is undefined it will return the passed default value,
-    otherwise the value of the variable:
-
-    .. sourcecode:: jinja
-
-        {{ my_variable|default('my_variable is not defined') }}
-
-    This will output the value of ``my_variable`` if the variable was
-    defined, otherwise ``'my_variable is not defined'``. If you want
-    to use default with variables that evaluate to false you have to
-    set the second parameter to `true`:
-
-    .. sourcecode:: jinja
-
-        {{ ''|default('the string was empty', true) }}
-    """
-    if isinstance(value, Undefined) or (boolean and not value):
-        return default_value
-    return value
-
-
- at evalcontextfilter
-def do_join(eval_ctx, value, d=u'', attribute=None):
-    """Return a string which is the concatenation of the strings in the
-    sequence. The separator between elements is an empty string per
-    default, you can define it with the optional parameter:
-
-    .. sourcecode:: jinja
-
-        {{ [1, 2, 3]|join('|') }}
-            -> 1|2|3
-
-        {{ [1, 2, 3]|join }}
-            -> 123
-
-    It is also possible to join certain attributes of an object:
-
-    .. sourcecode:: jinja
-
-        {{ users|join(', ', attribute='username') }}
-
-    .. versionadded:: 2.6
-       The `attribute` parameter was added.
-    """
-    if attribute is not None:
-        value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
-
-    # no automatic escaping?  joining is a lot eaiser then
-    if not eval_ctx.autoescape:
-        return text_type(d).join(imap(text_type, value))
-
-    # if the delimiter doesn't have an html representation we check
-    # if any of the items has.  If yes we do a coercion to Markup
-    if not hasattr(d, '__html__'):
-        value = list(value)
-        do_escape = False
-        for idx, item in enumerate(value):
-            if hasattr(item, '__html__'):
-                do_escape = True
-            else:
-                value[idx] = text_type(item)
-        if do_escape:
-            d = escape(d)
-        else:
-            d = text_type(d)
-        return d.join(value)
-
-    # no html involved, to normal joining
-    return soft_unicode(d).join(imap(soft_unicode, value))
-
-
-def do_center(value, width=80):
-    """Centers the value in a field of a given width."""
-    return text_type(value).center(width)
-
-
- at environmentfilter
-def do_first(environment, seq):
-    """Return the first item of a sequence."""
-    try:
-        return next(iter(seq))
-    except StopIteration:
-        return environment.undefined('No first item, sequence was empty.')
-
-
- at environmentfilter
-def do_last(environment, seq):
-    """Return the last item of a sequence."""
-    try:
-        return next(iter(reversed(seq)))
-    except StopIteration:
-        return environment.undefined('No last item, sequence was empty.')
-
-
- at environmentfilter
-def do_random(environment, seq):
-    """Return a random item from the sequence."""
-    try:
-        return choice(seq)
-    except IndexError:
-        return environment.undefined('No random item, sequence was empty.')
-
-
-def do_filesizeformat(value, binary=False):
-    """Format the value like a 'human-readable' file size (i.e. 13 kB,
-    4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega,
-    Giga, etc.), if the second parameter is set to `True` the binary
-    prefixes are used (Mebi, Gibi).
-    """
-    bytes = float(value)
-    base = binary and 1024 or 1000
-    prefixes = [
-        (binary and 'KiB' or 'kB'),
-        (binary and 'MiB' or 'MB'),
-        (binary and 'GiB' or 'GB'),
-        (binary and 'TiB' or 'TB'),
-        (binary and 'PiB' or 'PB'),
-        (binary and 'EiB' or 'EB'),
-        (binary and 'ZiB' or 'ZB'),
-        (binary and 'YiB' or 'YB')
-    ]
-    if bytes == 1:
-        return '1 Byte'
-    elif bytes < base:
-        return '%d Bytes' % bytes
-    else:
-        for i, prefix in enumerate(prefixes):
-            unit = base ** (i + 2)
-            if bytes < unit:
-                return '%.1f %s' % ((base * bytes / unit), prefix)
-        return '%.1f %s' % ((base * bytes / unit), prefix)
-
-
-def do_pprint(value, verbose=False):
-    """Pretty print a variable. Useful for debugging.
-
-    With Jinja 1.2 onwards you can pass it a parameter.  If this parameter
-    is truthy the output will be more verbose (this requires `pretty`)
-    """
-    return pformat(value, verbose=verbose)
-
-
- at evalcontextfilter
-def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False):
-    """Converts URLs in plain text into clickable links.
-
-    If you pass the filter an additional integer it will shorten the urls
-    to that number. Also a third argument exists that makes the urls
-    "nofollow":
-
-    .. sourcecode:: jinja
-
-        {{ mytext|urlize(40, true) }}
-            links are shortened to 40 chars and defined with rel="nofollow"
-    """
-    rv = urlize(value, trim_url_limit, nofollow)
-    if eval_ctx.autoescape:
-        rv = Markup(rv)
-    return rv
-
-
-def do_indent(s, width=4, indentfirst=False):
-    """Return a copy of the passed string, each line indented by
-    4 spaces. The first line is not indented. If you want to
-    change the number of spaces or indent the first line too
-    you can pass additional parameters to the filter:
-
-    .. sourcecode:: jinja
-
-        {{ mytext|indent(2, true) }}
-            indent by two spaces and indent the first line too.
-    """
-    indention = u' ' * width
-    rv = (u'\n' + indention).join(s.splitlines())
-    if indentfirst:
-        rv = indention + rv
-    return rv
-
-
-def do_truncate(s, length=255, killwords=False, end='...'):
-    """Return a truncated copy of the string. The length is specified
-    with the first parameter which defaults to ``255``. If the second
-    parameter is ``true`` the filter will cut the text at length. Otherwise
-    it will discard the last word. If the text was in fact
-    truncated it will append an ellipsis sign (``"..."``). If you want a
-    different ellipsis sign than ``"..."`` you can specify it using the
-    third parameter.
-
-    .. sourcecode:: jinja
-
-        {{ "foo bar"|truncate(5) }}
-            -> "foo ..."
-        {{ "foo bar"|truncate(5, True) }}
-            -> "foo b..."
-    """
-    if len(s) <= length:
-        return s
-    elif killwords:
-        return s[:length] + end
-    words = s.split(' ')
-    result = []
-    m = 0
-    for word in words:
-        m += len(word) + 1
-        if m > length:
-            break
-        result.append(word)
-    result.append(end)
-    return u' '.join(result)
-
- at environmentfilter
-def do_wordwrap(environment, s, width=79, break_long_words=True,
-                wrapstring=None):
-    """
-    Return a copy of the string passed to the filter wrapped after
-    ``79`` characters.  You can override this default using the first
-    parameter.  If you set the second parameter to `false` Jinja will not
-    split words apart if they are longer than `width`. By default, the newlines
-    will be the default newlines for the environment, but this can be changed
-    using the wrapstring keyword argument.
-
-    .. versionadded:: 2.7
-       Added support for the `wrapstring` parameter.
-    """
-    if not wrapstring:
-        wrapstring = environment.newline_sequence
-    import textwrap
-    return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
-                                   replace_whitespace=False,
-                                   break_long_words=break_long_words))
-
-
-def do_wordcount(s):
-    """Count the words in that string."""
-    return len(_word_re.findall(s))
-
-
-def do_int(value, default=0):
-    """Convert the value into an integer. If the
-    conversion doesn't work it will return ``0``. You can
-    override this default using the first parameter.
-    """
-    try:
-        return int(value)
-    except (TypeError, ValueError):
-        # this quirk is necessary so that "42.23"|int gives 42.
-        try:
-            return int(float(value))
-        except (TypeError, ValueError):
-            return default
-
-
-def do_float(value, default=0.0):
-    """Convert the value into a floating point number. If the
-    conversion doesn't work it will return ``0.0``. You can
-    override this default using the first parameter.
-    """
-    try:
-        return float(value)
-    except (TypeError, ValueError):
-        return default
-
-
-def do_format(value, *args, **kwargs):
-    """
-    Apply python string formatting on an object:
-
-    .. sourcecode:: jinja
-
-        {{ "%s - %s"|format("Hello?", "Foo!") }}
-            -> Hello? - Foo!
-    """
-    if args and kwargs:
-        raise FilterArgumentError('can\'t handle positional and keyword '
-                                  'arguments at the same time')
-    return soft_unicode(value) % (kwargs or args)
-
-
-def do_trim(value):
-    """Strip leading and trailing whitespace."""
-    return soft_unicode(value).strip()
-
-
-def do_striptags(value):
-    """Strip SGML/XML tags and replace adjacent whitespace by one space.
-    """
-    if hasattr(value, '__html__'):
-        value = value.__html__()
-    return Markup(text_type(value)).striptags()
-
-
-def do_slice(value, slices, fill_with=None):
-    """Slice an iterator and return a list of lists containing
-    those items. Useful if you want to create a div containing
-    three ul tags that represent columns:
-
-    .. sourcecode:: html+jinja
-
-        <div class="columwrapper">
-          {%- for column in items|slice(3) %}
-            <ul class="column-{{ loop.index }}">
-            {%- for item in column %}
-              <li>{{ item }}</li>
-            {%- endfor %}
-            </ul>
-          {%- endfor %}
-        </div>
-
-    If you pass it a second argument it's used to fill missing
-    values on the last iteration.
-    """
-    seq = list(value)
-    length = len(seq)
-    items_per_slice = length // slices
-    slices_with_extra = length % slices
-    offset = 0
-    for slice_number in range(slices):
-        start = offset + slice_number * items_per_slice
-        if slice_number < slices_with_extra:
-            offset += 1
-        end = offset + (slice_number + 1) * items_per_slice
-        tmp = seq[start:end]
-        if fill_with is not None and slice_number >= slices_with_extra:
-            tmp.append(fill_with)
-        yield tmp
-
-
-def do_batch(value, linecount, fill_with=None):
-    """
-    A filter that batches items. It works pretty much like `slice`
-    just the other way round. It returns a list of lists with the
-    given number of items. If you provide a second parameter this
-    is used to fill up missing items. See this example:
-
-    .. sourcecode:: html+jinja
-
-        <table>
-        {%- for row in items|batch(3, ' ') %}
-          <tr>
-          {%- for column in row %}
-            <td>{{ column }}</td>
-          {%- endfor %}
-          </tr>
-        {%- endfor %}
-        </table>
-    """
-    result = []
-    tmp = []
-    for item in value:
-        if len(tmp) == linecount:
-            yield tmp
-            tmp = []
-        tmp.append(item)
-    if tmp:
-        if fill_with is not None and len(tmp) < linecount:
-            tmp += [fill_with] * (linecount - len(tmp))
-        yield tmp
-
-
-def do_round(value, precision=0, method='common'):
-    """Round the number to a given precision. The first
-    parameter specifies the precision (default is ``0``), the
-    second the rounding method:
-
-    - ``'common'`` rounds either up or down
-    - ``'ceil'`` always rounds up
-    - ``'floor'`` always rounds down
-
-    If you don't specify a method ``'common'`` is used.
-
-    .. sourcecode:: jinja
-
-        {{ 42.55|round }}
-            -> 43.0
-        {{ 42.55|round(1, 'floor') }}
-            -> 42.5
-
-    Note that even if rounded to 0 precision, a float is returned.  If
-    you need a real integer, pipe it through `int`:
-
-    .. sourcecode:: jinja
-
-        {{ 42.55|round|int }}
-            -> 43
-    """
-    if not method in ('common', 'ceil', 'floor'):
-        raise FilterArgumentError('method must be common, ceil or floor')
-    if method == 'common':
-        return round(value, precision)
-    func = getattr(math, method)
-    return func(value * (10 ** precision)) / (10 ** precision)
-
-
- at environmentfilter
-def do_groupby(environment, value, attribute):
-    """Group a sequence of objects by a common attribute.
-
-    If you for example have a list of dicts or objects that represent persons
-    with `gender`, `first_name` and `last_name` attributes and you want to
-    group all users by genders you can do something like the following
-    snippet:
-
-    .. sourcecode:: html+jinja
-
-        <ul>
-        {% for group in persons|groupby('gender') %}
-            <li>{{ group.grouper }}<ul>
-            {% for person in group.list %}
-                <li>{{ person.first_name }} {{ person.last_name }}</li>
-            {% endfor %}</ul></li>
-        {% endfor %}
-        </ul>
-
-    Additionally it's possible to use tuple unpacking for the grouper and
-    list:
-
-    .. sourcecode:: html+jinja
-
-        <ul>
-        {% for grouper, list in persons|groupby('gender') %}
-            ...
-        {% endfor %}
-        </ul>
-
-    As you can see the item we're grouping by is stored in the `grouper`
-    attribute and the `list` contains all the objects that have this grouper
-    in common.
-
-    .. versionchanged:: 2.6
-       It's now possible to use dotted notation to group by the child
-       attribute of another attribute.
-    """
-    expr = make_attrgetter(environment, attribute)
-    return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
-
-
-class _GroupTuple(tuple):
-    __slots__ = ()
-    grouper = property(itemgetter(0))
-    list = property(itemgetter(1))
-
-    def __new__(cls, xxx_todo_changeme):
-        (key, value) = xxx_todo_changeme
-        return tuple.__new__(cls, (key, list(value)))
-
-
- at environmentfilter
-def do_sum(environment, iterable, attribute=None, start=0):
-    """Returns the sum of a sequence of numbers plus the value of parameter
-    'start' (which defaults to 0).  When the sequence is empty it returns
-    start.
-
-    It is also possible to sum up only certain attributes:
-
-    .. sourcecode:: jinja
-
-        Total: {{ items|sum(attribute='price') }}
-
-    .. versionchanged:: 2.6
-       The `attribute` parameter was added to allow suming up over
-       attributes.  Also the `start` parameter was moved on to the right.
-    """
-    if attribute is not None:
-        iterable = imap(make_attrgetter(environment, attribute), iterable)
-    return sum(iterable, start)
-
-
-def do_list(value):
-    """Convert the value into a list.  If it was a string the returned list
-    will be a list of characters.
-    """
-    return list(value)
-
-
-def do_mark_safe(value):
-    """Mark the value as safe which means that in an environment with automatic
-    escaping enabled this variable will not be escaped.
-    """
-    return Markup(value)
-
-
-def do_mark_unsafe(value):
-    """Mark a value as unsafe.  This is the reverse operation for :func:`safe`."""
-    return text_type(value)
-
-
-def do_reverse(value):
-    """Reverse the object or return an iterator the iterates over it the other
-    way round.
-    """
-    if isinstance(value, string_types):
-        return value[::-1]
-    try:
-        return reversed(value)
-    except TypeError:
-        try:
-            rv = list(value)
-            rv.reverse()
-            return rv
-        except TypeError:
-            raise FilterArgumentError('argument must be iterable')
-
-
- at environmentfilter
-def do_attr(environment, obj, name):
-    """Get an attribute of an object.  ``foo|attr("bar")`` works like
-    ``foo["bar"]`` just that always an attribute is returned and items are not
-    looked up.
-
-    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
-    """
-    try:
-        name = str(name)
-    except UnicodeError:
-        pass
-    else:
-        try:
-            value = getattr(obj, name)
-        except AttributeError:
-            pass
-        else:
-            if environment.sandboxed and not \
-               environment.is_safe_attribute(obj, name, value):
-                return environment.unsafe_undefined(obj, name)
-            return value
-    return environment.undefined(obj=obj, name=name)
-
-
- at contextfilter
-def do_map(*args, **kwargs):
-    """Applies a filter on a sequence of objects or looks up an attribute.
-    This is useful when dealing with lists of objects but you are really
-    only interested in a certain value of it.
-
-    The basic usage is mapping on an attribute.  Imagine you have a list
-    of users but you are only interested in a list of usernames:
-
-    .. sourcecode:: jinja
-
-        Users on this page: {{ users|map(attribute='username')|join(', ') }}
-
-    Alternatively you can let it invoke a filter by passing the name of the
-    filter and the arguments afterwards.  A good example would be applying a
-    text conversion filter on a sequence:
-
-    .. sourcecode:: jinja
-
-        Users on this page: {{ titles|map('lower')|join(', ') }}
-
-    .. versionadded:: 2.7
-    """
-    context = args[0]
-    seq = args[1]
-
-    if len(args) == 2 and 'attribute' in kwargs:
-        attribute = kwargs.pop('attribute')
-        if kwargs:
-            raise FilterArgumentError('Unexpected keyword argument %r' %
-                next(iter(kwargs)))
-        func = make_attrgetter(context.environment, attribute)
-    else:
-        try:
-            name = args[2]
-            args = args[3:]
-        except LookupError:
-            raise FilterArgumentError('map requires a filter argument')
-        func = lambda item: context.environment.call_filter(
-            name, item, args, kwargs, context=context)
-
-    if seq:
-        for item in seq:
-            yield func(item)
-
-
- at contextfilter
-def do_select(*args, **kwargs):
-    """Filters a sequence of objects by appying a test to either the object
-    or the attribute and only selecting the ones with the test succeeding.
-
-    Example usage:
-
-    .. sourcecode:: jinja
-
-        {{ numbers|select("odd") }}
-
-    .. versionadded:: 2.7
-    """
-    return _select_or_reject(args, kwargs, lambda x: x, False)
-
-
- at contextfilter
-def do_reject(*args, **kwargs):
-    """Filters a sequence of objects by appying a test to either the object
-    or the attribute and rejecting the ones with the test succeeding.
-
-    Example usage:
-
-    .. sourcecode:: jinja
-
-        {{ numbers|reject("odd") }}
-
-    .. versionadded:: 2.7
-    """
-    return _select_or_reject(args, kwargs, lambda x: not x, False)
-
-
- at contextfilter
-def do_selectattr(*args, **kwargs):
-    """Filters a sequence of objects by appying a test to either the object
-    or the attribute and only selecting the ones with the test succeeding.
-
-    Example usage:
-
-    .. sourcecode:: jinja
-
-        {{ users|selectattr("is_active") }}
-        {{ users|selectattr("email", "none") }}
-
-    .. versionadded:: 2.7
-    """
-    return _select_or_reject(args, kwargs, lambda x: x, True)
-
-
- at contextfilter
-def do_rejectattr(*args, **kwargs):
-    """Filters a sequence of objects by appying a test to either the object
-    or the attribute and rejecting the ones with the test succeeding.
-
-    .. sourcecode:: jinja
-
-        {{ users|rejectattr("is_active") }}
-        {{ users|rejectattr("email", "none") }}
-
-    .. versionadded:: 2.7
-    """
-    return _select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-def _select_or_reject(args, kwargs, modfunc, lookup_attr):
-    context = args[0]
-    seq = args[1]
-    if lookup_attr:
-        try:
-            attr = args[2]
-        except LookupError:
-            raise FilterArgumentError('Missing parameter for attribute name')
-        transfunc = make_attrgetter(context.environment, attr)
-        off = 1
-    else:
-        off = 0
-        transfunc = lambda x: x
-
-    try:
-        name = args[2 + off]
-        args = args[3 + off:]
-        func = lambda item: context.environment.call_test(
-            name, item, args, kwargs)
-    except LookupError:
-        func = bool
-
-    if seq:
-        for item in seq:
-            if modfunc(func(transfunc(item))):
-                yield item
-
-
-FILTERS = {
-    'attr':                 do_attr,
-    'replace':              do_replace,
-    'upper':                do_upper,
-    'lower':                do_lower,
-    'escape':               escape,
-    'e':                    escape,
-    'forceescape':          do_forceescape,
-    'capitalize':           do_capitalize,
-    'title':                do_title,
-    'default':              do_default,
-    'd':                    do_default,
-    'join':                 do_join,
-    'count':                len,
-    'dictsort':             do_dictsort,
-    'sort':                 do_sort,
-    'length':               len,
-    'reverse':              do_reverse,
-    'center':               do_center,
-    'indent':               do_indent,
-    'title':                do_title,
-    'capitalize':           do_capitalize,
-    'first':                do_first,
-    'last':                 do_last,
-    'map':                  do_map,
-    'random':               do_random,
-    'reject':               do_reject,
-    'rejectattr':           do_rejectattr,
-    'filesizeformat':       do_filesizeformat,
-    'pprint':               do_pprint,
-    'truncate':             do_truncate,
-    'wordwrap':             do_wordwrap,
-    'wordcount':            do_wordcount,
-    'int':                  do_int,
-    'float':                do_float,
-    'string':               soft_unicode,
-    'list':                 do_list,
-    'urlize':               do_urlize,
-    'format':               do_format,
-    'trim':                 do_trim,
-    'striptags':            do_striptags,
-    'select':               do_select,
-    'selectattr':           do_selectattr,
-    'slice':                do_slice,
-    'batch':                do_batch,
-    'sum':                  do_sum,
-    'abs':                  abs,
-    'round':                do_round,
-    'groupby':              do_groupby,
-    'safe':                 do_mark_safe,
-    'xmlattr':              do_xmlattr,
-    'urlencode':            do_urlencode
-}
diff --git a/python/ext-libs/jinja2/lexer.py b/python/ext-libs/jinja2/lexer.py
deleted file mode 100644
index a501285..0000000
--- a/python/ext-libs/jinja2/lexer.py
+++ /dev/null
@@ -1,733 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.lexer
-    ~~~~~~~~~~~~
-
-    This module implements a Jinja / Python combination lexer. The
-    `Lexer` class provided by this module is used to do some preprocessing
-    for Jinja.
-
-    On the one hand it filters out invalid operators like the bitshift
-    operators we don't allow in templates. On the other hand it separates
-    template code and python code in expressions.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import re
-
-from operator import itemgetter
-from collections import deque
-from jinja2.exceptions import TemplateSyntaxError
-from jinja2.utils import LRUCache
-from jinja2._compat import next, iteritems, implements_iterator, text_type, \
-     intern
-
-
-# cache for the lexers. Exists in order to be able to have multiple
-# environments with the same lexer
-_lexer_cache = LRUCache(50)
-
-# static regular expressions
-whitespace_re = re.compile(r'\s+', re.U)
-string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
-                       r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
-integer_re = re.compile(r'\d+')
-
-# we use the unicode identifier rule if this python version is able
-# to handle unicode identifiers, otherwise the standard ASCII one.
-try:
-    compile('föö', '<unknown>', 'eval')
-except SyntaxError:
-    name_re = re.compile(r'\b[a-zA-Z_][a-zA-Z0-9_]*\b')
-else:
-    from jinja2 import _stringdefs
-    name_re = re.compile(r'[%s][%s]*' % (_stringdefs.xid_start,
-                                         _stringdefs.xid_continue))
-
-float_re = re.compile(r'(?<!\.)\d+\.\d+')
-newline_re = re.compile(r'(\r\n|\r|\n)')
-
-# internal the tokens and keep references to them
-TOKEN_ADD = intern('add')
-TOKEN_ASSIGN = intern('assign')
-TOKEN_COLON = intern('colon')
-TOKEN_COMMA = intern('comma')
-TOKEN_DIV = intern('div')
-TOKEN_DOT = intern('dot')
-TOKEN_EQ = intern('eq')
-TOKEN_FLOORDIV = intern('floordiv')
-TOKEN_GT = intern('gt')
-TOKEN_GTEQ = intern('gteq')
-TOKEN_LBRACE = intern('lbrace')
-TOKEN_LBRACKET = intern('lbracket')
-TOKEN_LPAREN = intern('lparen')
-TOKEN_LT = intern('lt')
-TOKEN_LTEQ = intern('lteq')
-TOKEN_MOD = intern('mod')
-TOKEN_MUL = intern('mul')
-TOKEN_NE = intern('ne')
-TOKEN_PIPE = intern('pipe')
-TOKEN_POW = intern('pow')
-TOKEN_RBRACE = intern('rbrace')
-TOKEN_RBRACKET = intern('rbracket')
-TOKEN_RPAREN = intern('rparen')
-TOKEN_SEMICOLON = intern('semicolon')
-TOKEN_SUB = intern('sub')
-TOKEN_TILDE = intern('tilde')
-TOKEN_WHITESPACE = intern('whitespace')
-TOKEN_FLOAT = intern('float')
-TOKEN_INTEGER = intern('integer')
-TOKEN_NAME = intern('name')
-TOKEN_STRING = intern('string')
-TOKEN_OPERATOR = intern('operator')
-TOKEN_BLOCK_BEGIN = intern('block_begin')
-TOKEN_BLOCK_END = intern('block_end')
-TOKEN_VARIABLE_BEGIN = intern('variable_begin')
-TOKEN_VARIABLE_END = intern('variable_end')
-TOKEN_RAW_BEGIN = intern('raw_begin')
-TOKEN_RAW_END = intern('raw_end')
-TOKEN_COMMENT_BEGIN = intern('comment_begin')
-TOKEN_COMMENT_END = intern('comment_end')
-TOKEN_COMMENT = intern('comment')
-TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
-TOKEN_LINESTATEMENT_END = intern('linestatement_end')
-TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
-TOKEN_LINECOMMENT_END = intern('linecomment_end')
-TOKEN_LINECOMMENT = intern('linecomment')
-TOKEN_DATA = intern('data')
-TOKEN_INITIAL = intern('initial')
-TOKEN_EOF = intern('eof')
-
-# bind operators to token types
-operators = {
-    '+':            TOKEN_ADD,
-    '-':            TOKEN_SUB,
-    '/':            TOKEN_DIV,
-    '//':           TOKEN_FLOORDIV,
-    '*':            TOKEN_MUL,
-    '%':            TOKEN_MOD,
-    '**':           TOKEN_POW,
-    '~':            TOKEN_TILDE,
-    '[':            TOKEN_LBRACKET,
-    ']':            TOKEN_RBRACKET,
-    '(':            TOKEN_LPAREN,
-    ')':            TOKEN_RPAREN,
-    '{':            TOKEN_LBRACE,
-    '}':            TOKEN_RBRACE,
-    '==':           TOKEN_EQ,
-    '!=':           TOKEN_NE,
-    '>':            TOKEN_GT,
-    '>=':           TOKEN_GTEQ,
-    '<':            TOKEN_LT,
-    '<=':           TOKEN_LTEQ,
-    '=':            TOKEN_ASSIGN,
-    '.':            TOKEN_DOT,
-    ':':            TOKEN_COLON,
-    '|':            TOKEN_PIPE,
-    ',':            TOKEN_COMMA,
-    ';':            TOKEN_SEMICOLON
-}
-
-reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
-assert len(operators) == len(reverse_operators), 'operators dropped'
-operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
-                         sorted(operators, key=lambda x: -len(x))))
-
-ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
-                            TOKEN_COMMENT_END, TOKEN_WHITESPACE,
-                            TOKEN_WHITESPACE, TOKEN_LINECOMMENT_BEGIN,
-                            TOKEN_LINECOMMENT_END, TOKEN_LINECOMMENT])
-ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
-                             TOKEN_COMMENT, TOKEN_LINECOMMENT])
-
-
-def _describe_token_type(token_type):
-    if token_type in reverse_operators:
-        return reverse_operators[token_type]
-    return {
-        TOKEN_COMMENT_BEGIN:        'begin of comment',
-        TOKEN_COMMENT_END:          'end of comment',
-        TOKEN_COMMENT:              'comment',
-        TOKEN_LINECOMMENT:          'comment',
-        TOKEN_BLOCK_BEGIN:          'begin of statement block',
-        TOKEN_BLOCK_END:            'end of statement block',
-        TOKEN_VARIABLE_BEGIN:       'begin of print statement',
-        TOKEN_VARIABLE_END:         'end of print statement',
-        TOKEN_LINESTATEMENT_BEGIN:  'begin of line statement',
-        TOKEN_LINESTATEMENT_END:    'end of line statement',
-        TOKEN_DATA:                 'template data / text',
-        TOKEN_EOF:                  'end of template'
-    }.get(token_type, token_type)
-
-
-def describe_token(token):
-    """Returns a description of the token."""
-    if token.type == 'name':
-        return token.value
-    return _describe_token_type(token.type)
-
-
-def describe_token_expr(expr):
-    """Like `describe_token` but for token expressions."""
-    if ':' in expr:
-        type, value = expr.split(':', 1)
-        if type == 'name':
-            return value
-    else:
-        type = expr
-    return _describe_token_type(type)
-
-
-def count_newlines(value):
-    """Count the number of newline characters in the string.  This is
-    useful for extensions that filter a stream.
-    """
-    return len(newline_re.findall(value))
-
-
-def compile_rules(environment):
-    """Compiles all the rules from the environment into a list of rules."""
-    e = re.escape
-    rules = [
-        (len(environment.comment_start_string), 'comment',
-         e(environment.comment_start_string)),
-        (len(environment.block_start_string), 'block',
-         e(environment.block_start_string)),
-        (len(environment.variable_start_string), 'variable',
-         e(environment.variable_start_string))
-    ]
-
-    if environment.line_statement_prefix is not None:
-        rules.append((len(environment.line_statement_prefix), 'linestatement',
-                      r'^[ \t\v]*' + e(environment.line_statement_prefix)))
-    if environment.line_comment_prefix is not None:
-        rules.append((len(environment.line_comment_prefix), 'linecomment',
-                      r'(?:^|(?<=\S))[^\S\r\n]*' +
-                      e(environment.line_comment_prefix)))
-
-    return [x[1:] for x in sorted(rules, reverse=True)]
-
-
-class Failure(object):
-    """Class that raises a `TemplateSyntaxError` if called.
-    Used by the `Lexer` to specify known errors.
-    """
-
-    def __init__(self, message, cls=TemplateSyntaxError):
-        self.message = message
-        self.error_class = cls
-
-    def __call__(self, lineno, filename):
-        raise self.error_class(self.message, lineno, filename)
-
-
-class Token(tuple):
-    """Token class."""
-    __slots__ = ()
-    lineno, type, value = (property(itemgetter(x)) for x in range(3))
-
-    def __new__(cls, lineno, type, value):
-        return tuple.__new__(cls, (lineno, intern(str(type)), value))
-
-    def __str__(self):
-        if self.type in reverse_operators:
-            return reverse_operators[self.type]
-        elif self.type == 'name':
-            return self.value
-        return self.type
-
-    def test(self, expr):
-        """Test a token against a token expression.  This can either be a
-        token type or ``'token_type:token_value'``.  This can only test
-        against string values and types.
-        """
-        # here we do a regular string equality check as test_any is usually
-        # passed an iterable of not interned strings.
-        if self.type == expr:
-            return True
-        elif ':' in expr:
-            return expr.split(':', 1) == [self.type, self.value]
-        return False
-
-    def test_any(self, *iterable):
-        """Test against multiple token expressions."""
-        for expr in iterable:
-            if self.test(expr):
-                return True
-        return False
-
-    def __repr__(self):
-        return 'Token(%r, %r, %r)' % (
-            self.lineno,
-            self.type,
-            self.value
-        )
-
-
- at implements_iterator
-class TokenStreamIterator(object):
-    """The iterator for tokenstreams.  Iterate over the stream
-    until the eof token is reached.
-    """
-
-    def __init__(self, stream):
-        self.stream = stream
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        token = self.stream.current
-        if token.type is TOKEN_EOF:
-            self.stream.close()
-            raise StopIteration()
-        next(self.stream)
-        return token
-
-
- at implements_iterator
-class TokenStream(object):
-    """A token stream is an iterable that yields :class:`Token`\s.  The
-    parser however does not iterate over it but calls :meth:`next` to go
-    one token ahead.  The current active token is stored as :attr:`current`.
-    """
-
-    def __init__(self, generator, name, filename):
-        self._iter = iter(generator)
-        self._pushed = deque()
-        self.name = name
-        self.filename = filename
-        self.closed = False
-        self.current = Token(1, TOKEN_INITIAL, '')
-        next(self)
-
-    def __iter__(self):
-        return TokenStreamIterator(self)
-
-    def __bool__(self):
-        return bool(self._pushed) or self.current.type is not TOKEN_EOF
-    __nonzero__ = __bool__  # py2
-
-    eos = property(lambda x: not x, doc="Are we at the end of the stream?")
-
-    def push(self, token):
-        """Push a token back to the stream."""
-        self._pushed.append(token)
-
-    def look(self):
-        """Look at the next token."""
-        old_token = next(self)
-        result = self.current
-        self.push(result)
-        self.current = old_token
-        return result
-
-    def skip(self, n=1):
-        """Got n tokens ahead."""
-        for x in range(n):
-            next(self)
-
-    def next_if(self, expr):
-        """Perform the token test and return the token if it matched.
-        Otherwise the return value is `None`.
-        """
-        if self.current.test(expr):
-            return next(self)
-
-    def skip_if(self, expr):
-        """Like :meth:`next_if` but only returns `True` or `False`."""
-        return self.next_if(expr) is not None
-
-    def __next__(self):
-        """Go one token ahead and return the old one"""
-        rv = self.current
-        if self._pushed:
-            self.current = self._pushed.popleft()
-        elif self.current.type is not TOKEN_EOF:
-            try:
-                self.current = next(self._iter)
-            except StopIteration:
-                self.close()
-        return rv
-
-    def close(self):
-        """Close the stream."""
-        self.current = Token(self.current.lineno, TOKEN_EOF, '')
-        self._iter = None
-        self.closed = True
-
-    def expect(self, expr):
-        """Expect a given token type and return it.  This accepts the same
-        argument as :meth:`jinja2.lexer.Token.test`.
-        """
-        if not self.current.test(expr):
-            expr = describe_token_expr(expr)
-            if self.current.type is TOKEN_EOF:
-                raise TemplateSyntaxError('unexpected end of template, '
-                                          'expected %r.' % expr,
-                                          self.current.lineno,
-                                          self.name, self.filename)
-            raise TemplateSyntaxError("expected token %r, got %r" %
-                                      (expr, describe_token(self.current)),
-                                      self.current.lineno,
-                                      self.name, self.filename)
-        try:
-            return self.current
-        finally:
-            next(self)
-
-
-def get_lexer(environment):
-    """Return a lexer which is probably cached."""
-    key = (environment.block_start_string,
-           environment.block_end_string,
-           environment.variable_start_string,
-           environment.variable_end_string,
-           environment.comment_start_string,
-           environment.comment_end_string,
-           environment.line_statement_prefix,
-           environment.line_comment_prefix,
-           environment.trim_blocks,
-           environment.lstrip_blocks,
-           environment.newline_sequence,
-           environment.keep_trailing_newline)
-    lexer = _lexer_cache.get(key)
-    if lexer is None:
-        lexer = Lexer(environment)
-        _lexer_cache[key] = lexer
-    return lexer
-
-
-class Lexer(object):
-    """Class that implements a lexer for a given environment. Automatically
-    created by the environment class, usually you don't have to do that.
-
-    Note that the lexer is not automatically bound to an environment.
-    Multiple environments can share the same lexer.
-    """
-
-    def __init__(self, environment):
-        # shortcuts
-        c = lambda x: re.compile(x, re.M | re.S)
-        e = re.escape
-
-        # lexing rules for tags
-        tag_rules = [
-            (whitespace_re, TOKEN_WHITESPACE, None),
-            (float_re, TOKEN_FLOAT, None),
-            (integer_re, TOKEN_INTEGER, None),
-            (name_re, TOKEN_NAME, None),
-            (string_re, TOKEN_STRING, None),
-            (operator_re, TOKEN_OPERATOR, None)
-        ]
-
-        # assemble the root lexing rule. because "|" is ungreedy
-        # we have to sort by length so that the lexer continues working
-        # as expected when we have parsing rules like <% for block and
-        # <%= for variables. (if someone wants asp like syntax)
-        # variables are just part of the rules if variable processing
-        # is required.
-        root_tag_rules = compile_rules(environment)
-
-        # block suffix if trimming is enabled
-        block_suffix_re = environment.trim_blocks and '\\n?' or ''
-
-        # strip leading spaces if lstrip_blocks is enabled
-        prefix_re = {}
-        if environment.lstrip_blocks:
-            # use '{%+' to manually disable lstrip_blocks behavior
-            no_lstrip_re = e('+')
-            # detect overlap between block and variable or comment strings
-            block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
-            # make sure we don't mistake a block for a variable or a comment
-            m = block_diff.match(environment.comment_start_string)
-            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
-            m = block_diff.match(environment.variable_start_string)
-            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
-
-            # detect overlap between comment and variable strings
-            comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
-            m = comment_diff.match(environment.variable_start_string)
-            no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''
-
-            lstrip_re = r'^[ \t]*'
-            block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
-                    lstrip_re,
-                    e(environment.block_start_string),
-                    no_lstrip_re,
-                    e(environment.block_start_string),
-                    )
-            comment_prefix_re = r'%s%s%s|%s\+?' % (
-                    lstrip_re,
-                    e(environment.comment_start_string),
-                    no_variable_re,
-                    e(environment.comment_start_string),
-                    )
-            prefix_re['block'] = block_prefix_re
-            prefix_re['comment'] = comment_prefix_re
-        else:
-            block_prefix_re = '%s' % e(environment.block_start_string)
-
-        self.newline_sequence = environment.newline_sequence
-        self.keep_trailing_newline = environment.keep_trailing_newline
-
-        # global lexing rules
-        self.rules = {
-            'root': [
-                # directives
-                (c('(.*?)(?:%s)' % '|'.join(
-                    [r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
-                        e(environment.block_start_string),
-                        block_prefix_re,
-                        e(environment.block_end_string),
-                        e(environment.block_end_string)
-                    )] + [
-                        r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n,r))
-                        for n, r in root_tag_rules
-                    ])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
-                # data
-                (c('.+'), TOKEN_DATA, None)
-            ],
-            # comments
-            TOKEN_COMMENT_BEGIN: [
-                (c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
-                    e(environment.comment_end_string),
-                    e(environment.comment_end_string),
-                    block_suffix_re
-                )), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
-                (c('(.)'), (Failure('Missing end of comment tag'),), None)
-            ],
-            # blocks
-            TOKEN_BLOCK_BEGIN: [
-                (c('(?:\-%s\s*|%s)%s' % (
-                    e(environment.block_end_string),
-                    e(environment.block_end_string),
-                    block_suffix_re
-                )), TOKEN_BLOCK_END, '#pop'),
-            ] + tag_rules,
-            # variables
-            TOKEN_VARIABLE_BEGIN: [
-                (c('\-%s\s*|%s' % (
-                    e(environment.variable_end_string),
-                    e(environment.variable_end_string)
-                )), TOKEN_VARIABLE_END, '#pop')
-            ] + tag_rules,
-            # raw block
-            TOKEN_RAW_BEGIN: [
-                (c('(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
-                    e(environment.block_start_string),
-                    block_prefix_re,
-                    e(environment.block_end_string),
-                    e(environment.block_end_string),
-                    block_suffix_re
-                )), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
-                (c('(.)'), (Failure('Missing end of raw directive'),), None)
-            ],
-            # line statements
-            TOKEN_LINESTATEMENT_BEGIN: [
-                (c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
-            ] + tag_rules,
-            # line comments
-            TOKEN_LINECOMMENT_BEGIN: [
-                (c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
-                 TOKEN_LINECOMMENT_END), '#pop')
-            ]
-        }
-
-    def _normalize_newlines(self, value):
-        """Called for strings and template data to normalize it to unicode."""
-        return newline_re.sub(self.newline_sequence, value)
-
-    def tokenize(self, source, name=None, filename=None, state=None):
-        """Calls tokeniter + tokenize and wraps it in a token stream.
-        """
-        stream = self.tokeniter(source, name, filename, state)
-        return TokenStream(self.wrap(stream, name, filename), name, filename)
-
-    def wrap(self, stream, name=None, filename=None):
-        """This is called with the stream as returned by `tokenize` and wraps
-        every token in a :class:`Token` and converts the value.
-        """
-        for lineno, token, value in stream:
-            if token in ignored_tokens:
-                continue
-            elif token == 'linestatement_begin':
-                token = 'block_begin'
-            elif token == 'linestatement_end':
-                token = 'block_end'
-            # we are not interested in those tokens in the parser
-            elif token in ('raw_begin', 'raw_end'):
-                continue
-            elif token == 'data':
-                value = self._normalize_newlines(value)
-            elif token == 'keyword':
-                token = value
-            elif token == 'name':
-                value = str(value)
-            elif token == 'string':
-                # try to unescape string
-                try:
-                    value = self._normalize_newlines(value[1:-1]) \
-                        .encode('ascii', 'backslashreplace') \
-                        .decode('unicode-escape')
-                except Exception as e:
-                    msg = str(e).split(':')[-1].strip()
-                    raise TemplateSyntaxError(msg, lineno, name, filename)
-                # if we can express it as bytestring (ascii only)
-                # we do that for support of semi broken APIs
-                # as datetime.datetime.strftime.  On python 3 this
-                # call becomes a noop thanks to 2to3
-                try:
-                    value = str(value)
-                except UnicodeError:
-                    pass
-            elif token == 'integer':
-                value = int(value)
-            elif token == 'float':
-                value = float(value)
-            elif token == 'operator':
-                token = operators[value]
-            yield Token(lineno, token, value)
-
-    def tokeniter(self, source, name, filename=None, state=None):
-        """This method tokenizes the text and returns the tokens in a
-        generator.  Use this method if you just want to tokenize a template.
-        """
-        source = text_type(source)
-        lines = source.splitlines()
-        if self.keep_trailing_newline and source:
-            for newline in ('\r\n', '\r', '\n'):
-                if source.endswith(newline):
-                    lines.append('')
-                    break
-        source = '\n'.join(lines)
-        pos = 0
-        lineno = 1
-        stack = ['root']
-        if state is not None and state != 'root':
-            assert state in ('variable', 'block'), 'invalid state'
-            stack.append(state + '_begin')
-        else:
-            state = 'root'
-        statetokens = self.rules[stack[-1]]
-        source_length = len(source)
-
-        balancing_stack = []
-
-        while 1:
-            # tokenizer loop
-            for regex, tokens, new_state in statetokens:
-                m = regex.match(source, pos)
-                # if no match we try again with the next rule
-                if m is None:
-                    continue
-
-                # we only match blocks and variables if braces / parentheses
-                # are balanced. continue parsing with the lower rule which
-                # is the operator rule. do this only if the end tags look
-                # like operators
-                if balancing_stack and \
-                   tokens in ('variable_end', 'block_end',
-                              'linestatement_end'):
-                    continue
-
-                # tuples support more options
-                if isinstance(tokens, tuple):
-                    for idx, token in enumerate(tokens):
-                        # failure group
-                        if token.__class__ is Failure:
-                            raise token(lineno, filename)
-                        # bygroup is a bit more complex, in that case we
-                        # yield for the current token the first named
-                        # group that matched
-                        elif token == '#bygroup':
-                            for key, value in iteritems(m.groupdict()):
-                                if value is not None:
-                                    yield lineno, key, value
-                                    lineno += value.count('\n')
-                                    break
-                            else:
-                                raise RuntimeError('%r wanted to resolve '
-                                                   'the token dynamically'
-                                                   ' but no group matched'
-                                                   % regex)
-                        # normal group
-                        else:
-                            data = m.group(idx + 1)
-                            if data or token not in ignore_if_empty:
-                                yield lineno, token, data
-                            lineno += data.count('\n')
-
-                # strings as token just are yielded as it.
-                else:
-                    data = m.group()
-                    # update brace/parentheses balance
-                    if tokens == 'operator':
-                        if data == '{':
-                            balancing_stack.append('}')
-                        elif data == '(':
-                            balancing_stack.append(')')
-                        elif data == '[':
-                            balancing_stack.append(']')
-                        elif data in ('}', ')', ']'):
-                            if not balancing_stack:
-                                raise TemplateSyntaxError('unexpected \'%s\'' %
-                                                          data, lineno, name,
-                                                          filename)
-                            expected_op = balancing_stack.pop()
-                            if expected_op != data:
-                                raise TemplateSyntaxError('unexpected \'%s\', '
-                                                          'expected \'%s\'' %
-                                                          (data, expected_op),
-                                                          lineno, name,
-                                                          filename)
-                    # yield items
-                    if data or tokens not in ignore_if_empty:
-                        yield lineno, tokens, data
-                    lineno += data.count('\n')
-
-                # fetch new position into new variable so that we can check
-                # if there is a internal parsing error which would result
-                # in an infinite loop
-                pos2 = m.end()
-
-                # handle state changes
-                if new_state is not None:
-                    # remove the uppermost state
-                    if new_state == '#pop':
-                        stack.pop()
-                    # resolve the new state by group checking
-                    elif new_state == '#bygroup':
-                        for key, value in iteritems(m.groupdict()):
-                            if value is not None:
-                                stack.append(key)
-                                break
-                        else:
-                            raise RuntimeError('%r wanted to resolve the '
-                                               'new state dynamically but'
-                                               ' no group matched' %
-                                               regex)
-                    # direct state name given
-                    else:
-                        stack.append(new_state)
-                    statetokens = self.rules[stack[-1]]
-                # we are still at the same position and no stack change.
-                # this means a loop without break condition, avoid that and
-                # raise error
-                elif pos2 == pos:
-                    raise RuntimeError('%r yielded empty string without '
-                                       'stack change' % regex)
-                # publish new function and start again
-                pos = pos2
-                break
-            # if loop terminated without break we haven't found a single match
-            # either we are at the end of the file or we have a problem
-            else:
-                # end of text
-                if pos >= source_length:
-                    return
-                # something went wrong
-                raise TemplateSyntaxError('unexpected char %r at %d' %
-                                          (source[pos], pos), lineno,
-                                          name, filename)
diff --git a/python/ext-libs/jinja2/loaders.py b/python/ext-libs/jinja2/loaders.py
deleted file mode 100644
index cc9c683..0000000
--- a/python/ext-libs/jinja2/loaders.py
+++ /dev/null
@@ -1,471 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.loaders
-    ~~~~~~~~~~~~~~
-
-    Jinja loader classes.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import os
-import sys
-import weakref
-from types import ModuleType
-from os import path
-from hashlib import sha1
-from jinja2.exceptions import TemplateNotFound
-from jinja2.utils import open_if_exists, internalcode
-from jinja2._compat import string_types, iteritems
-
-
-def split_template_path(template):
-    """Split a path into segments and perform a sanity check.  If it detects
-    '..' in the path it will raise a `TemplateNotFound` error.
-    """
-    pieces = []
-    for piece in template.split('/'):
-        if path.sep in piece \
-           or (path.altsep and path.altsep in piece) or \
-           piece == path.pardir:
-            raise TemplateNotFound(template)
-        elif piece and piece != '.':
-            pieces.append(piece)
-    return pieces
-
-
-class BaseLoader(object):
-    """Baseclass for all loaders.  Subclass this and override `get_source` to
-    implement a custom loading mechanism.  The environment provides a
-    `get_template` method that calls the loader's `load` method to get the
-    :class:`Template` object.
-
-    A very basic example for a loader that looks up templates on the file
-    system could look like this::
-
-        from jinja2 import BaseLoader, TemplateNotFound
-        from os.path import join, exists, getmtime
-
-        class MyLoader(BaseLoader):
-
-            def __init__(self, path):
-                self.path = path
-
-            def get_source(self, environment, template):
-                path = join(self.path, template)
-                if not exists(path):
-                    raise TemplateNotFound(template)
-                mtime = getmtime(path)
-                with file(path) as f:
-                    source = f.read().decode('utf-8')
-                return source, path, lambda: mtime == getmtime(path)
-    """
-
-    #: if set to `False` it indicates that the loader cannot provide access
-    #: to the source of templates.
-    #:
-    #: .. versionadded:: 2.4
-    has_source_access = True
-
-    def get_source(self, environment, template):
-        """Get the template source, filename and reload helper for a template.
-        It's passed the environment and template name and has to return a
-        tuple in the form ``(source, filename, uptodate)`` or raise a
-        `TemplateNotFound` error if it can't locate the template.
-
-        The source part of the returned tuple must be the source of the
-        template as unicode string or a ASCII bytestring.  The filename should
-        be the name of the file on the filesystem if it was loaded from there,
-        otherwise `None`.  The filename is used by python for the tracebacks
-        if no loader extension is used.
-
-        The last item in the tuple is the `uptodate` function.  If auto
-        reloading is enabled it's always called to check if the template
-        changed.  No arguments are passed so the function must store the
-        old state somewhere (for example in a closure).  If it returns `False`
-        the template will be reloaded.
-        """
-        if not self.has_source_access:
-            raise RuntimeError('%s cannot provide access to the source' %
-                               self.__class__.__name__)
-        raise TemplateNotFound(template)
-
-    def list_templates(self):
-        """Iterates over all templates.  If the loader does not support that
-        it should raise a :exc:`TypeError` which is the default behavior.
-        """
-        raise TypeError('this loader cannot iterate over all templates')
-
-    @internalcode
-    def load(self, environment, name, globals=None):
-        """Loads a template.  This method looks up the template in the cache
-        or loads one by calling :meth:`get_source`.  Subclasses should not
-        override this method as loaders working on collections of other
-        loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
-        will not call this method but `get_source` directly.
-        """
-        code = None
-        if globals is None:
-            globals = {}
-
-        # first we try to get the source for this template together
-        # with the filename and the uptodate function.
-        source, filename, uptodate = self.get_source(environment, name)
-
-        # try to load the code from the bytecode cache if there is a
-        # bytecode cache configured.
-        bcc = environment.bytecode_cache
-        if bcc is not None:
-            bucket = bcc.get_bucket(environment, name, filename, source)
-            code = bucket.code
-
-        # if we don't have code so far (not cached, no longer up to
-        # date) etc. we compile the template
-        if code is None:
-            code = environment.compile(source, name, filename)
-
-        # if the bytecode cache is available and the bucket doesn't
-        # have a code so far, we give the bucket the new code and put
-        # it back to the bytecode cache.
-        if bcc is not None and bucket.code is None:
-            bucket.code = code
-            bcc.set_bucket(bucket)
-
-        return environment.template_class.from_code(environment, code,
-                                                    globals, uptodate)
-
-
-class FileSystemLoader(BaseLoader):
-    """Loads templates from the file system.  This loader can find templates
-    in folders on the file system and is the preferred way to load them.
-
-    The loader takes the path to the templates as string, or if multiple
-    locations are wanted a list of them which is then looked up in the
-    given order:
-
-    >>> loader = FileSystemLoader('/path/to/templates')
-    >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
-
-    Per default the template encoding is ``'utf-8'`` which can be changed
-    by setting the `encoding` parameter to something else.
-    """
-
-    def __init__(self, searchpath, encoding='utf-8'):
-        if isinstance(searchpath, string_types):
-            searchpath = [searchpath]
-        self.searchpath = list(searchpath)
-        self.encoding = encoding
-
-    def get_source(self, environment, template):
-        pieces = split_template_path(template)
-        for searchpath in self.searchpath:
-            filename = path.join(searchpath, *pieces)
-            f = open_if_exists(filename)
-            if f is None:
-                continue
-            try:
-                contents = f.read().decode(self.encoding)
-            finally:
-                f.close()
-
-            mtime = path.getmtime(filename)
-            def uptodate():
-                try:
-                    return path.getmtime(filename) == mtime
-                except OSError:
-                    return False
-            return contents, filename, uptodate
-        raise TemplateNotFound(template)
-
-    def list_templates(self):
-        found = set()
-        for searchpath in self.searchpath:
-            for dirpath, dirnames, filenames in os.walk(searchpath):
-                for filename in filenames:
-                    template = os.path.join(dirpath, filename) \
-                        [len(searchpath):].strip(os.path.sep) \
-                                          .replace(os.path.sep, '/')
-                    if template[:2] == './':
-                        template = template[2:]
-                    if template not in found:
-                        found.add(template)
-        return sorted(found)
-
-
-class PackageLoader(BaseLoader):
-    """Load templates from python eggs or packages.  It is constructed with
-    the name of the python package and the path to the templates in that
-    package::
-
-        loader = PackageLoader('mypackage', 'views')
-
-    If the package path is not given, ``'templates'`` is assumed.
-
-    Per default the template encoding is ``'utf-8'`` which can be changed
-    by setting the `encoding` parameter to something else.  Due to the nature
-    of eggs it's only possible to reload templates if the package was loaded
-    from the file system and not a zip file.
-    """
-
-    def __init__(self, package_name, package_path='templates',
-                 encoding='utf-8'):
-        from pkg_resources import DefaultProvider, ResourceManager, \
-                                  get_provider
-        provider = get_provider(package_name)
-        self.encoding = encoding
-        self.manager = ResourceManager()
-        self.filesystem_bound = isinstance(provider, DefaultProvider)
-        self.provider = provider
-        self.package_path = package_path
-
-    def get_source(self, environment, template):
-        pieces = split_template_path(template)
-        p = '/'.join((self.package_path,) + tuple(pieces))
-        if not self.provider.has_resource(p):
-            raise TemplateNotFound(template)
-
-        filename = uptodate = None
-        if self.filesystem_bound:
-            filename = self.provider.get_resource_filename(self.manager, p)
-            mtime = path.getmtime(filename)
-            def uptodate():
-                try:
-                    return path.getmtime(filename) == mtime
-                except OSError:
-                    return False
-
-        source = self.provider.get_resource_string(self.manager, p)
-        return source.decode(self.encoding), filename, uptodate
-
-    def list_templates(self):
-        path = self.package_path
-        if path[:2] == './':
-            path = path[2:]
-        elif path == '.':
-            path = ''
-        offset = len(path)
-        results = []
-        def _walk(path):
-            for filename in self.provider.resource_listdir(path):
-                fullname = path + '/' + filename
-                if self.provider.resource_isdir(fullname):
-                    _walk(fullname)
-                else:
-                    results.append(fullname[offset:].lstrip('/'))
-        _walk(path)
-        results.sort()
-        return results
-
-
-class DictLoader(BaseLoader):
-    """Loads a template from a python dict.  It's passed a dict of unicode
-    strings bound to template names.  This loader is useful for unittesting:
-
-    >>> loader = DictLoader({'index.html': 'source here'})
-
-    Because auto reloading is rarely useful this is disabled per default.
-    """
-
-    def __init__(self, mapping):
-        self.mapping = mapping
-
-    def get_source(self, environment, template):
-        if template in self.mapping:
-            source = self.mapping[template]
-            return source, None, lambda: source == self.mapping.get(template)
-        raise TemplateNotFound(template)
-
-    def list_templates(self):
-        return sorted(self.mapping)
-
-
-class FunctionLoader(BaseLoader):
-    """A loader that is passed a function which does the loading.  The
-    function becomes the name of the template passed and has to return either
-    an unicode string with the template source, a tuple in the form ``(source,
-    filename, uptodatefunc)`` or `None` if the template does not exist.
-
-    >>> def load_template(name):
-    ...     if name == 'index.html':
-    ...         return '...'
-    ...
-    >>> loader = FunctionLoader(load_template)
-
-    The `uptodatefunc` is a function that is called if autoreload is enabled
-    and has to return `True` if the template is still up to date.  For more
-    details have a look at :meth:`BaseLoader.get_source` which has the same
-    return value.
-    """
-
-    def __init__(self, load_func):
-        self.load_func = load_func
-
-    def get_source(self, environment, template):
-        rv = self.load_func(template)
-        if rv is None:
-            raise TemplateNotFound(template)
-        elif isinstance(rv, string_types):
-            return rv, None, None
-        return rv
-
-
-class PrefixLoader(BaseLoader):
-    """A loader that is passed a dict of loaders where each loader is bound
-    to a prefix.  The prefix is delimited from the template by a slash per
-    default, which can be changed by setting the `delimiter` argument to
-    something else::
-
-        loader = PrefixLoader({
-            'app1':     PackageLoader('mypackage.app1'),
-            'app2':     PackageLoader('mypackage.app2')
-        })
-
-    By loading ``'app1/index.html'`` the file from the app1 package is loaded,
-    by loading ``'app2/index.html'`` the file from the second.
-    """
-
-    def __init__(self, mapping, delimiter='/'):
-        self.mapping = mapping
-        self.delimiter = delimiter
-
-    def get_loader(self, template):
-        try:
-            prefix, name = template.split(self.delimiter, 1)
-            loader = self.mapping[prefix]
-        except (ValueError, KeyError):
-            raise TemplateNotFound(template)
-        return loader, name
-
-    def get_source(self, environment, template):
-        loader, name = self.get_loader(template)
-        try:
-            return loader.get_source(environment, name)
-        except TemplateNotFound:
-            # re-raise the exception with the correct fileame here.
-            # (the one that includes the prefix)
-            raise TemplateNotFound(template)
-
-    @internalcode
-    def load(self, environment, name, globals=None):
-        loader, local_name = self.get_loader(name)
-        try:
-            return loader.load(environment, local_name, globals)
-        except TemplateNotFound:
-            # re-raise the exception with the correct fileame here.
-            # (the one that includes the prefix)
-            raise TemplateNotFound(name)
-
-    def list_templates(self):
-        result = []
-        for prefix, loader in iteritems(self.mapping):
-            for template in loader.list_templates():
-                result.append(prefix + self.delimiter + template)
-        return result
-
-
-class ChoiceLoader(BaseLoader):
-    """This loader works like the `PrefixLoader` just that no prefix is
-    specified.  If a template could not be found by one loader the next one
-    is tried.
-
-    >>> loader = ChoiceLoader([
-    ...     FileSystemLoader('/path/to/user/templates'),
-    ...     FileSystemLoader('/path/to/system/templates')
-    ... ])
-
-    This is useful if you want to allow users to override builtin templates
-    from a different location.
-    """
-
-    def __init__(self, loaders):
-        self.loaders = loaders
-
-    def get_source(self, environment, template):
-        for loader in self.loaders:
-            try:
-                return loader.get_source(environment, template)
-            except TemplateNotFound:
-                pass
-        raise TemplateNotFound(template)
-
-    @internalcode
-    def load(self, environment, name, globals=None):
-        for loader in self.loaders:
-            try:
-                return loader.load(environment, name, globals)
-            except TemplateNotFound:
-                pass
-        raise TemplateNotFound(name)
-
-    def list_templates(self):
-        found = set()
-        for loader in self.loaders:
-            found.update(loader.list_templates())
-        return sorted(found)
-
-
-class _TemplateModule(ModuleType):
-    """Like a normal module but with support for weak references"""
-
-
-class ModuleLoader(BaseLoader):
-    """This loader loads templates from precompiled templates.
-
-    Example usage:
-
-    >>> loader = ChoiceLoader([
-    ...     ModuleLoader('/path/to/compiled/templates'),
-    ...     FileSystemLoader('/path/to/templates')
-    ... ])
-
-    Templates can be precompiled with :meth:`Environment.compile_templates`.
-    """
-
-    has_source_access = False
-
-    def __init__(self, path):
-        package_name = '_jinja2_module_templates_%x' % id(self)
-
-        # create a fake module that looks for the templates in the
-        # path given.
-        mod = _TemplateModule(package_name)
-        if isinstance(path, string_types):
-            path = [path]
-        else:
-            path = list(path)
-        mod.__path__ = path
-
-        sys.modules[package_name] = weakref.proxy(mod,
-            lambda x: sys.modules.pop(package_name, None))
-
-        # the only strong reference, the sys.modules entry is weak
-        # so that the garbage collector can remove it once the
-        # loader that created it goes out of business.
-        self.module = mod
-        self.package_name = package_name
-
-    @staticmethod
-    def get_template_key(name):
-        return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
-
-    @staticmethod
-    def get_module_filename(name):
-        return ModuleLoader.get_template_key(name) + '.py'
-
-    @internalcode
-    def load(self, environment, name, globals=None):
-        key = self.get_template_key(name)
-        module = '%s.%s' % (self.package_name, key)
-        mod = getattr(self.module, module, None)
-        if mod is None:
-            try:
-                mod = __import__(module, None, None, ['root'])
-            except ImportError:
-                raise TemplateNotFound(name)
-
-            # remove the entry from sys.modules, we only want the attribute
-            # on the module object we have stored on the loader.
-            sys.modules.pop(module, None)
-
-        return environment.template_class.from_module_dict(
-            environment, mod.__dict__, globals)
diff --git a/python/ext-libs/jinja2/meta.py b/python/ext-libs/jinja2/meta.py
deleted file mode 100644
index 3110cff..0000000
--- a/python/ext-libs/jinja2/meta.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.meta
-    ~~~~~~~~~~~
-
-    This module implements various functions that exposes information about
-    templates that might be interesting for various kinds of applications.
-
-    :copyright: (c) 2010 by the Jinja Team, see AUTHORS for more details.
-    :license: BSD, see LICENSE for more details.
-"""
-from jinja2 import nodes
-from jinja2.compiler import CodeGenerator
-from jinja2._compat import string_types
-
-
-class TrackingCodeGenerator(CodeGenerator):
-    """We abuse the code generator for introspection."""
-
-    def __init__(self, environment):
-        CodeGenerator.__init__(self, environment, '<introspection>',
-                               '<introspection>')
-        self.undeclared_identifiers = set()
-
-    def write(self, x):
-        """Don't write."""
-
-    def pull_locals(self, frame):
-        """Remember all undeclared identifiers."""
-        self.undeclared_identifiers.update(frame.identifiers.undeclared)
-
-
-def find_undeclared_variables(ast):
-    """Returns a set of all variables in the AST that will be looked up from
-    the context at runtime.  Because at compile time it's not known which
-    variables will be used depending on the path the execution takes at
-    runtime, all variables are returned.
-
-    >>> from jinja2 import Environment, meta
-    >>> env = Environment()
-    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
-    >>> meta.find_undeclared_variables(ast)
-    set(['bar'])
-
-    .. admonition:: Implementation
-
-       Internally the code generator is used for finding undeclared variables.
-       This is good to know because the code generator might raise a
-       :exc:`TemplateAssertionError` during compilation and as a matter of
-       fact this function can currently raise that exception as well.
-    """
-    codegen = TrackingCodeGenerator(ast.environment)
-    codegen.visit(ast)
-    return codegen.undeclared_identifiers
-
-
-def find_referenced_templates(ast):
-    """Finds all the referenced templates from the AST.  This will return an
-    iterator over all the hardcoded template extensions, inclusions and
-    imports.  If dynamic inheritance or inclusion is used, `None` will be
-    yielded.
-
-    >>> from jinja2 import Environment, meta
-    >>> env = Environment()
-    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
-    >>> list(meta.find_referenced_templates(ast))
-    ['layout.html', None]
-
-    This function is useful for dependency tracking.  For example if you want
-    to rebuild parts of the website after a layout template has changed.
-    """
-    for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
-                              nodes.Include)):
-        if not isinstance(node.template, nodes.Const):
-            # a tuple with some non consts in there
-            if isinstance(node.template, (nodes.Tuple, nodes.List)):
-                for template_name in node.template.items:
-                    # something const, only yield the strings and ignore
-                    # non-string consts that really just make no sense
-                    if isinstance(template_name, nodes.Const):
-                        if isinstance(template_name.value, string_types):
-                            yield template_name.value
-                    # something dynamic in there
-                    else:
-                        yield None
-            # something dynamic we don't know about here
-            else:
-                yield None
-            continue
-        # constant is a basestring, direct template name
-        if isinstance(node.template.value, string_types):
-            yield node.template.value
-        # a tuple or list (latter *should* not happen) made of consts,
-        # yield the consts that are strings.  We could warn here for
-        # non string values
-        elif isinstance(node, nodes.Include) and \
-             isinstance(node.template.value, (tuple, list)):
-            for template_name in node.template.value:
-                if isinstance(template_name, string_types):
-                    yield template_name
-        # something else we don't care about, we could warn here
-        else:
-            yield None
diff --git a/python/ext-libs/jinja2/nodes.py b/python/ext-libs/jinja2/nodes.py
deleted file mode 100644
index c5697e6..0000000
--- a/python/ext-libs/jinja2/nodes.py
+++ /dev/null
@@ -1,914 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.nodes
-    ~~~~~~~~~~~~
-
-    This module implements additional nodes derived from the ast base node.
-
-    It also provides some node tree helper functions like `in_lineno` and
-    `get_nodes` used by the parser and translator in order to normalize
-    python and jinja nodes.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import operator
-
-from collections import deque
-from jinja2.utils import Markup
-from jinja2._compat import next, izip, with_metaclass, text_type, \
-     method_type, function_type
-
-
-#: the types we support for context functions
-_context_function_types = (function_type, method_type)
-
-
-_binop_to_func = {
-    '*':        operator.mul,
-    '/':        operator.truediv,
-    '//':       operator.floordiv,
-    '**':       operator.pow,
-    '%':        operator.mod,
-    '+':        operator.add,
-    '-':        operator.sub
-}
-
-_uaop_to_func = {
-    'not':      operator.not_,
-    '+':        operator.pos,
-    '-':        operator.neg
-}
-
-_cmpop_to_func = {
-    'eq':       operator.eq,
-    'ne':       operator.ne,
-    'gt':       operator.gt,
-    'gteq':     operator.ge,
-    'lt':       operator.lt,
-    'lteq':     operator.le,
-    'in':       lambda a, b: a in b,
-    'notin':    lambda a, b: a not in b
-}
-
-
-class Impossible(Exception):
-    """Raised if the node could not perform a requested action."""
-
-
-class NodeType(type):
-    """A metaclass for nodes that handles the field and attribute
-    inheritance.  fields and attributes from the parent class are
-    automatically forwarded to the child."""
-
-    def __new__(cls, name, bases, d):
-        for attr in 'fields', 'attributes':
-            storage = []
-            storage.extend(getattr(bases[0], attr, ()))
-            storage.extend(d.get(attr, ()))
-            assert len(bases) == 1, 'multiple inheritance not allowed'
-            assert len(storage) == len(set(storage)), 'layout conflict'
-            d[attr] = tuple(storage)
-        d.setdefault('abstract', False)
-        return type.__new__(cls, name, bases, d)
-
-
-class EvalContext(object):
-    """Holds evaluation time information.  Custom attributes can be attached
-    to it in extensions.
-    """
-
-    def __init__(self, environment, template_name=None):
-        self.environment = environment
-        if callable(environment.autoescape):
-            self.autoescape = environment.autoescape(template_name)
-        else:
-            self.autoescape = environment.autoescape
-        self.volatile = False
-
-    def save(self):
-        return self.__dict__.copy()
-
-    def revert(self, old):
-        self.__dict__.clear()
-        self.__dict__.update(old)
-
-
-def get_eval_context(node, ctx):
-    if ctx is None:
-        if node.environment is None:
-            raise RuntimeError('if no eval context is passed, the '
-                               'node must have an attached '
-                               'environment.')
-        return EvalContext(node.environment)
-    return ctx
-
-
-class Node(with_metaclass(NodeType, object)):
-    """Baseclass for all Jinja2 nodes.  There are a number of nodes available
-    of different types.  There are four major types:
-
-    -   :class:`Stmt`: statements
-    -   :class:`Expr`: expressions
-    -   :class:`Helper`: helper nodes
-    -   :class:`Template`: the outermost wrapper node
-
-    All nodes have fields and attributes.  Fields may be other nodes, lists,
-    or arbitrary values.  Fields are passed to the constructor as regular
-    positional arguments, attributes as keyword arguments.  Each node has
-    two attributes: `lineno` (the line number of the node) and `environment`.
-    The `environment` attribute is set at the end of the parsing process for
-    all nodes automatically.
-    """
-    fields = ()
-    attributes = ('lineno', 'environment')
-    abstract = True
-
-    def __init__(self, *fields, **attributes):
-        if self.abstract:
-            raise TypeError('abstract nodes are not instanciable')
-        if fields:
-            if len(fields) != len(self.fields):
-                if not self.fields:
-                    raise TypeError('%r takes 0 arguments' %
-                                    self.__class__.__name__)
-                raise TypeError('%r takes 0 or %d argument%s' % (
-                    self.__class__.__name__,
-                    len(self.fields),
-                    len(self.fields) != 1 and 's' or ''
-                ))
-            for name, arg in izip(self.fields, fields):
-                setattr(self, name, arg)
-        for attr in self.attributes:
-            setattr(self, attr, attributes.pop(attr, None))
-        if attributes:
-            raise TypeError('unknown attribute %r' %
-                            next(iter(attributes)))
-
-    def iter_fields(self, exclude=None, only=None):
-        """This method iterates over all fields that are defined and yields
-        ``(key, value)`` tuples.  Per default all fields are returned, but
-        it's possible to limit that to some fields by providing the `only`
-        parameter or to exclude some using the `exclude` parameter.  Both
-        should be sets or tuples of field names.
-        """
-        for name in self.fields:
-            if (exclude is only is None) or \
-               (exclude is not None and name not in exclude) or \
-               (only is not None and name in only):
-                try:
-                    yield name, getattr(self, name)
-                except AttributeError:
-                    pass
-
-    def iter_child_nodes(self, exclude=None, only=None):
-        """Iterates over all direct child nodes of the node.  This iterates
-        over all fields and yields the values of they are nodes.  If the value
-        of a field is a list all the nodes in that list are returned.
-        """
-        for field, item in self.iter_fields(exclude, only):
-            if isinstance(item, list):
-                for n in item:
-                    if isinstance(n, Node):
-                        yield n
-            elif isinstance(item, Node):
-                yield item
-
-    def find(self, node_type):
-        """Find the first node of a given type.  If no such node exists the
-        return value is `None`.
-        """
-        for result in self.find_all(node_type):
-            return result
-
-    def find_all(self, node_type):
-        """Find all the nodes of a given type.  If the type is a tuple,
-        the check is performed for any of the tuple items.
-        """
-        for child in self.iter_child_nodes():
-            if isinstance(child, node_type):
-                yield child
-            for result in child.find_all(node_type):
-                yield result
-
-    def set_ctx(self, ctx):
-        """Reset the context of a node and all child nodes.  Per default the
-        parser will all generate nodes that have a 'load' context as it's the
-        most common one.  This method is used in the parser to set assignment
-        targets and other nodes to a store context.
-        """
-        todo = deque([self])
-        while todo:
-            node = todo.popleft()
-            if 'ctx' in node.fields:
-                node.ctx = ctx
-            todo.extend(node.iter_child_nodes())
-        return self
-
-    def set_lineno(self, lineno, override=False):
-        """Set the line numbers of the node and children."""
-        todo = deque([self])
-        while todo:
-            node = todo.popleft()
-            if 'lineno' in node.attributes:
-                if node.lineno is None or override:
-                    node.lineno = lineno
-            todo.extend(node.iter_child_nodes())
-        return self
-
-    def set_environment(self, environment):
-        """Set the environment for all nodes."""
-        todo = deque([self])
-        while todo:
-            node = todo.popleft()
-            node.environment = environment
-            todo.extend(node.iter_child_nodes())
-        return self
-
-    def __eq__(self, other):
-        return type(self) is type(other) and \
-               tuple(self.iter_fields()) == tuple(other.iter_fields())
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    # Restore Python 2 hashing behavior on Python 3
-    __hash__ = object.__hash__
-
-    def __repr__(self):
-        return '%s(%s)' % (
-            self.__class__.__name__,
-            ', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
-                      arg in self.fields)
-        )
-
-
-class Stmt(Node):
-    """Base node for all statements."""
-    abstract = True
-
-
-class Helper(Node):
-    """Nodes that exist in a specific context only."""
-    abstract = True
-
-
-class Template(Node):
-    """Node that represents a template.  This must be the outermost node that
-    is passed to the compiler.
-    """
-    fields = ('body',)
-
-
-class Output(Stmt):
-    """A node that holds multiple expressions which are then printed out.
-    This is used both for the `print` statement and the regular template data.
-    """
-    fields = ('nodes',)
-
-
-class Extends(Stmt):
-    """Represents an extends statement."""
-    fields = ('template',)
-
-
-class For(Stmt):
-    """The for loop.  `target` is the target for the iteration (usually a
-    :class:`Name` or :class:`Tuple`), `iter` the iterable.  `body` is a list
-    of nodes that are used as loop-body, and `else_` a list of nodes for the
-    `else` block.  If no else node exists it has to be an empty list.
-
-    For filtered nodes an expression can be stored as `test`, otherwise `None`.
-    """
-    fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
-
-
-class If(Stmt):
-    """If `test` is true, `body` is rendered, else `else_`."""
-    fields = ('test', 'body', 'else_')
-
-
-class Macro(Stmt):
-    """A macro definition.  `name` is the name of the macro, `args` a list of
-    arguments and `defaults` a list of defaults if there are any.  `body` is
-    a list of nodes for the macro body.
-    """
-    fields = ('name', 'args', 'defaults', 'body')
-
-
-class CallBlock(Stmt):
-    """Like a macro without a name but a call instead.  `call` is called with
-    the unnamed macro as `caller` argument this node holds.
-    """
-    fields = ('call', 'args', 'defaults', 'body')
-
-
-class FilterBlock(Stmt):
-    """Node for filter sections."""
-    fields = ('body', 'filter')
-
-
-class Block(Stmt):
-    """A node that represents a block."""
-    fields = ('name', 'body', 'scoped')
-
-
-class Include(Stmt):
-    """A node that represents the include tag."""
-    fields = ('template', 'with_context', 'ignore_missing')
-
-
-class Import(Stmt):
-    """A node that represents the import tag."""
-    fields = ('template', 'target', 'with_context')
-
-
-class FromImport(Stmt):
-    """A node that represents the from import tag.  It's important to not
-    pass unsafe names to the name attribute.  The compiler translates the
-    attribute lookups directly into getattr calls and does *not* use the
-    subscript callback of the interface.  As exported variables may not
-    start with double underscores (which the parser asserts) this is not a
-    problem for regular Jinja code, but if this node is used in an extension
-    extra care must be taken.
-
-    The list of names may contain tuples if aliases are wanted.
-    """
-    fields = ('template', 'names', 'with_context')
-
-
-class ExprStmt(Stmt):
-    """A statement that evaluates an expression and discards the result."""
-    fields = ('node',)
-
-
-class Assign(Stmt):
-    """Assigns an expression to a target."""
-    fields = ('target', 'node')
-
-
-class Expr(Node):
-    """Baseclass for all expressions."""
-    abstract = True
-
-    def as_const(self, eval_ctx=None):
-        """Return the value of the expression as constant or raise
-        :exc:`Impossible` if this was not possible.
-
-        An :class:`EvalContext` can be provided, if none is given
-        a default context is created which requires the nodes to have
-        an attached environment.
-
-        .. versionchanged:: 2.4
-           the `eval_ctx` parameter was added.
-        """
-        raise Impossible()
-
-    def can_assign(self):
-        """Check if it's possible to assign something to this node."""
-        return False
-
-
-class BinExpr(Expr):
-    """Baseclass for all binary expressions."""
-    fields = ('left', 'right')
-    operator = None
-    abstract = True
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        # intercepted operators cannot be folded at compile time
-        if self.environment.sandboxed and \
-           self.operator in self.environment.intercepted_binops:
-            raise Impossible()
-        f = _binop_to_func[self.operator]
-        try:
-            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
-        except Exception:
-            raise Impossible()
-
-
-class UnaryExpr(Expr):
-    """Baseclass for all unary expressions."""
-    fields = ('node',)
-    operator = None
-    abstract = True
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        # intercepted operators cannot be folded at compile time
-        if self.environment.sandboxed and \
-           self.operator in self.environment.intercepted_unops:
-            raise Impossible()
-        f = _uaop_to_func[self.operator]
-        try:
-            return f(self.node.as_const(eval_ctx))
-        except Exception:
-            raise Impossible()
-
-
-class Name(Expr):
-    """Looks up a name or stores a value in a name.
-    The `ctx` of the node can be one of the following values:
-
-    -   `store`: store a value in the name
-    -   `load`: load that name
-    -   `param`: like `store` but if the name was defined as function parameter.
-    """
-    fields = ('name', 'ctx')
-
-    def can_assign(self):
-        return self.name not in ('true', 'false', 'none',
-                                 'True', 'False', 'None')
-
-
-class Literal(Expr):
-    """Baseclass for literals."""
-    abstract = True
-
-
-class Const(Literal):
-    """All constant values.  The parser will return this node for simple
-    constants such as ``42`` or ``"foo"`` but it can be used to store more
-    complex values such as lists too.  Only constants with a safe
-    representation (objects where ``eval(repr(x)) == x`` is true).
-    """
-    fields = ('value',)
-
-    def as_const(self, eval_ctx=None):
-        return self.value
-
-    @classmethod
-    def from_untrusted(cls, value, lineno=None, environment=None):
-        """Return a const object if the value is representable as
-        constant value in the generated code, otherwise it will raise
-        an `Impossible` exception.
-        """
-        from .compiler import has_safe_repr
-        if not has_safe_repr(value):
-            raise Impossible()
-        return cls(value, lineno=lineno, environment=environment)
-
-
-class TemplateData(Literal):
-    """A constant template string."""
-    fields = ('data',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if eval_ctx.volatile:
-            raise Impossible()
-        if eval_ctx.autoescape:
-            return Markup(self.data)
-        return self.data
-
-
-class Tuple(Literal):
-    """For loop unpacking and some other things like multiple arguments
-    for subscripts.  Like for :class:`Name` `ctx` specifies if the tuple
-    is used for loading the names or storing.
-    """
-    fields = ('items', 'ctx')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return tuple(x.as_const(eval_ctx) for x in self.items)
-
-    def can_assign(self):
-        for item in self.items:
-            if not item.can_assign():
-                return False
-        return True
-
-
-class List(Literal):
-    """Any list literal such as ``[1, 2, 3]``"""
-    fields = ('items',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return [x.as_const(eval_ctx) for x in self.items]
-
-
-class Dict(Literal):
-    """Any dict literal such as ``{1: 2, 3: 4}``.  The items must be a list of
-    :class:`Pair` nodes.
-    """
-    fields = ('items',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return dict(x.as_const(eval_ctx) for x in self.items)
-
-
-class Pair(Helper):
-    """A key, value pair for dicts."""
-    fields = ('key', 'value')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
-
-
-class Keyword(Helper):
-    """A key, value pair for keyword arguments where key is a string."""
-    fields = ('key', 'value')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return self.key, self.value.as_const(eval_ctx)
-
-
-class CondExpr(Expr):
-    """A conditional expression (inline if expression).  (``{{
-    foo if bar else baz }}``)
-    """
-    fields = ('test', 'expr1', 'expr2')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if self.test.as_const(eval_ctx):
-            return self.expr1.as_const(eval_ctx)
-
-        # if we evaluate to an undefined object, we better do that at runtime
-        if self.expr2 is None:
-            raise Impossible()
-
-        return self.expr2.as_const(eval_ctx)
-
-
-class Filter(Expr):
-    """This node applies a filter on an expression.  `name` is the name of
-    the filter, the rest of the fields are the same as for :class:`Call`.
-
-    If the `node` of a filter is `None` the contents of the last buffer are
-    filtered.  Buffers are created by macros and filter blocks.
-    """
-    fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if eval_ctx.volatile or self.node is None:
-            raise Impossible()
-        # we have to be careful here because we call filter_ below.
-        # if this variable would be called filter, 2to3 would wrap the
-        # call in a list beause it is assuming we are talking about the
-        # builtin filter function here which no longer returns a list in
-        # python 3.  because of that, do not rename filter_ to filter!
-        filter_ = self.environment.filters.get(self.name)
-        if filter_ is None or getattr(filter_, 'contextfilter', False):
-            raise Impossible()
-        obj = self.node.as_const(eval_ctx)
-        args = [x.as_const(eval_ctx) for x in self.args]
-        if getattr(filter_, 'evalcontextfilter', False):
-            args.insert(0, eval_ctx)
-        elif getattr(filter_, 'environmentfilter', False):
-            args.insert(0, self.environment)
-        kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
-        if self.dyn_args is not None:
-            try:
-                args.extend(self.dyn_args.as_const(eval_ctx))
-            except Exception:
-                raise Impossible()
-        if self.dyn_kwargs is not None:
-            try:
-                kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
-            except Exception:
-                raise Impossible()
-        try:
-            return filter_(obj, *args, **kwargs)
-        except Exception:
-            raise Impossible()
-
-
-class Test(Expr):
-    """Applies a test on an expression.  `name` is the name of the test, the
-    rest of the fields are the same as for :class:`Call`.
-    """
-    fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
-
-
-class Call(Expr):
-    """Calls an expression.  `args` is a list of arguments, `kwargs` a list
-    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
-    and `dyn_kwargs` has to be either `None` or a node that is used as
-    node for dynamic positional (``*args``) or keyword (``**kwargs``)
-    arguments.
-    """
-    fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if eval_ctx.volatile:
-            raise Impossible()
-        obj = self.node.as_const(eval_ctx)
-
-        # don't evaluate context functions
-        args = [x.as_const(eval_ctx) for x in self.args]
-        if isinstance(obj, _context_function_types):
-            if getattr(obj, 'contextfunction', False):
-                raise Impossible()
-            elif getattr(obj, 'evalcontextfunction', False):
-                args.insert(0, eval_ctx)
-            elif getattr(obj, 'environmentfunction', False):
-                args.insert(0, self.environment)
-
-        kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
-        if self.dyn_args is not None:
-            try:
-                args.extend(self.dyn_args.as_const(eval_ctx))
-            except Exception:
-                raise Impossible()
-        if self.dyn_kwargs is not None:
-            try:
-                kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
-            except Exception:
-                raise Impossible()
-        try:
-            return obj(*args, **kwargs)
-        except Exception:
-            raise Impossible()
-
-
-class Getitem(Expr):
-    """Get an attribute or item from an expression and prefer the item."""
-    fields = ('node', 'arg', 'ctx')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if self.ctx != 'load':
-            raise Impossible()
-        try:
-            return self.environment.getitem(self.node.as_const(eval_ctx),
-                                            self.arg.as_const(eval_ctx))
-        except Exception:
-            raise Impossible()
-
-    def can_assign(self):
-        return False
-
-
-class Getattr(Expr):
-    """Get an attribute or item from an expression that is a ascii-only
-    bytestring and prefer the attribute.
-    """
-    fields = ('node', 'attr', 'ctx')
-
-    def as_const(self, eval_ctx=None):
-        if self.ctx != 'load':
-            raise Impossible()
-        try:
-            eval_ctx = get_eval_context(self, eval_ctx)
-            return self.environment.getattr(self.node.as_const(eval_ctx),
-                                            self.attr)
-        except Exception:
-            raise Impossible()
-
-    def can_assign(self):
-        return False
-
-
-class Slice(Expr):
-    """Represents a slice object.  This must only be used as argument for
-    :class:`Subscript`.
-    """
-    fields = ('start', 'stop', 'step')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        def const(obj):
-            if obj is None:
-                return None
-            return obj.as_const(eval_ctx)
-        return slice(const(self.start), const(self.stop), const(self.step))
-
-
-class Concat(Expr):
-    """Concatenates the list of expressions provided after converting them to
-    unicode.
-    """
-    fields = ('nodes',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
-
-
-class Compare(Expr):
-    """Compares an expression with some other expressions.  `ops` must be a
-    list of :class:`Operand`\s.
-    """
-    fields = ('expr', 'ops')
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        result = value = self.expr.as_const(eval_ctx)
-        try:
-            for op in self.ops:
-                new_value = op.expr.as_const(eval_ctx)
-                result = _cmpop_to_func[op.op](value, new_value)
-                value = new_value
-        except Exception:
-            raise Impossible()
-        return result
-
-
-class Operand(Helper):
-    """Holds an operator and an expression."""
-    fields = ('op', 'expr')
-
-if __debug__:
-    Operand.__doc__ += '\nThe following operators are available: ' + \
-        ', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
-                  set(_uaop_to_func) | set(_cmpop_to_func)))
-
-
-class Mul(BinExpr):
-    """Multiplies the left with the right node."""
-    operator = '*'
-
-
-class Div(BinExpr):
-    """Divides the left by the right node."""
-    operator = '/'
-
-
-class FloorDiv(BinExpr):
-    """Divides the left by the right node and truncates conver the
-    result into an integer by truncating.
-    """
-    operator = '//'
-
-
-class Add(BinExpr):
-    """Add the left to the right node."""
-    operator = '+'
-
-
-class Sub(BinExpr):
-    """Substract the right from the left node."""
-    operator = '-'
-
-
-class Mod(BinExpr):
-    """Left modulo right."""
-    operator = '%'
-
-
-class Pow(BinExpr):
-    """Left to the power of right."""
-    operator = '**'
-
-
-class And(BinExpr):
-    """Short circuited AND."""
-    operator = 'and'
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
-
-
-class Or(BinExpr):
-    """Short circuited OR."""
-    operator = 'or'
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
-
-
-class Not(UnaryExpr):
-    """Negate the expression."""
-    operator = 'not'
-
-
-class Neg(UnaryExpr):
-    """Make the expression negative."""
-    operator = '-'
-
-
-class Pos(UnaryExpr):
-    """Make the expression positive (noop for most expressions)"""
-    operator = '+'
-
-
-# Helpers for extensions
-
-
-class EnvironmentAttribute(Expr):
-    """Loads an attribute from the environment object.  This is useful for
-    extensions that want to call a callback stored on the environment.
-    """
-    fields = ('name',)
-
-
-class ExtensionAttribute(Expr):
-    """Returns the attribute of an extension bound to the environment.
-    The identifier is the identifier of the :class:`Extension`.
-
-    This node is usually constructed by calling the
-    :meth:`~jinja2.ext.Extension.attr` method on an extension.
-    """
-    fields = ('identifier', 'name')
-
-
-class ImportedName(Expr):
-    """If created with an import name the import name is returned on node
-    access.  For example ``ImportedName('cgi.escape')`` returns the `escape`
-    function from the cgi module on evaluation.  Imports are optimized by the
-    compiler so there is no need to assign them to local variables.
-    """
-    fields = ('importname',)
-
-
-class InternalName(Expr):
-    """An internal name in the compiler.  You cannot create these nodes
-    yourself but the parser provides a
-    :meth:`~jinja2.parser.Parser.free_identifier` method that creates
-    a new identifier for you.  This identifier is not available from the
-    template and is not threated specially by the compiler.
-    """
-    fields = ('name',)
-
-    def __init__(self):
-        raise TypeError('Can\'t create internal names.  Use the '
-                        '`free_identifier` method on a parser.')
-
-
-class MarkSafe(Expr):
-    """Mark the wrapped expression as safe (wrap it as `Markup`)."""
-    fields = ('expr',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        return Markup(self.expr.as_const(eval_ctx))
-
-
-class MarkSafeIfAutoescape(Expr):
-    """Mark the wrapped expression as safe (wrap it as `Markup`) but
-    only if autoescaping is active.
-
-    .. versionadded:: 2.5
-    """
-    fields = ('expr',)
-
-    def as_const(self, eval_ctx=None):
-        eval_ctx = get_eval_context(self, eval_ctx)
-        if eval_ctx.volatile:
-            raise Impossible()
-        expr = self.expr.as_const(eval_ctx)
-        if eval_ctx.autoescape:
-            return Markup(expr)
-        return expr
-
-
-class ContextReference(Expr):
-    """Returns the current template context.  It can be used like a
-    :class:`Name` node, with a ``'load'`` ctx and will return the
-    current :class:`~jinja2.runtime.Context` object.
-
-    Here an example that assigns the current template name to a
-    variable named `foo`::
-
-        Assign(Name('foo', ctx='store'),
-               Getattr(ContextReference(), 'name'))
-    """
-
-
-class Continue(Stmt):
-    """Continue a loop."""
-
-
-class Break(Stmt):
-    """Break a loop."""
-
-
-class Scope(Stmt):
-    """An artificial scope."""
-    fields = ('body',)
-
-
-class EvalContextModifier(Stmt):
-    """Modifies the eval context.  For each option that should be modified,
-    a :class:`Keyword` has to be added to the :attr:`options` list.
-
-    Example to change the `autoescape` setting::
-
-        EvalContextModifier(options=[Keyword('autoescape', Const(True))])
-    """
-    fields = ('options',)
-
-
-class ScopedEvalContextModifier(EvalContextModifier):
-    """Modifies the eval context and reverts it later.  Works exactly like
-    :class:`EvalContextModifier` but will only modify the
-    :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
-    """
-    fields = ('body',)
-
-
-# make sure nobody creates custom nodes
-def _failing_new(*args, **kwargs):
-    raise TypeError('can\'t create custom node types')
-NodeType.__new__ = staticmethod(_failing_new); del _failing_new
diff --git a/python/ext-libs/jinja2/optimizer.py b/python/ext-libs/jinja2/optimizer.py
deleted file mode 100644
index 00eab11..0000000
--- a/python/ext-libs/jinja2/optimizer.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.optimizer
-    ~~~~~~~~~~~~~~~~
-
-    The jinja optimizer is currently trying to constant fold a few expressions
-    and modify the AST in place so that it should be easier to evaluate it.
-
-    Because the AST does not contain all the scoping information and the
-    compiler has to find that out, we cannot do all the optimizations we
-    want.  For example loop unrolling doesn't work because unrolled loops would
-    have a different scoping.
-
-    The solution would be a second syntax tree that has the scoping rules stored.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-from jinja2 import nodes
-from jinja2.visitor import NodeTransformer
-
-
-def optimize(node, environment):
-    """The context hint can be used to perform an static optimization
-    based on the context given."""
-    optimizer = Optimizer(environment)
-    return optimizer.visit(node)
-
-
-class Optimizer(NodeTransformer):
-
-    def __init__(self, environment):
-        self.environment = environment
-
-    def visit_If(self, node):
-        """Eliminate dead code."""
-        # do not optimize ifs that have a block inside so that it doesn't
-        # break super().
-        if node.find(nodes.Block) is not None:
-            return self.generic_visit(node)
-        try:
-            val = self.visit(node.test).as_const()
-        except nodes.Impossible:
-            return self.generic_visit(node)
-        if val:
-            body = node.body
-        else:
-            body = node.else_
-        result = []
-        for node in body:
-            result.extend(self.visit_list(node))
-        return result
-
-    def fold(self, node):
-        """Do constant folding."""
-        node = self.generic_visit(node)
-        try:
-            return nodes.Const.from_untrusted(node.as_const(),
-                                              lineno=node.lineno,
-                                              environment=self.environment)
-        except nodes.Impossible:
-            return node
-
-    visit_Add = visit_Sub = visit_Mul = visit_Div = visit_FloorDiv = \
-    visit_Pow = visit_Mod = visit_And = visit_Or = visit_Pos = visit_Neg = \
-    visit_Not = visit_Compare = visit_Getitem = visit_Getattr = visit_Call = \
-    visit_Filter = visit_Test = visit_CondExpr = fold
-    del fold
diff --git a/python/ext-libs/jinja2/parser.py b/python/ext-libs/jinja2/parser.py
deleted file mode 100644
index f60cd01..0000000
--- a/python/ext-libs/jinja2/parser.py
+++ /dev/null
@@ -1,895 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.parser
-    ~~~~~~~~~~~~~
-
-    Implements the template parser.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-from jinja2 import nodes
-from jinja2.exceptions import TemplateSyntaxError, TemplateAssertionError
-from jinja2.lexer import describe_token, describe_token_expr
-from jinja2._compat import next, imap
-
-
-#: statements that callinto 
-_statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print',
-                                 'macro', 'include', 'from', 'import',
-                                 'set'])
-_compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])
-
-
-class Parser(object):
-    """This is the central parsing class Jinja2 uses.  It's passed to
-    extensions and can be used to parse expressions or statements.
-    """
-
-    def __init__(self, environment, source, name=None, filename=None,
-                 state=None):
-        self.environment = environment
-        self.stream = environment._tokenize(source, name, filename, state)
-        self.name = name
-        self.filename = filename
-        self.closed = False
-        self.extensions = {}
-        for extension in environment.iter_extensions():
-            for tag in extension.tags:
-                self.extensions[tag] = extension.parse
-        self._last_identifier = 0
-        self._tag_stack = []
-        self._end_token_stack = []
-
-    def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
-        """Convenience method that raises `exc` with the message, passed
-        line number or last line number as well as the current name and
-        filename.
-        """
-        if lineno is None:
-            lineno = self.stream.current.lineno
-        raise exc(msg, lineno, self.name, self.filename)
-
-    def _fail_ut_eof(self, name, end_token_stack, lineno):
-        expected = []
-        for exprs in end_token_stack:
-            expected.extend(imap(describe_token_expr, exprs))
-        if end_token_stack:
-            currently_looking = ' or '.join(
-                "'%s'" % describe_token_expr(expr)
-                for expr in end_token_stack[-1])
-        else:
-            currently_looking = None
-
-        if name is None:
-            message = ['Unexpected end of template.']
-        else:
-            message = ['Encountered unknown tag \'%s\'.' % name]
-
-        if currently_looking:
-            if name is not None and name in expected:
-                message.append('You probably made a nesting mistake. Jinja '
-                               'is expecting this tag, but currently looking '
-                               'for %s.' % currently_looking)
-            else:
-                message.append('Jinja was looking for the following tags: '
-                               '%s.' % currently_looking)
-
-        if self._tag_stack:
-            message.append('The innermost block that needs to be '
-                           'closed is \'%s\'.' % self._tag_stack[-1])
-
-        self.fail(' '.join(message), lineno)
-
-    def fail_unknown_tag(self, name, lineno=None):
-        """Called if the parser encounters an unknown tag.  Tries to fail
-        with a human readable error message that could help to identify
-        the problem.
-        """
-        return self._fail_ut_eof(name, self._end_token_stack, lineno)
-
-    def fail_eof(self, end_tokens=None, lineno=None):
-        """Like fail_unknown_tag but for end of template situations."""
-        stack = list(self._end_token_stack)
-        if end_tokens is not None:
-            stack.append(end_tokens)
-        return self._fail_ut_eof(None, stack, lineno)
-
-    def is_tuple_end(self, extra_end_rules=None):
-        """Are we at the end of a tuple?"""
-        if self.stream.current.type in ('variable_end', 'block_end', 'rparen'):
-            return True
-        elif extra_end_rules is not None:
-            return self.stream.current.test_any(extra_end_rules)
-        return False
-
-    def free_identifier(self, lineno=None):
-        """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
-        self._last_identifier += 1
-        rv = object.__new__(nodes.InternalName)
-        nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno)
-        return rv
-
-    def parse_statement(self):
-        """Parse a single statement."""
-        token = self.stream.current
-        if token.type != 'name':
-            self.fail('tag name expected', token.lineno)
-        self._tag_stack.append(token.value)
-        pop_tag = True
-        try:
-            if token.value in _statement_keywords:
-                return getattr(self, 'parse_' + self.stream.current.value)()
-            if token.value == 'call':
-                return self.parse_call_block()
-            if token.value == 'filter':
-                return self.parse_filter_block()
-            ext = self.extensions.get(token.value)
-            if ext is not None:
-                return ext(self)
-
-            # did not work out, remove the token we pushed by accident
-            # from the stack so that the unknown tag fail function can
-            # produce a proper error message.
-            self._tag_stack.pop()
-            pop_tag = False
-            self.fail_unknown_tag(token.value, token.lineno)
-        finally:
-            if pop_tag:
-                self._tag_stack.pop()
-
-    def parse_statements(self, end_tokens, drop_needle=False):
-        """Parse multiple statements into a list until one of the end tokens
-        is reached.  This is used to parse the body of statements as it also
-        parses template data if appropriate.  The parser checks first if the
-        current token is a colon and skips it if there is one.  Then it checks
-        for the block end and parses until if one of the `end_tokens` is
-        reached.  Per default the active token in the stream at the end of
-        the call is the matched end token.  If this is not wanted `drop_needle`
-        can be set to `True` and the end token is removed.
-        """
-        # the first token may be a colon for python compatibility
-        self.stream.skip_if('colon')
-
-        # in the future it would be possible to add whole code sections
-        # by adding some sort of end of statement token and parsing those here.
-        self.stream.expect('block_end')
-        result = self.subparse(end_tokens)
-
-        # we reached the end of the template too early, the subparser
-        # does not check for this, so we do that now
-        if self.stream.current.type == 'eof':
-            self.fail_eof(end_tokens)
-
-        if drop_needle:
-            next(self.stream)
-        return result
-
-    def parse_set(self):
-        """Parse an assign statement."""
-        lineno = next(self.stream).lineno
-        target = self.parse_assign_target()
-        self.stream.expect('assign')
-        expr = self.parse_tuple()
-        return nodes.Assign(target, expr, lineno=lineno)
-
-    def parse_for(self):
-        """Parse a for loop."""
-        lineno = self.stream.expect('name:for').lineno
-        target = self.parse_assign_target(extra_end_rules=('name:in',))
-        self.stream.expect('name:in')
-        iter = self.parse_tuple(with_condexpr=False,
-                                extra_end_rules=('name:recursive',))
-        test = None
-        if self.stream.skip_if('name:if'):
-            test = self.parse_expression()
-        recursive = self.stream.skip_if('name:recursive')
-        body = self.parse_statements(('name:endfor', 'name:else'))
-        if next(self.stream).value == 'endfor':
-            else_ = []
-        else:
-            else_ = self.parse_statements(('name:endfor',), drop_needle=True)
-        return nodes.For(target, iter, body, else_, test,
-                         recursive, lineno=lineno)
-
-    def parse_if(self):
-        """Parse an if construct."""
-        node = result = nodes.If(lineno=self.stream.expect('name:if').lineno)
-        while 1:
-            node.test = self.parse_tuple(with_condexpr=False)
-            node.body = self.parse_statements(('name:elif', 'name:else',
-                                               'name:endif'))
-            token = next(self.stream)
-            if token.test('name:elif'):
-                new_node = nodes.If(lineno=self.stream.current.lineno)
-                node.else_ = [new_node]
-                node = new_node
-                continue
-            elif token.test('name:else'):
-                node.else_ = self.parse_statements(('name:endif',),
-                                                   drop_needle=True)
-            else:
-                node.else_ = []
-            break
-        return result
-
-    def parse_block(self):
-        node = nodes.Block(lineno=next(self.stream).lineno)
-        node.name = self.stream.expect('name').value
-        node.scoped = self.stream.skip_if('name:scoped')
-
-        # common problem people encounter when switching from django
-        # to jinja.  we do not support hyphens in block names, so let's
-        # raise a nicer error message in that case.
-        if self.stream.current.type == 'sub':
-            self.fail('Block names in Jinja have to be valid Python '
-                      'identifiers and may not contain hyphens, use an '
-                      'underscore instead.')
-
-        node.body = self.parse_statements(('name:endblock',), drop_needle=True)
-        self.stream.skip_if('name:' + node.name)
-        return node
-
-    def parse_extends(self):
-        node = nodes.Extends(lineno=next(self.stream).lineno)
-        node.template = self.parse_expression()
-        return node
-
-    def parse_import_context(self, node, default):
-        if self.stream.current.test_any('name:with', 'name:without') and \
-           self.stream.look().test('name:context'):
-            node.with_context = next(self.stream).value == 'with'
-            self.stream.skip()
-        else:
-            node.with_context = default
-        return node
-
-    def parse_include(self):
-        node = nodes.Include(lineno=next(self.stream).lineno)
-        node.template = self.parse_expression()
-        if self.stream.current.test('name:ignore') and \
-           self.stream.look().test('name:missing'):
-            node.ignore_missing = True
-            self.stream.skip(2)
-        else:
-            node.ignore_missing = False
-        return self.parse_import_context(node, True)
-
-    def parse_import(self):
-        node = nodes.Import(lineno=next(self.stream).lineno)
-        node.template = self.parse_expression()
-        self.stream.expect('name:as')
-        node.target = self.parse_assign_target(name_only=True).name
-        return self.parse_import_context(node, False)
-
-    def parse_from(self):
-        node = nodes.FromImport(lineno=next(self.stream).lineno)
-        node.template = self.parse_expression()
-        self.stream.expect('name:import')
-        node.names = []
-
-        def parse_context():
-            if self.stream.current.value in ('with', 'without') and \
-               self.stream.look().test('name:context'):
-                node.with_context = next(self.stream).value == 'with'
-                self.stream.skip()
-                return True
-            return False
-
-        while 1:
-            if node.names:
-                self.stream.expect('comma')
-            if self.stream.current.type == 'name':
-                if parse_context():
-                    break
-                target = self.parse_assign_target(name_only=True)
-                if target.name.startswith('_'):
-                    self.fail('names starting with an underline can not '
-                              'be imported', target.lineno,
-                              exc=TemplateAssertionError)
-                if self.stream.skip_if('name:as'):
-                    alias = self.parse_assign_target(name_only=True)
-                    node.names.append((target.name, alias.name))
-                else:
-                    node.names.append(target.name)
-                if parse_context() or self.stream.current.type != 'comma':
-                    break
-            else:
-                break
-        if not hasattr(node, 'with_context'):
-            node.with_context = False
-            self.stream.skip_if('comma')
-        return node
-
-    def parse_signature(self, node):
-        node.args = args = []
-        node.defaults = defaults = []
-        self.stream.expect('lparen')
-        while self.stream.current.type != 'rparen':
-            if args:
-                self.stream.expect('comma')
-            arg = self.parse_assign_target(name_only=True)
-            arg.set_ctx('param')
-            if self.stream.skip_if('assign'):
-                defaults.append(self.parse_expression())
-            args.append(arg)
-        self.stream.expect('rparen')
-
-    def parse_call_block(self):
-        node = nodes.CallBlock(lineno=next(self.stream).lineno)
-        if self.stream.current.type == 'lparen':
-            self.parse_signature(node)
-        else:
-            node.args = []
-            node.defaults = []
-
-        node.call = self.parse_expression()
-        if not isinstance(node.call, nodes.Call):
-            self.fail('expected call', node.lineno)
-        node.body = self.parse_statements(('name:endcall',), drop_needle=True)
-        return node
-
-    def parse_filter_block(self):
-        node = nodes.FilterBlock(lineno=next(self.stream).lineno)
-        node.filter = self.parse_filter(None, start_inline=True)
-        node.body = self.parse_statements(('name:endfilter',),
-                                          drop_needle=True)
-        return node
-
-    def parse_macro(self):
-        node = nodes.Macro(lineno=next(self.stream).lineno)
-        node.name = self.parse_assign_target(name_only=True).name
-        self.parse_signature(node)
-        node.body = self.parse_statements(('name:endmacro',),
-                                          drop_needle=True)
-        return node
-
-    def parse_print(self):
-        node = nodes.Output(lineno=next(self.stream).lineno)
-        node.nodes = []
-        while self.stream.current.type != 'block_end':
-            if node.nodes:
-                self.stream.expect('comma')
-            node.nodes.append(self.parse_expression())
-        return node
-
-    def parse_assign_target(self, with_tuple=True, name_only=False,
-                            extra_end_rules=None):
-        """Parse an assignment target.  As Jinja2 allows assignments to
-        tuples, this function can parse all allowed assignment targets.  Per
-        default assignments to tuples are parsed, that can be disable however
-        by setting `with_tuple` to `False`.  If only assignments to names are
-        wanted `name_only` can be set to `True`.  The `extra_end_rules`
-        parameter is forwarded to the tuple parsing function.
-        """
-        if name_only:
-            token = self.stream.expect('name')
-            target = nodes.Name(token.value, 'store', lineno=token.lineno)
-        else:
-            if with_tuple:
-                target = self.parse_tuple(simplified=True,
-                                          extra_end_rules=extra_end_rules)
-            else:
-                target = self.parse_primary()
-            target.set_ctx('store')
-        if not target.can_assign():
-            self.fail('can\'t assign to %r' % target.__class__.
-                      __name__.lower(), target.lineno)
-        return target
-
-    def parse_expression(self, with_condexpr=True):
-        """Parse an expression.  Per default all expressions are parsed, if
-        the optional `with_condexpr` parameter is set to `False` conditional
-        expressions are not parsed.
-        """
-        if with_condexpr:
-            return self.parse_condexpr()
-        return self.parse_or()
-
-    def parse_condexpr(self):
-        lineno = self.stream.current.lineno
-        expr1 = self.parse_or()
-        while self.stream.skip_if('name:if'):
-            expr2 = self.parse_or()
-            if self.stream.skip_if('name:else'):
-                expr3 = self.parse_condexpr()
-            else:
-                expr3 = None
-            expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return expr1
-
-    def parse_or(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_and()
-        while self.stream.skip_if('name:or'):
-            right = self.parse_and()
-            left = nodes.Or(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_and(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_not()
-        while self.stream.skip_if('name:and'):
-            right = self.parse_not()
-            left = nodes.And(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_not(self):
-        if self.stream.current.test('name:not'):
-            lineno = next(self.stream).lineno
-            return nodes.Not(self.parse_not(), lineno=lineno)
-        return self.parse_compare()
-
-    def parse_compare(self):
-        lineno = self.stream.current.lineno
-        expr = self.parse_add()
-        ops = []
-        while 1:
-            token_type = self.stream.current.type
-            if token_type in _compare_operators:
-                next(self.stream)
-                ops.append(nodes.Operand(token_type, self.parse_add()))
-            elif self.stream.skip_if('name:in'):
-                ops.append(nodes.Operand('in', self.parse_add()))
-            elif self.stream.current.test('name:not') and \
-                 self.stream.look().test('name:in'):
-                self.stream.skip(2)
-                ops.append(nodes.Operand('notin', self.parse_add()))
-            else:
-                break
-            lineno = self.stream.current.lineno
-        if not ops:
-            return expr
-        return nodes.Compare(expr, ops, lineno=lineno)
-
-    def parse_add(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_sub()
-        while self.stream.current.type == 'add':
-            next(self.stream)
-            right = self.parse_sub()
-            left = nodes.Add(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_sub(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_concat()
-        while self.stream.current.type == 'sub':
-            next(self.stream)
-            right = self.parse_concat()
-            left = nodes.Sub(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_concat(self):
-        lineno = self.stream.current.lineno
-        args = [self.parse_mul()]
-        while self.stream.current.type == 'tilde':
-            next(self.stream)
-            args.append(self.parse_mul())
-        if len(args) == 1:
-            return args[0]
-        return nodes.Concat(args, lineno=lineno)
-
-    def parse_mul(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_div()
-        while self.stream.current.type == 'mul':
-            next(self.stream)
-            right = self.parse_div()
-            left = nodes.Mul(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_div(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_floordiv()
-        while self.stream.current.type == 'div':
-            next(self.stream)
-            right = self.parse_floordiv()
-            left = nodes.Div(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_floordiv(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_mod()
-        while self.stream.current.type == 'floordiv':
-            next(self.stream)
-            right = self.parse_mod()
-            left = nodes.FloorDiv(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_mod(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_pow()
-        while self.stream.current.type == 'mod':
-            next(self.stream)
-            right = self.parse_pow()
-            left = nodes.Mod(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_pow(self):
-        lineno = self.stream.current.lineno
-        left = self.parse_unary()
-        while self.stream.current.type == 'pow':
-            next(self.stream)
-            right = self.parse_unary()
-            left = nodes.Pow(left, right, lineno=lineno)
-            lineno = self.stream.current.lineno
-        return left
-
-    def parse_unary(self, with_filter=True):
-        token_type = self.stream.current.type
-        lineno = self.stream.current.lineno
-        if token_type == 'sub':
-            next(self.stream)
-            node = nodes.Neg(self.parse_unary(False), lineno=lineno)
-        elif token_type == 'add':
-            next(self.stream)
-            node = nodes.Pos(self.parse_unary(False), lineno=lineno)
-        else:
-            node = self.parse_primary()
-        node = self.parse_postfix(node)
-        if with_filter:
-            node = self.parse_filter_expr(node)
-        return node
-
-    def parse_primary(self):
-        token = self.stream.current
-        if token.type == 'name':
-            if token.value in ('true', 'false', 'True', 'False'):
-                node = nodes.Const(token.value in ('true', 'True'),
-                                   lineno=token.lineno)
-            elif token.value in ('none', 'None'):
-                node = nodes.Const(None, lineno=token.lineno)
-            else:
-                node = nodes.Name(token.value, 'load', lineno=token.lineno)
-            next(self.stream)
-        elif token.type == 'string':
-            next(self.stream)
-            buf = [token.value]
-            lineno = token.lineno
-            while self.stream.current.type == 'string':
-                buf.append(self.stream.current.value)
-                next(self.stream)
-            node = nodes.Const(''.join(buf), lineno=lineno)
-        elif token.type in ('integer', 'float'):
-            next(self.stream)
-            node = nodes.Const(token.value, lineno=token.lineno)
-        elif token.type == 'lparen':
-            next(self.stream)
-            node = self.parse_tuple(explicit_parentheses=True)
-            self.stream.expect('rparen')
-        elif token.type == 'lbracket':
-            node = self.parse_list()
-        elif token.type == 'lbrace':
-            node = self.parse_dict()
-        else:
-            self.fail("unexpected '%s'" % describe_token(token), token.lineno)
-        return node
-
-    def parse_tuple(self, simplified=False, with_condexpr=True,
-                    extra_end_rules=None, explicit_parentheses=False):
-        """Works like `parse_expression` but if multiple expressions are
-        delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
-        This method could also return a regular expression instead of a tuple
-        if no commas where found.
-
-        The default parsing mode is a full tuple.  If `simplified` is `True`
-        only names and literals are parsed.  The `no_condexpr` parameter is
-        forwarded to :meth:`parse_expression`.
-
-        Because tuples do not require delimiters and may end in a bogus comma
-        an extra hint is needed that marks the end of a tuple.  For example
-        for loops support tuples between `for` and `in`.  In that case the
-        `extra_end_rules` is set to ``['name:in']``.
-
-        `explicit_parentheses` is true if the parsing was triggered by an
-        expression in parentheses.  This is used to figure out if an empty
-        tuple is a valid expression or not.
-        """
-        lineno = self.stream.current.lineno
-        if simplified:
-            parse = self.parse_primary
-        elif with_condexpr:
-            parse = self.parse_expression
-        else:
-            parse = lambda: self.parse_expression(with_condexpr=False)
-        args = []
-        is_tuple = False
-        while 1:
-            if args:
-                self.stream.expect('comma')
-            if self.is_tuple_end(extra_end_rules):
-                break
-            args.append(parse())
-            if self.stream.current.type == 'comma':
-                is_tuple = True
-            else:
-                break
-            lineno = self.stream.current.lineno
-
-        if not is_tuple:
-            if args:
-                return args[0]
-
-            # if we don't have explicit parentheses, an empty tuple is
-            # not a valid expression.  This would mean nothing (literally
-            # nothing) in the spot of an expression would be an empty
-            # tuple.
-            if not explicit_parentheses:
-                self.fail('Expected an expression, got \'%s\'' %
-                          describe_token(self.stream.current))
-
-        return nodes.Tuple(args, 'load', lineno=lineno)
-
-    def parse_list(self):
-        token = self.stream.expect('lbracket')
-        items = []
-        while self.stream.current.type != 'rbracket':
-            if items:
-                self.stream.expect('comma')
-            if self.stream.current.type == 'rbracket':
-                break
-            items.append(self.parse_expression())
-        self.stream.expect('rbracket')
-        return nodes.List(items, lineno=token.lineno)
-
-    def parse_dict(self):
-        token = self.stream.expect('lbrace')
-        items = []
-        while self.stream.current.type != 'rbrace':
-            if items:
-                self.stream.expect('comma')
-            if self.stream.current.type == 'rbrace':
-                break
-            key = self.parse_expression()
-            self.stream.expect('colon')
-            value = self.parse_expression()
-            items.append(nodes.Pair(key, value, lineno=key.lineno))
-        self.stream.expect('rbrace')
-        return nodes.Dict(items, lineno=token.lineno)
-
-    def parse_postfix(self, node):
-        while 1:
-            token_type = self.stream.current.type
-            if token_type == 'dot' or token_type == 'lbracket':
-                node = self.parse_subscript(node)
-            # calls are valid both after postfix expressions (getattr
-            # and getitem) as well as filters and tests
-            elif token_type == 'lparen':
-                node = self.parse_call(node)
-            else:
-                break
-        return node
-
-    def parse_filter_expr(self, node):
-        while 1:
-            token_type = self.stream.current.type
-            if token_type == 'pipe':
-                node = self.parse_filter(node)
-            elif token_type == 'name' and self.stream.current.value == 'is':
-                node = self.parse_test(node)
-            # calls are valid both after postfix expressions (getattr
-            # and getitem) as well as filters and tests
-            elif token_type == 'lparen':
-                node = self.parse_call(node)
-            else:
-                break
-        return node
-
-    def parse_subscript(self, node):
-        token = next(self.stream)
-        if token.type == 'dot':
-            attr_token = self.stream.current
-            next(self.stream)
-            if attr_token.type == 'name':
-                return nodes.Getattr(node, attr_token.value, 'load',
-                                     lineno=token.lineno)
-            elif attr_token.type != 'integer':
-                self.fail('expected name or number', attr_token.lineno)
-            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
-            return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
-        if token.type == 'lbracket':
-            args = []
-            while self.stream.current.type != 'rbracket':
-                if args:
-                    self.stream.expect('comma')
-                args.append(self.parse_subscribed())
-            self.stream.expect('rbracket')
-            if len(args) == 1:
-                arg = args[0]
-            else:
-                arg = nodes.Tuple(args, 'load', lineno=token.lineno)
-            return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
-        self.fail('expected subscript expression', self.lineno)
-
-    def parse_subscribed(self):
-        lineno = self.stream.current.lineno
-
-        if self.stream.current.type == 'colon':
-            next(self.stream)
-            args = [None]
-        else:
-            node = self.parse_expression()
-            if self.stream.current.type != 'colon':
-                return node
-            next(self.stream)
-            args = [node]
-
-        if self.stream.current.type == 'colon':
-            args.append(None)
-        elif self.stream.current.type not in ('rbracket', 'comma'):
-            args.append(self.parse_expression())
-        else:
-            args.append(None)
-
-        if self.stream.current.type == 'colon':
-            next(self.stream)
-            if self.stream.current.type not in ('rbracket', 'comma'):
-                args.append(self.parse_expression())
-            else:
-                args.append(None)
-        else:
-            args.append(None)
-
-        return nodes.Slice(lineno=lineno, *args)
-
-    def parse_call(self, node):
-        token = self.stream.expect('lparen')
-        args = []
-        kwargs = []
-        dyn_args = dyn_kwargs = None
-        require_comma = False
-
-        def ensure(expr):
-            if not expr:
-                self.fail('invalid syntax for function call expression',
-                          token.lineno)
-
-        while self.stream.current.type != 'rparen':
-            if require_comma:
-                self.stream.expect('comma')
-                # support for trailing comma
-                if self.stream.current.type == 'rparen':
-                    break
-            if self.stream.current.type == 'mul':
-                ensure(dyn_args is None and dyn_kwargs is None)
-                next(self.stream)
-                dyn_args = self.parse_expression()
-            elif self.stream.current.type == 'pow':
-                ensure(dyn_kwargs is None)
-                next(self.stream)
-                dyn_kwargs = self.parse_expression()
-            else:
-                ensure(dyn_args is None and dyn_kwargs is None)
-                if self.stream.current.type == 'name' and \
-                    self.stream.look().type == 'assign':
-                    key = self.stream.current.value
-                    self.stream.skip(2)
-                    value = self.parse_expression()
-                    kwargs.append(nodes.Keyword(key, value,
-                                                lineno=value.lineno))
-                else:
-                    ensure(not kwargs)
-                    args.append(self.parse_expression())
-
-            require_comma = True
-        self.stream.expect('rparen')
-
-        if node is None:
-            return args, kwargs, dyn_args, dyn_kwargs
-        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs,
-                          lineno=token.lineno)
-
-    def parse_filter(self, node, start_inline=False):
-        while self.stream.current.type == 'pipe' or start_inline:
-            if not start_inline:
-                next(self.stream)
-            token = self.stream.expect('name')
-            name = token.value
-            while self.stream.current.type == 'dot':
-                next(self.stream)
-                name += '.' + self.stream.expect('name').value
-            if self.stream.current.type == 'lparen':
-                args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
-            else:
-                args = []
-                kwargs = []
-                dyn_args = dyn_kwargs = None
-            node = nodes.Filter(node, name, args, kwargs, dyn_args,
-                                dyn_kwargs, lineno=token.lineno)
-            start_inline = False
-        return node
-
-    def parse_test(self, node):
-        token = next(self.stream)
-        if self.stream.current.test('name:not'):
-            next(self.stream)
-            negated = True
-        else:
-            negated = False
-        name = self.stream.expect('name').value
-        while self.stream.current.type == 'dot':
-            next(self.stream)
-            name += '.' + self.stream.expect('name').value
-        dyn_args = dyn_kwargs = None
-        kwargs = []
-        if self.stream.current.type == 'lparen':
-            args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
-        elif self.stream.current.type in ('name', 'string', 'integer',
-                                          'float', 'lparen', 'lbracket',
-                                          'lbrace') and not \
-             self.stream.current.test_any('name:else', 'name:or',
-                                          'name:and'):
-            if self.stream.current.test('name:is'):
-                self.fail('You cannot chain multiple tests with is')
-            args = [self.parse_expression()]
-        else:
-            args = []
-        node = nodes.Test(node, name, args, kwargs, dyn_args,
-                          dyn_kwargs, lineno=token.lineno)
-        if negated:
-            node = nodes.Not(node, lineno=token.lineno)
-        return node
-
-    def subparse(self, end_tokens=None):
-        body = []
-        data_buffer = []
-        add_data = data_buffer.append
-
-        if end_tokens is not None:
-            self._end_token_stack.append(end_tokens)
-
-        def flush_data():
-            if data_buffer:
-                lineno = data_buffer[0].lineno
-                body.append(nodes.Output(data_buffer[:], lineno=lineno))
-                del data_buffer[:]
-
-        try:
-            while self.stream:
-                token = self.stream.current
-                if token.type == 'data':
-                    if token.value:
-                        add_data(nodes.TemplateData(token.value,
-                                                    lineno=token.lineno))
-                    next(self.stream)
-                elif token.type == 'variable_begin':
-                    next(self.stream)
-                    add_data(self.parse_tuple(with_condexpr=True))
-                    self.stream.expect('variable_end')
-                elif token.type == 'block_begin':
-                    flush_data()
-                    next(self.stream)
-                    if end_tokens is not None and \
-                       self.stream.current.test_any(*end_tokens):
-                        return body
-                    rv = self.parse_statement()
-                    if isinstance(rv, list):
-                        body.extend(rv)
-                    else:
-                        body.append(rv)
-                    self.stream.expect('block_end')
-                else:
-                    raise AssertionError('internal parsing error')
-
-            flush_data()
-        finally:
-            if end_tokens is not None:
-                self._end_token_stack.pop()
-
-        return body
-
-    def parse(self):
-        """Parse the whole template into a `Template` node."""
-        result = nodes.Template(self.subparse(), lineno=1)
-        result.set_environment(self.environment)
-        return result
diff --git a/python/ext-libs/jinja2/runtime.py b/python/ext-libs/jinja2/runtime.py
deleted file mode 100644
index 7791c64..0000000
--- a/python/ext-libs/jinja2/runtime.py
+++ /dev/null
@@ -1,581 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.runtime
-    ~~~~~~~~~~~~~~
-
-    Runtime helpers.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-from itertools import chain
-from jinja2.nodes import EvalContext, _context_function_types
-from jinja2.utils import Markup, soft_unicode, escape, missing, concat, \
-     internalcode, object_type_repr
-from jinja2.exceptions import UndefinedError, TemplateRuntimeError, \
-     TemplateNotFound
-from jinja2._compat import next, imap, text_type, iteritems, \
-     implements_iterator, implements_to_string, string_types, PY2
-
-
-# these variables are exported to the template runtime
-__all__ = ['LoopContext', 'TemplateReference', 'Macro', 'Markup',
-           'TemplateRuntimeError', 'missing', 'concat', 'escape',
-           'markup_join', 'unicode_join', 'to_string', 'identity',
-           'TemplateNotFound']
-
-#: the name of the function that is used to convert something into
-#: a string.  We can just use the text type here.
-to_string = text_type
-
-#: the identity function.  Useful for certain things in the environment
-identity = lambda x: x
-
-_last_iteration = object()
-
-
-def markup_join(seq):
-    """Concatenation that escapes if necessary and converts to unicode."""
-    buf = []
-    iterator = imap(soft_unicode, seq)
-    for arg in iterator:
-        buf.append(arg)
-        if hasattr(arg, '__html__'):
-            return Markup(u'').join(chain(buf, iterator))
-    return concat(buf)
-
-
-def unicode_join(seq):
-    """Simple args to unicode conversion and concatenation."""
-    return concat(imap(text_type, seq))
-
-
-def new_context(environment, template_name, blocks, vars=None,
-                shared=None, globals=None, locals=None):
-    """Internal helper to for context creation."""
-    if vars is None:
-        vars = {}
-    if shared:
-        parent = vars
-    else:
-        parent = dict(globals or (), **vars)
-    if locals:
-        # if the parent is shared a copy should be created because
-        # we don't want to modify the dict passed
-        if shared:
-            parent = dict(parent)
-        for key, value in iteritems(locals):
-            if key[:2] == 'l_' and value is not missing:
-                parent[key[2:]] = value
-    return Context(environment, parent, template_name, blocks)
-
-
-class TemplateReference(object):
-    """The `self` in templates."""
-
-    def __init__(self, context):
-        self.__context = context
-
-    def __getitem__(self, name):
-        blocks = self.__context.blocks[name]
-        return BlockReference(name, self.__context, blocks, 0)
-
-    def __repr__(self):
-        return '<%s %r>' % (
-            self.__class__.__name__,
-            self.__context.name
-        )
-
-
-class Context(object):
-    """The template context holds the variables of a template.  It stores the
-    values passed to the template and also the names the template exports.
-    Creating instances is neither supported nor useful as it's created
-    automatically at various stages of the template evaluation and should not
-    be created by hand.
-
-    The context is immutable.  Modifications on :attr:`parent` **must not**
-    happen and modifications on :attr:`vars` are allowed from generated
-    template code only.  Template filters and global functions marked as
-    :func:`contextfunction`\s get the active context passed as first argument
-    and are allowed to access the context read-only.
-
-    The template context supports read only dict operations (`get`,
-    `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
-    `__getitem__`, `__contains__`).  Additionally there is a :meth:`resolve`
-    method that doesn't fail with a `KeyError` but returns an
-    :class:`Undefined` object for missing variables.
-    """
-    __slots__ = ('parent', 'vars', 'environment', 'eval_ctx', 'exported_vars',
-                 'name', 'blocks', '__weakref__')
-
-    def __init__(self, environment, parent, name, blocks):
-        self.parent = parent
-        self.vars = {}
-        self.environment = environment
-        self.eval_ctx = EvalContext(self.environment, name)
-        self.exported_vars = set()
-        self.name = name
-
-        # create the initial mapping of blocks.  Whenever template inheritance
-        # takes place the runtime will update this mapping with the new blocks
-        # from the template.
-        self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
-
-    def super(self, name, current):
-        """Render a parent block."""
-        try:
-            blocks = self.blocks[name]
-            index = blocks.index(current) + 1
-            blocks[index]
-        except LookupError:
-            return self.environment.undefined('there is no parent block '
-                                              'called %r.' % name,
-                                              name='super')
-        return BlockReference(name, self, blocks, index)
-
-    def get(self, key, default=None):
-        """Returns an item from the template context, if it doesn't exist
-        `default` is returned.
-        """
-        try:
-            return self[key]
-        except KeyError:
-            return default
-
-    def resolve(self, key):
-        """Looks up a variable like `__getitem__` or `get` but returns an
-        :class:`Undefined` object with the name of the name looked up.
-        """
-        if key in self.vars:
-            return self.vars[key]
-        if key in self.parent:
-            return self.parent[key]
-        return self.environment.undefined(name=key)
-
-    def get_exported(self):
-        """Get a new dict with the exported variables."""
-        return dict((k, self.vars[k]) for k in self.exported_vars)
-
-    def get_all(self):
-        """Return a copy of the complete context as dict including the
-        exported variables.
-        """
-        return dict(self.parent, **self.vars)
-
-    @internalcode
-    def call(__self, __obj, *args, **kwargs):
-        """Call the callable with the arguments and keyword arguments
-        provided but inject the active context or environment as first
-        argument if the callable is a :func:`contextfunction` or
-        :func:`environmentfunction`.
-        """
-        if __debug__:
-            __traceback_hide__ = True
-
-        # Allow callable classes to take a context
-        fn = __obj.__call__
-        for fn_type in ('contextfunction',
-                        'evalcontextfunction',
-                        'environmentfunction'):
-            if hasattr(fn, fn_type):
-                __obj = fn
-                break
-
-        if isinstance(__obj, _context_function_types):
-            if getattr(__obj, 'contextfunction', 0):
-                args = (__self,) + args
-            elif getattr(__obj, 'evalcontextfunction', 0):
-                args = (__self.eval_ctx,) + args
-            elif getattr(__obj, 'environmentfunction', 0):
-                args = (__self.environment,) + args
-        try:
-            return __obj(*args, **kwargs)
-        except StopIteration:
-            return __self.environment.undefined('value was undefined because '
-                                                'a callable raised a '
-                                                'StopIteration exception')
-
-    def derived(self, locals=None):
-        """Internal helper function to create a derived context."""
-        context = new_context(self.environment, self.name, {},
-                              self.parent, True, None, locals)
-        context.vars.update(self.vars)
-        context.eval_ctx = self.eval_ctx
-        context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
-        return context
-
-    def _all(meth):
-        proxy = lambda self: getattr(self.get_all(), meth)()
-        proxy.__doc__ = getattr(dict, meth).__doc__
-        proxy.__name__ = meth
-        return proxy
-
-    keys = _all('keys')
-    values = _all('values')
-    items = _all('items')
-
-    # not available on python 3
-    if PY2:
-        iterkeys = _all('iterkeys')
-        itervalues = _all('itervalues')
-        iteritems = _all('iteritems')
-    del _all
-
-    def __contains__(self, name):
-        return name in self.vars or name in self.parent
-
-    def __getitem__(self, key):
-        """Lookup a variable or raise `KeyError` if the variable is
-        undefined.
-        """
-        item = self.resolve(key)
-        if isinstance(item, Undefined):
-            raise KeyError(key)
-        return item
-
-    def __repr__(self):
-        return '<%s %s of %r>' % (
-            self.__class__.__name__,
-            repr(self.get_all()),
-            self.name
-        )
-
-
-# register the context as mapping if possible
-try:
-    from collections import Mapping
-    Mapping.register(Context)
-except ImportError:
-    pass
-
-
-class BlockReference(object):
-    """One block on a template reference."""
-
-    def __init__(self, name, context, stack, depth):
-        self.name = name
-        self._context = context
-        self._stack = stack
-        self._depth = depth
-
-    @property
-    def super(self):
-        """Super the block."""
-        if self._depth + 1 >= len(self._stack):
-            return self._context.environment. \
-                undefined('there is no parent block called %r.' %
-                          self.name, name='super')
-        return BlockReference(self.name, self._context, self._stack,
-                              self._depth + 1)
-
-    @internalcode
-    def __call__(self):
-        rv = concat(self._stack[self._depth](self._context))
-        if self._context.eval_ctx.autoescape:
-            rv = Markup(rv)
-        return rv
-
-
-class LoopContext(object):
-    """A loop context for dynamic iteration."""
-
-    def __init__(self, iterable, recurse=None, depth0=0):
-        self._iterator = iter(iterable)
-        self._recurse = recurse
-        self._after = self._safe_next()
-        self.index0 = -1
-        self.depth0 = depth0
-
-        # try to get the length of the iterable early.  This must be done
-        # here because there are some broken iterators around where there
-        # __len__ is the number of iterations left (i'm looking at your
-        # listreverseiterator!).
-        try:
-            self._length = len(iterable)
-        except (TypeError, AttributeError):
-            self._length = None
-
-    def cycle(self, *args):
-        """Cycles among the arguments with the current loop index."""
-        if not args:
-            raise TypeError('no items for cycling given')
-        return args[self.index0 % len(args)]
-
-    first = property(lambda x: x.index0 == 0)
-    last = property(lambda x: x._after is _last_iteration)
-    index = property(lambda x: x.index0 + 1)
-    revindex = property(lambda x: x.length - x.index0)
-    revindex0 = property(lambda x: x.length - x.index)
-    depth = property(lambda x: x.depth0 + 1)
-
-    def __len__(self):
-        return self.length
-
-    def __iter__(self):
-        return LoopContextIterator(self)
-
-    def _safe_next(self):
-        try:
-            return next(self._iterator)
-        except StopIteration:
-            return _last_iteration
-
-    @internalcode
-    def loop(self, iterable):
-        if self._recurse is None:
-            raise TypeError('Tried to call non recursive loop.  Maybe you '
-                            "forgot the 'recursive' modifier.")
-        return self._recurse(iterable, self._recurse, self.depth0 + 1)
-
-    # a nifty trick to enhance the error message if someone tried to call
-    # the the loop without or with too many arguments.
-    __call__ = loop
-    del loop
-
-    @property
-    def length(self):
-        if self._length is None:
-            # if was not possible to get the length of the iterator when
-            # the loop context was created (ie: iterating over a generator)
-            # we have to convert the iterable into a sequence and use the
-            # length of that.
-            iterable = tuple(self._iterator)
-            self._iterator = iter(iterable)
-            self._length = len(iterable) + self.index0 + 1
-        return self._length
-
-    def __repr__(self):
-        return '<%s %r/%r>' % (
-            self.__class__.__name__,
-            self.index,
-            self.length
-        )
-
-
- at implements_iterator
-class LoopContextIterator(object):
-    """The iterator for a loop context."""
-    __slots__ = ('context',)
-
-    def __init__(self, context):
-        self.context = context
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        ctx = self.context
-        ctx.index0 += 1
-        if ctx._after is _last_iteration:
-            raise StopIteration()
-        next_elem = ctx._after
-        ctx._after = ctx._safe_next()
-        return next_elem, ctx
-
-
-class Macro(object):
-    """Wraps a macro function."""
-
-    def __init__(self, environment, func, name, arguments, defaults,
-                 catch_kwargs, catch_varargs, caller):
-        self._environment = environment
-        self._func = func
-        self._argument_count = len(arguments)
-        self.name = name
-        self.arguments = arguments
-        self.defaults = defaults
-        self.catch_kwargs = catch_kwargs
-        self.catch_varargs = catch_varargs
-        self.caller = caller
-
-    @internalcode
-    def __call__(self, *args, **kwargs):
-        # try to consume the positional arguments
-        arguments = list(args[:self._argument_count])
-        off = len(arguments)
-
-        # if the number of arguments consumed is not the number of
-        # arguments expected we start filling in keyword arguments
-        # and defaults.
-        if off != self._argument_count:
-            for idx, name in enumerate(self.arguments[len(arguments):]):
-                try:
-                    value = kwargs.pop(name)
-                except KeyError:
-                    try:
-                        value = self.defaults[idx - self._argument_count + off]
-                    except IndexError:
-                        value = self._environment.undefined(
-                            'parameter %r was not provided' % name, name=name)
-                arguments.append(value)
-
-        # it's important that the order of these arguments does not change
-        # if not also changed in the compiler's `function_scoping` method.
-        # the order is caller, keyword arguments, positional arguments!
-        if self.caller:
-            caller = kwargs.pop('caller', None)
-            if caller is None:
-                caller = self._environment.undefined('No caller defined',
-                                                     name='caller')
-            arguments.append(caller)
-        if self.catch_kwargs:
-            arguments.append(kwargs)
-        elif kwargs:
-            raise TypeError('macro %r takes no keyword argument %r' %
-                            (self.name, next(iter(kwargs))))
-        if self.catch_varargs:
-            arguments.append(args[self._argument_count:])
-        elif len(args) > self._argument_count:
-            raise TypeError('macro %r takes not more than %d argument(s)' %
-                            (self.name, len(self.arguments)))
-        return self._func(*arguments)
-
-    def __repr__(self):
-        return '<%s %s>' % (
-            self.__class__.__name__,
-            self.name is None and 'anonymous' or repr(self.name)
-        )
-
-
- at implements_to_string
-class Undefined(object):
-    """The default undefined type.  This undefined type can be printed and
-    iterated over, but every other access will raise an :exc:`UndefinedError`:
-
-    >>> foo = Undefined(name='foo')
-    >>> str(foo)
-    ''
-    >>> not foo
-    True
-    >>> foo + 42
-    Traceback (most recent call last):
-      ...
-    UndefinedError: 'foo' is undefined
-    """
-    __slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name',
-                 '_undefined_exception')
-
-    def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
-        self._undefined_hint = hint
-        self._undefined_obj = obj
-        self._undefined_name = name
-        self._undefined_exception = exc
-
-    @internalcode
-    def _fail_with_undefined_error(self, *args, **kwargs):
-        """Regular callback function for undefined objects that raises an
-        `UndefinedError` on call.
-        """
-        if self._undefined_hint is None:
-            if self._undefined_obj is missing:
-                hint = '%r is undefined' % self._undefined_name
-            elif not isinstance(self._undefined_name, string_types):
-                hint = '%s has no element %r' % (
-                    object_type_repr(self._undefined_obj),
-                    self._undefined_name
-                )
-            else:
-                hint = '%r has no attribute %r' % (
-                    object_type_repr(self._undefined_obj),
-                    self._undefined_name
-                )
-        else:
-            hint = self._undefined_hint
-        raise self._undefined_exception(hint)
-
-    @internalcode
-    def __getattr__(self, name):
-        if name[:2] == '__':
-            raise AttributeError(name)
-        return self._fail_with_undefined_error()
-
-    __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \
-    __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \
-    __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \
-    __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \
-    __float__ = __complex__ = __pow__ = __rpow__ = \
-        _fail_with_undefined_error
-
-    def __eq__(self, other):
-        return type(self) is type(other)
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __hash__(self):
-        return id(type(self))
-
-    def __str__(self):
-        return u''
-
-    def __len__(self):
-        return 0
-
-    def __iter__(self):
-        if 0:
-            yield None
-
-    def __nonzero__(self):
-        return False
-
-    def __repr__(self):
-        return 'Undefined'
-
-
- at implements_to_string
-class DebugUndefined(Undefined):
-    """An undefined that returns the debug info when printed.
-
-    >>> foo = DebugUndefined(name='foo')
-    >>> str(foo)
-    '{{ foo }}'
-    >>> not foo
-    True
-    >>> foo + 42
-    Traceback (most recent call last):
-      ...
-    UndefinedError: 'foo' is undefined
-    """
-    __slots__ = ()
-
-    def __str__(self):
-        if self._undefined_hint is None:
-            if self._undefined_obj is missing:
-                return u'{{ %s }}' % self._undefined_name
-            return '{{ no such element: %s[%r] }}' % (
-                object_type_repr(self._undefined_obj),
-                self._undefined_name
-            )
-        return u'{{ undefined value printed: %s }}' % self._undefined_hint
-
-
- at implements_to_string
-class StrictUndefined(Undefined):
-    """An undefined that barks on print and iteration as well as boolean
-    tests and all kinds of comparisons.  In other words: you can do nothing
-    with it except checking if it's defined using the `defined` test.
-
-    >>> foo = StrictUndefined(name='foo')
-    >>> str(foo)
-    Traceback (most recent call last):
-      ...
-    UndefinedError: 'foo' is undefined
-    >>> not foo
-    Traceback (most recent call last):
-      ...
-    UndefinedError: 'foo' is undefined
-    >>> foo + 42
-    Traceback (most recent call last):
-      ...
-    UndefinedError: 'foo' is undefined
-    """
-    __slots__ = ()
-    __iter__ = __str__ = __len__ = __nonzero__ = __eq__ = \
-        __ne__ = __bool__ = __hash__ = \
-        Undefined._fail_with_undefined_error
-
-
-# remove remaining slots attributes, after the metaclass did the magic they
-# are unneeded and irritating as they contain wrong data for the subclasses.
-del Undefined.__slots__, DebugUndefined.__slots__, StrictUndefined.__slots__
diff --git a/python/ext-libs/jinja2/sandbox.py b/python/ext-libs/jinja2/sandbox.py
deleted file mode 100644
index da479c1..0000000
--- a/python/ext-libs/jinja2/sandbox.py
+++ /dev/null
@@ -1,368 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.sandbox
-    ~~~~~~~~~~~~~~
-
-    Adds a sandbox layer to Jinja as it was the default behavior in the old
-    Jinja 1 releases.  This sandbox is slightly different from Jinja 1 as the
-    default behavior is easier to use.
-
-    The behavior can be changed by subclassing the environment.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-import operator
-from jinja2.environment import Environment
-from jinja2.exceptions import SecurityError
-from jinja2._compat import string_types, function_type, method_type, \
-     traceback_type, code_type, frame_type, generator_type, PY2
-
-
-#: maximum number of items a range may produce
-MAX_RANGE = 100000
-
-#: attributes of function objects that are considered unsafe.
-UNSAFE_FUNCTION_ATTRIBUTES = set(['func_closure', 'func_code', 'func_dict',
-                                  'func_defaults', 'func_globals'])
-
-#: unsafe method attributes.  function attributes are unsafe for methods too
-UNSAFE_METHOD_ATTRIBUTES = set(['im_class', 'im_func', 'im_self'])
-
-#: unsafe generator attirbutes.
-UNSAFE_GENERATOR_ATTRIBUTES = set(['gi_frame', 'gi_code'])
-
-# On versions > python 2 the special attributes on functions are gone,
-# but they remain on methods and generators for whatever reason.
-if not PY2:
-    UNSAFE_FUNCTION_ATTRIBUTES = set()
-
-import warnings
-
-# make sure we don't warn in python 2.6 about stuff we don't care about
-warnings.filterwarnings('ignore', 'the sets module', DeprecationWarning,
-                        module='jinja2.sandbox')
-
-from collections import deque
-
-_mutable_set_types = (set,)
-_mutable_mapping_types = (dict,)
-_mutable_sequence_types = (list,)
-
-
-# on python 2.x we can register the user collection types
-try:
-    from UserDict import UserDict, DictMixin
-    from UserList import UserList
-    _mutable_mapping_types += (UserDict, DictMixin)
-    _mutable_set_types += (UserList,)
-except ImportError:
-    pass
-
-# if sets is still available, register the mutable set from there as well
-try:
-    from sets import Set
-    _mutable_set_types += (Set,)
-except ImportError:
-    pass
-
-#: register Python 2.6 abstract base classes
-try:
-    from collections import MutableSet, MutableMapping, MutableSequence
-    _mutable_set_types += (MutableSet,)
-    _mutable_mapping_types += (MutableMapping,)
-    _mutable_sequence_types += (MutableSequence,)
-except ImportError:
-    pass
-
-_mutable_spec = (
-    (_mutable_set_types, frozenset([
-        'add', 'clear', 'difference_update', 'discard', 'pop', 'remove',
-        'symmetric_difference_update', 'update'
-    ])),
-    (_mutable_mapping_types, frozenset([
-        'clear', 'pop', 'popitem', 'setdefault', 'update'
-    ])),
-    (_mutable_sequence_types, frozenset([
-        'append', 'reverse', 'insert', 'sort', 'extend', 'remove'
-    ])),
-    (deque, frozenset([
-        'append', 'appendleft', 'clear', 'extend', 'extendleft', 'pop',
-        'popleft', 'remove', 'rotate'
-    ]))
-)
-
-
-def safe_range(*args):
-    """A range that can't generate ranges with a length of more than
-    MAX_RANGE items.
-    """
-    rng = range(*args)
-    if len(rng) > MAX_RANGE:
-        raise OverflowError('range too big, maximum size for range is %d' %
-                            MAX_RANGE)
-    return rng
-
-
-def unsafe(f):
-    """Marks a function or method as unsafe.
-
-    ::
-
-        @unsafe
-        def delete(self):
-            pass
-    """
-    f.unsafe_callable = True
-    return f
-
-
-def is_internal_attribute(obj, attr):
-    """Test if the attribute given is an internal python attribute.  For
-    example this function returns `True` for the `func_code` attribute of
-    python objects.  This is useful if the environment method
-    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
-
-    >>> from jinja2.sandbox import is_internal_attribute
-    >>> is_internal_attribute(lambda: None, "func_code")
-    True
-    >>> is_internal_attribute((lambda x:x).func_code, 'co_code')
-    True
-    >>> is_internal_attribute(str, "upper")
-    False
-    """
-    if isinstance(obj, function_type):
-        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
-            return True
-    elif isinstance(obj, method_type):
-        if attr in UNSAFE_FUNCTION_ATTRIBUTES or \
-           attr in UNSAFE_METHOD_ATTRIBUTES:
-            return True
-    elif isinstance(obj, type):
-        if attr == 'mro':
-            return True
-    elif isinstance(obj, (code_type, traceback_type, frame_type)):
-        return True
-    elif isinstance(obj, generator_type):
-        if attr in UNSAFE_GENERATOR_ATTRIBUTES:
-            return True
-    return attr.startswith('__')
-
-
-def modifies_known_mutable(obj, attr):
-    """This function checks if an attribute on a builtin mutable object
-    (list, dict, set or deque) would modify it if called.  It also supports
-    the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
-    with Python 2.6 onwards the abstract base classes `MutableSet`,
-    `MutableMapping`, and `MutableSequence`.
-
-    >>> modifies_known_mutable({}, "clear")
-    True
-    >>> modifies_known_mutable({}, "keys")
-    False
-    >>> modifies_known_mutable([], "append")
-    True
-    >>> modifies_known_mutable([], "index")
-    False
-
-    If called with an unsupported object (such as unicode) `False` is
-    returned.
-
-    >>> modifies_known_mutable("foo", "upper")
-    False
-    """
-    for typespec, unsafe in _mutable_spec:
-        if isinstance(obj, typespec):
-            return attr in unsafe
-    return False
-
-
-class SandboxedEnvironment(Environment):
-    """The sandboxed environment.  It works like the regular environment but
-    tells the compiler to generate sandboxed code.  Additionally subclasses of
-    this environment may override the methods that tell the runtime what
-    attributes or functions are safe to access.
-
-    If the template tries to access insecure code a :exc:`SecurityError` is
-    raised.  However also other exceptions may occour during the rendering so
-    the caller has to ensure that all exceptions are catched.
-    """
-    sandboxed = True
-
-    #: default callback table for the binary operators.  A copy of this is
-    #: available on each instance of a sandboxed environment as
-    #: :attr:`binop_table`
-    default_binop_table = {
-        '+':        operator.add,
-        '-':        operator.sub,
-        '*':        operator.mul,
-        '/':        operator.truediv,
-        '//':       operator.floordiv,
-        '**':       operator.pow,
-        '%':        operator.mod
-    }
-
-    #: default callback table for the unary operators.  A copy of this is
-    #: available on each instance of a sandboxed environment as
-    #: :attr:`unop_table`
-    default_unop_table = {
-        '+':        operator.pos,
-        '-':        operator.neg
-    }
-
-    #: a set of binary operators that should be intercepted.  Each operator
-    #: that is added to this set (empty by default) is delegated to the
-    #: :meth:`call_binop` method that will perform the operator.  The default
-    #: operator callback is specified by :attr:`binop_table`.
-    #:
-    #: The following binary operators are interceptable:
-    #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
-    #:
-    #: The default operation form the operator table corresponds to the
-    #: builtin function.  Intercepted calls are always slower than the native
-    #: operator call, so make sure only to intercept the ones you are
-    #: interested in.
-    #:
-    #: .. versionadded:: 2.6
-    intercepted_binops = frozenset()
-
-    #: a set of unary operators that should be intercepted.  Each operator
-    #: that is added to this set (empty by default) is delegated to the
-    #: :meth:`call_unop` method that will perform the operator.  The default
-    #: operator callback is specified by :attr:`unop_table`.
-    #:
-    #: The following unary operators are interceptable: ``+``, ``-``
-    #:
-    #: The default operation form the operator table corresponds to the
-    #: builtin function.  Intercepted calls are always slower than the native
-    #: operator call, so make sure only to intercept the ones you are
-    #: interested in.
-    #:
-    #: .. versionadded:: 2.6
-    intercepted_unops = frozenset()
-
-    def intercept_unop(self, operator):
-        """Called during template compilation with the name of a unary
-        operator to check if it should be intercepted at runtime.  If this
-        method returns `True`, :meth:`call_unop` is excuted for this unary
-        operator.  The default implementation of :meth:`call_unop` will use
-        the :attr:`unop_table` dictionary to perform the operator with the
-        same logic as the builtin one.
-
-        The following unary operators are interceptable: ``+`` and ``-``
-
-        Intercepted calls are always slower than the native operator call,
-        so make sure only to intercept the ones you are interested in.
-
-        .. versionadded:: 2.6
-        """
-        return False
-
-
-    def __init__(self, *args, **kwargs):
-        Environment.__init__(self, *args, **kwargs)
-        self.globals['range'] = safe_range
-        self.binop_table = self.default_binop_table.copy()
-        self.unop_table = self.default_unop_table.copy()
-
-    def is_safe_attribute(self, obj, attr, value):
-        """The sandboxed environment will call this method to check if the
-        attribute of an object is safe to access.  Per default all attributes
-        starting with an underscore are considered private as well as the
-        special attributes of internal python objects as returned by the
-        :func:`is_internal_attribute` function.
-        """
-        return not (attr.startswith('_') or is_internal_attribute(obj, attr))
-
-    def is_safe_callable(self, obj):
-        """Check if an object is safely callable.  Per default a function is
-        considered safe unless the `unsafe_callable` attribute exists and is
-        True.  Override this method to alter the behavior, but this won't
-        affect the `unsafe` decorator from this module.
-        """
-        return not (getattr(obj, 'unsafe_callable', False) or
-                    getattr(obj, 'alters_data', False))
-
-    def call_binop(self, context, operator, left, right):
-        """For intercepted binary operator calls (:meth:`intercepted_binops`)
-        this function is executed instead of the builtin operator.  This can
-        be used to fine tune the behavior of certain operators.
-
-        .. versionadded:: 2.6
-        """
-        return self.binop_table[operator](left, right)
-
-    def call_unop(self, context, operator, arg):
-        """For intercepted unary operator calls (:meth:`intercepted_unops`)
-        this function is executed instead of the builtin operator.  This can
-        be used to fine tune the behavior of certain operators.
-
-        .. versionadded:: 2.6
-        """
-        return self.unop_table[operator](arg)
-
-    def getitem(self, obj, argument):
-        """Subscribe an object from sandboxed code."""
-        try:
-            return obj[argument]
-        except (TypeError, LookupError):
-            if isinstance(argument, string_types):
-                try:
-                    attr = str(argument)
-                except Exception:
-                    pass
-                else:
-                    try:
-                        value = getattr(obj, attr)
-                    except AttributeError:
-                        pass
-                    else:
-                        if self.is_safe_attribute(obj, argument, value):
-                            return value
-                        return self.unsafe_undefined(obj, argument)
-        return self.undefined(obj=obj, name=argument)
-
-    def getattr(self, obj, attribute):
-        """Subscribe an object from sandboxed code and prefer the
-        attribute.  The attribute passed *must* be a bytestring.
-        """
-        try:
-            value = getattr(obj, attribute)
-        except AttributeError:
-            try:
-                return obj[attribute]
-            except (TypeError, LookupError):
-                pass
-        else:
-            if self.is_safe_attribute(obj, attribute, value):
-                return value
-            return self.unsafe_undefined(obj, attribute)
-        return self.undefined(obj=obj, name=attribute)
-
-    def unsafe_undefined(self, obj, attribute):
-        """Return an undefined object for unsafe attributes."""
-        return self.undefined('access to attribute %r of %r '
-                              'object is unsafe.' % (
-            attribute,
-            obj.__class__.__name__
-        ), name=attribute, obj=obj, exc=SecurityError)
-
-    def call(__self, __context, __obj, *args, **kwargs):
-        """Call an object from sandboxed code."""
-        # the double prefixes are to avoid double keyword argument
-        # errors when proxying the call.
-        if not __self.is_safe_callable(__obj):
-            raise SecurityError('%r is not safely callable' % (__obj,))
-        return __context.call(__obj, *args, **kwargs)
-
-
-class ImmutableSandboxedEnvironment(SandboxedEnvironment):
-    """Works exactly like the regular `SandboxedEnvironment` but does not
-    permit modifications on the builtin mutable objects `list`, `set`, and
-    `dict` by using the :func:`modifies_known_mutable` function.
-    """
-
-    def is_safe_attribute(self, obj, attr, value):
-        if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
-            return False
-        return not modifies_known_mutable(obj, attr)
diff --git a/python/ext-libs/jinja2/tests.py b/python/ext-libs/jinja2/tests.py
deleted file mode 100644
index 48a3e06..0000000
--- a/python/ext-libs/jinja2/tests.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.tests
-    ~~~~~~~~~~~~
-
-    Jinja test functions. Used with the "is" operator.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import re
-from jinja2.runtime import Undefined
-from jinja2._compat import text_type, string_types, mapping_types
-
-
-number_re = re.compile(r'^-?\d+(\.\d+)?$')
-regex_type = type(number_re)
-
-
-test_callable = callable
-
-
-def test_odd(value):
-    """Return true if the variable is odd."""
-    return value % 2 == 1
-
-
-def test_even(value):
-    """Return true if the variable is even."""
-    return value % 2 == 0
-
-
-def test_divisibleby(value, num):
-    """Check if a variable is divisible by a number."""
-    return value % num == 0
-
-
-def test_defined(value):
-    """Return true if the variable is defined:
-
-    .. sourcecode:: jinja
-
-        {% if variable is defined %}
-            value of variable: {{ variable }}
-        {% else %}
-            variable is not defined
-        {% endif %}
-
-    See the :func:`default` filter for a simple way to set undefined
-    variables.
-    """
-    return not isinstance(value, Undefined)
-
-
-def test_undefined(value):
-    """Like :func:`defined` but the other way round."""
-    return isinstance(value, Undefined)
-
-
-def test_none(value):
-    """Return true if the variable is none."""
-    return value is None
-
-
-def test_lower(value):
-    """Return true if the variable is lowercased."""
-    return text_type(value).islower()
-
-
-def test_upper(value):
-    """Return true if the variable is uppercased."""
-    return text_type(value).isupper()
-
-
-def test_string(value):
-    """Return true if the object is a string."""
-    return isinstance(value, string_types)
-
-
-def test_mapping(value):
-    """Return true if the object is a mapping (dict etc.).
-
-    .. versionadded:: 2.6
-    """
-    return isinstance(value, mapping_types)
-
-
-def test_number(value):
-    """Return true if the variable is a number."""
-    return isinstance(value, (int, float, complex))
-
-
-def test_sequence(value):
-    """Return true if the variable is a sequence. Sequences are variables
-    that are iterable.
-    """
-    try:
-        len(value)
-        value.__getitem__
-    except:
-        return False
-    return True
-
-
-def test_sameas(value, other):
-    """Check if an object points to the same memory address than another
-    object:
-
-    .. sourcecode:: jinja
-
-        {% if foo.attribute is sameas false %}
-            the foo attribute really is the `False` singleton
-        {% endif %}
-    """
-    return value is other
-
-
-def test_iterable(value):
-    """Check if it's possible to iterate over an object."""
-    try:
-        iter(value)
-    except TypeError:
-        return False
-    return True
-
-
-def test_escaped(value):
-    """Check if the value is escaped."""
-    return hasattr(value, '__html__')
-
-
-TESTS = {
-    'odd':              test_odd,
-    'even':             test_even,
-    'divisibleby':      test_divisibleby,
-    'defined':          test_defined,
-    'undefined':        test_undefined,
-    'none':             test_none,
-    'lower':            test_lower,
-    'upper':            test_upper,
-    'string':           test_string,
-    'mapping':          test_mapping,
-    'number':           test_number,
-    'sequence':         test_sequence,
-    'iterable':         test_iterable,
-    'callable':         test_callable,
-    'sameas':           test_sameas,
-    'escaped':          test_escaped
-}
diff --git a/python/ext-libs/jinja2/testsuite/__init__.py b/python/ext-libs/jinja2/testsuite/__init__.py
deleted file mode 100644
index 635c83e..0000000
--- a/python/ext-libs/jinja2/testsuite/__init__.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite
-    ~~~~~~~~~~~~~~~~
-
-    All the unittests of Jinja2.  These tests can be executed by
-    either running run-tests.py using multiple Python versions at
-    the same time.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import os
-import re
-import sys
-import unittest
-from traceback import format_exception
-from jinja2 import loaders
-from jinja2._compat import PY2
-
-
-here = os.path.dirname(os.path.abspath(__file__))
-
-dict_loader = loaders.DictLoader({
-    'justdict.html':        'FOO'
-})
-package_loader = loaders.PackageLoader('jinja2.testsuite.res', 'templates')
-filesystem_loader = loaders.FileSystemLoader(here + '/res/templates')
-function_loader = loaders.FunctionLoader({'justfunction.html': 'FOO'}.get)
-choice_loader = loaders.ChoiceLoader([dict_loader, package_loader])
-prefix_loader = loaders.PrefixLoader({
-    'a':        filesystem_loader,
-    'b':        dict_loader
-})
-
-
-class JinjaTestCase(unittest.TestCase):
-
-    ### use only these methods for testing.  If you need standard
-    ### unittest method, wrap them!
-
-    def setup(self):
-        pass
-
-    def teardown(self):
-        pass
-
-    def setUp(self):
-        self.setup()
-
-    def tearDown(self):
-        self.teardown()
-
-    def assert_equal(self, a, b):
-        return self.assertEqual(a, b)
-
-    def assert_raises(self, *args, **kwargs):
-        return self.assertRaises(*args, **kwargs)
-
-    def assert_traceback_matches(self, callback, expected_tb):
-        try:
-            callback()
-        except Exception as e:
-            tb = format_exception(*sys.exc_info())
-            if re.search(expected_tb.strip(), ''.join(tb)) is None:
-                raise self.fail('Traceback did not match:\n\n%s\nexpected:\n%s'
-                    % (''.join(tb), expected_tb))
-        else:
-            self.fail('Expected exception')
-
-
-def find_all_tests(suite):
-    """Yields all the tests and their names from a given suite."""
-    suites = [suite]
-    while suites:
-        s = suites.pop()
-        try:
-            suites.extend(s)
-        except TypeError:
-            yield s, '%s.%s.%s' % (
-                s.__class__.__module__,
-                s.__class__.__name__,
-                s._testMethodName
-            )
-
-
-class BetterLoader(unittest.TestLoader):
-    """A nicer loader that solves two problems.  First of all we are setting
-    up tests from different sources and we're doing this programmatically
-    which breaks the default loading logic so this is required anyways.
-    Secondly this loader has a nicer interpolation for test names than the
-    default one so you can just do ``run-tests.py ViewTestCase`` and it
-    will work.
-    """
-
-    def getRootSuite(self):
-        return suite()
-
-    def loadTestsFromName(self, name, module=None):
-        root = self.getRootSuite()
-        if name == 'suite':
-            return root
-
-        all_tests = []
-        for testcase, testname in find_all_tests(root):
-            if testname == name or \
-               testname.endswith('.' + name) or \
-               ('.' + name + '.') in testname or \
-               testname.startswith(name + '.'):
-                all_tests.append(testcase)
-
-        if not all_tests:
-            raise LookupError('could not find test case for "%s"' % name)
-
-        if len(all_tests) == 1:
-            return all_tests[0]
-        rv = unittest.TestSuite()
-        for test in all_tests:
-            rv.addTest(test)
-        return rv
-
-
-def suite():
-    from jinja2.testsuite import ext, filters, tests, core_tags, \
-         loader, inheritance, imports, lexnparse, security, api, \
-         regression, debug, utils, bytecode_cache, doctests
-    suite = unittest.TestSuite()
-    suite.addTest(ext.suite())
-    suite.addTest(filters.suite())
-    suite.addTest(tests.suite())
-    suite.addTest(core_tags.suite())
-    suite.addTest(loader.suite())
-    suite.addTest(inheritance.suite())
-    suite.addTest(imports.suite())
-    suite.addTest(lexnparse.suite())
-    suite.addTest(security.suite())
-    suite.addTest(api.suite())
-    suite.addTest(regression.suite())
-    suite.addTest(debug.suite())
-    suite.addTest(utils.suite())
-    suite.addTest(bytecode_cache.suite())
-
-    # doctests will not run on python 3 currently.  Too many issues
-    # with that, do not test that on that platform.
-    if PY2:
-        suite.addTest(doctests.suite())
-
-    return suite
-
-
-def main():
-    """Runs the testsuite as command line application."""
-    try:
-        unittest.main(testLoader=BetterLoader(), defaultTest='suite')
-    except Exception as e:
-        print('Error: %s' % e)
diff --git a/python/ext-libs/jinja2/testsuite/api.py b/python/ext-libs/jinja2/testsuite/api.py
deleted file mode 100644
index 1b68bf8..0000000
--- a/python/ext-libs/jinja2/testsuite/api.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.api
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Tests the public API and related stuff.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-import os
-import tempfile
-import shutil
-
-from jinja2.testsuite import JinjaTestCase
-from jinja2._compat import next
-
-from jinja2 import Environment, Undefined, DebugUndefined, \
-     StrictUndefined, UndefinedError, meta, \
-     is_undefined, Template, DictLoader
-from jinja2.utils import Cycler
-
-env = Environment()
-
-
-class ExtendedAPITestCase(JinjaTestCase):
-
-    def test_item_and_attribute(self):
-        from jinja2.sandbox import SandboxedEnvironment
-
-        for env in Environment(), SandboxedEnvironment():
-            # the |list is necessary for python3
-            tmpl = env.from_string('{{ foo.items()|list }}')
-            assert tmpl.render(foo={'items': 42}) == "[('items', 42)]"
-            tmpl = env.from_string('{{ foo|attr("items")()|list }}')
-            assert tmpl.render(foo={'items': 42}) == "[('items', 42)]"
-            tmpl = env.from_string('{{ foo["items"] }}')
-            assert tmpl.render(foo={'items': 42}) == '42'
-
-    def test_finalizer(self):
-        def finalize_none_empty(value):
-            if value is None:
-                value = u''
-            return value
-        env = Environment(finalize=finalize_none_empty)
-        tmpl = env.from_string('{% for item in seq %}|{{ item }}{% endfor %}')
-        assert tmpl.render(seq=(None, 1, "foo")) == '||1|foo'
-        tmpl = env.from_string('<{{ none }}>')
-        assert tmpl.render() == '<>'
-
-    def test_cycler(self):
-        items = 1, 2, 3
-        c = Cycler(*items)
-        for item in items + items:
-            assert c.current == item
-            assert next(c) == item
-        next(c)
-        assert c.current == 2
-        c.reset()
-        assert c.current == 1
-
-    def test_expressions(self):
-        expr = env.compile_expression("foo")
-        assert expr() is None
-        assert expr(foo=42) == 42
-        expr2 = env.compile_expression("foo", undefined_to_none=False)
-        assert is_undefined(expr2())
-
-        expr = env.compile_expression("42 + foo")
-        assert expr(foo=42) == 84
-
-    def test_template_passthrough(self):
-        t = Template('Content')
-        assert env.get_template(t) is t
-        assert env.select_template([t]) is t
-        assert env.get_or_select_template([t]) is t
-        assert env.get_or_select_template(t) is t
-
-    def test_autoescape_autoselect(self):
-        def select_autoescape(name):
-            if name is None or '.' not in name:
-                return False
-            return name.endswith('.html')
-        env = Environment(autoescape=select_autoescape,
-                          loader=DictLoader({
-            'test.txt':     '{{ foo }}',
-            'test.html':    '{{ foo }}'
-        }))
-        t = env.get_template('test.txt')
-        assert t.render(foo='<foo>') == '<foo>'
-        t = env.get_template('test.html')
-        assert t.render(foo='<foo>') == '<foo>'
-        t = env.from_string('{{ foo }}')
-        assert t.render(foo='<foo>') == '<foo>'
-
-
-class MetaTestCase(JinjaTestCase):
-
-    def test_find_undeclared_variables(self):
-        ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
-        x = meta.find_undeclared_variables(ast)
-        assert x == set(['bar'])
-
-        ast = env.parse('{% set foo = 42 %}{{ bar + foo }}'
-                        '{% macro meh(x) %}{{ x }}{% endmacro %}'
-                        '{% for item in seq %}{{ muh(item) + meh(seq) }}{% endfor %}')
-        x = meta.find_undeclared_variables(ast)
-        assert x == set(['bar', 'seq', 'muh'])
-
-    def test_find_refererenced_templates(self):
-        ast = env.parse('{% extends "layout.html" %}{% include helper %}')
-        i = meta.find_referenced_templates(ast)
-        assert next(i) == 'layout.html'
-        assert next(i) is None
-        assert list(i) == []
-
-        ast = env.parse('{% extends "layout.html" %}'
-                        '{% from "test.html" import a, b as c %}'
-                        '{% import "meh.html" as meh %}'
-                        '{% include "muh.html" %}')
-        i = meta.find_referenced_templates(ast)
-        assert list(i) == ['layout.html', 'test.html', 'meh.html', 'muh.html']
-
-    def test_find_included_templates(self):
-        ast = env.parse('{% include ["foo.html", "bar.html"] %}')
-        i = meta.find_referenced_templates(ast)
-        assert list(i) == ['foo.html', 'bar.html']
-
-        ast = env.parse('{% include ("foo.html", "bar.html") %}')
-        i = meta.find_referenced_templates(ast)
-        assert list(i) == ['foo.html', 'bar.html']
-
-        ast = env.parse('{% include ["foo.html", "bar.html", foo] %}')
-        i = meta.find_referenced_templates(ast)
-        assert list(i) == ['foo.html', 'bar.html', None]
-
-        ast = env.parse('{% include ("foo.html", "bar.html", foo) %}')
-        i = meta.find_referenced_templates(ast)
-        assert list(i) == ['foo.html', 'bar.html', None]
-
-
-class StreamingTestCase(JinjaTestCase):
-
-    def test_basic_streaming(self):
-        tmpl = env.from_string("<ul>{% for item in seq %}<li>{{ loop.index "
-                               "}} - {{ item }}</li>{%- endfor %}</ul>")
-        stream = tmpl.stream(seq=list(range(4)))
-        self.assert_equal(next(stream), '<ul>')
-        self.assert_equal(next(stream), '<li>1 - 0</li>')
-        self.assert_equal(next(stream), '<li>2 - 1</li>')
-        self.assert_equal(next(stream), '<li>3 - 2</li>')
-        self.assert_equal(next(stream), '<li>4 - 3</li>')
-        self.assert_equal(next(stream), '</ul>')
-
-    def test_buffered_streaming(self):
-        tmpl = env.from_string("<ul>{% for item in seq %}<li>{{ loop.index "
-                               "}} - {{ item }}</li>{%- endfor %}</ul>")
-        stream = tmpl.stream(seq=list(range(4)))
-        stream.enable_buffering(size=3)
-        self.assert_equal(next(stream), u'<ul><li>1 - 0</li><li>2 - 1</li>')
-        self.assert_equal(next(stream), u'<li>3 - 2</li><li>4 - 3</li></ul>')
-
-    def test_streaming_behavior(self):
-        tmpl = env.from_string("")
-        stream = tmpl.stream()
-        assert not stream.buffered
-        stream.enable_buffering(20)
-        assert stream.buffered
-        stream.disable_buffering()
-        assert not stream.buffered
-
-    def test_dump_stream(self):
-        tmp = tempfile.mkdtemp()
-        try:
-            tmpl = env.from_string(u"\u2713")
-            stream = tmpl.stream()
-            stream.dump(os.path.join(tmp, 'dump.txt'), 'utf-8')
-            with open(os.path.join(tmp, 'dump.txt'), 'rb') as f:
-                self.assertEqual(f.read(), b'\xe2\x9c\x93')
-        finally:
-            shutil.rmtree(tmp)
-
-
-class UndefinedTestCase(JinjaTestCase):
-
-    def test_stopiteration_is_undefined(self):
-        def test():
-            raise StopIteration()
-        t = Template('A{{ test() }}B')
-        assert t.render(test=test) == 'AB'
-        t = Template('A{{ test().missingattribute }}B')
-        self.assert_raises(UndefinedError, t.render, test=test)
-
-    def test_undefined_and_special_attributes(self):
-        try:
-            Undefined('Foo').__dict__
-        except AttributeError:
-            pass
-        else:
-            assert False, "Expected actual attribute error"
-
-    def test_default_undefined(self):
-        env = Environment(undefined=Undefined)
-        self.assert_equal(env.from_string('{{ missing }}').render(), u'')
-        self.assert_raises(UndefinedError,
-                           env.from_string('{{ missing.attribute }}').render)
-        self.assert_equal(env.from_string('{{ missing|list }}').render(), '[]')
-        self.assert_equal(env.from_string('{{ missing is not defined }}').render(), 'True')
-        self.assert_equal(env.from_string('{{ foo.missing }}').render(foo=42), '')
-        self.assert_equal(env.from_string('{{ not missing }}').render(), 'True')
-
-    def test_debug_undefined(self):
-        env = Environment(undefined=DebugUndefined)
-        self.assert_equal(env.from_string('{{ missing }}').render(), '{{ missing }}')
-        self.assert_raises(UndefinedError,
-                           env.from_string('{{ missing.attribute }}').render)
-        self.assert_equal(env.from_string('{{ missing|list }}').render(), '[]')
-        self.assert_equal(env.from_string('{{ missing is not defined }}').render(), 'True')
-        self.assert_equal(env.from_string('{{ foo.missing }}').render(foo=42),
-                          u"{{ no such element: int object['missing'] }}")
-        self.assert_equal(env.from_string('{{ not missing }}').render(), 'True')
-
-    def test_strict_undefined(self):
-        env = Environment(undefined=StrictUndefined)
-        self.assert_raises(UndefinedError, env.from_string('{{ missing }}').render)
-        self.assert_raises(UndefinedError, env.from_string('{{ missing.attribute }}').render)
-        self.assert_raises(UndefinedError, env.from_string('{{ missing|list }}').render)
-        self.assert_equal(env.from_string('{{ missing is not defined }}').render(), 'True')
-        self.assert_raises(UndefinedError, env.from_string('{{ foo.missing }}').render, foo=42)
-        self.assert_raises(UndefinedError, env.from_string('{{ not missing }}').render)
-        self.assert_equal(env.from_string('{{ missing|default("default", true) }}').render(), 'default')
-
-    def test_indexing_gives_undefined(self):
-        t = Template("{{ var[42].foo }}")
-        self.assert_raises(UndefinedError, t.render, var=0)
-
-    def test_none_gives_proper_error(self):
-        try:
-            Environment().getattr(None, 'split')()
-        except UndefinedError as e:
-            assert e.message == "'None' has no attribute 'split'"
-        else:
-            assert False, 'expected exception'
-
-    def test_object_repr(self):
-        try:
-            Undefined(obj=42, name='upper')()
-        except UndefinedError as e:
-            assert e.message == "'int object' has no attribute 'upper'"
-        else:
-            assert False, 'expected exception'
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(ExtendedAPITestCase))
-    suite.addTest(unittest.makeSuite(MetaTestCase))
-    suite.addTest(unittest.makeSuite(StreamingTestCase))
-    suite.addTest(unittest.makeSuite(UndefinedTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/bytecode_cache.py b/python/ext-libs/jinja2/testsuite/bytecode_cache.py
deleted file mode 100644
index 9f5c635..0000000
--- a/python/ext-libs/jinja2/testsuite/bytecode_cache.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.bytecode_cache
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Test bytecode caching
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase, package_loader
-
-from jinja2 import Environment
-from jinja2.bccache import FileSystemBytecodeCache
-from jinja2.exceptions import TemplateNotFound
-
-bytecode_cache = FileSystemBytecodeCache()
-env = Environment(
-    loader=package_loader,
-    bytecode_cache=bytecode_cache,
-)
-
-
-class ByteCodeCacheTestCase(JinjaTestCase):
-
-    def test_simple(self):
-        tmpl = env.get_template('test.html')
-        assert tmpl.render().strip() == 'BAR'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(ByteCodeCacheTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/core_tags.py b/python/ext-libs/jinja2/testsuite/core_tags.py
deleted file mode 100644
index f1a20fd..0000000
--- a/python/ext-libs/jinja2/testsuite/core_tags.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.core_tags
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Test the core tags like for and if.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment, TemplateSyntaxError, UndefinedError, \
-     DictLoader
-
-env = Environment()
-
-
-class ForLoopTestCase(JinjaTestCase):
-
-    def test_simple(self):
-        tmpl = env.from_string('{% for item in seq %}{{ item }}{% endfor %}')
-        assert tmpl.render(seq=list(range(10))) == '0123456789'
-
-    def test_else(self):
-        tmpl = env.from_string('{% for item in seq %}XXX{% else %}...{% endfor %}')
-        assert tmpl.render() == '...'
-
-    def test_empty_blocks(self):
-        tmpl = env.from_string('<{% for item in seq %}{% else %}{% endfor %}>')
-        assert tmpl.render() == '<>'
-
-    def test_context_vars(self):
-        tmpl = env.from_string('''{% for item in seq -%}
-        {{ loop.index }}|{{ loop.index0 }}|{{ loop.revindex }}|{{
-            loop.revindex0 }}|{{ loop.first }}|{{ loop.last }}|{{
-           loop.length }}###{% endfor %}''')
-        one, two, _ = tmpl.render(seq=[0, 1]).split('###')
-        (one_index, one_index0, one_revindex, one_revindex0, one_first,
-         one_last, one_length) = one.split('|')
-        (two_index, two_index0, two_revindex, two_revindex0, two_first,
-         two_last, two_length) = two.split('|')
-
-        assert int(one_index) == 1 and int(two_index) == 2
-        assert int(one_index0) == 0 and int(two_index0) == 1
-        assert int(one_revindex) == 2 and int(two_revindex) == 1
-        assert int(one_revindex0) == 1 and int(two_revindex0) == 0
-        assert one_first == 'True' and two_first == 'False'
-        assert one_last == 'False' and two_last == 'True'
-        assert one_length == two_length == '2'
-
-    def test_cycling(self):
-        tmpl = env.from_string('''{% for item in seq %}{{
-            loop.cycle('<1>', '<2>') }}{% endfor %}{%
-            for item in seq %}{{ loop.cycle(*through) }}{% endfor %}''')
-        output = tmpl.render(seq=list(range(4)), through=('<1>', '<2>'))
-        assert output == '<1><2>' * 4
-
-    def test_scope(self):
-        tmpl = env.from_string('{% for item in seq %}{% endfor %}{{ item }}')
-        output = tmpl.render(seq=list(range(10)))
-        assert not output
-
-    def test_varlen(self):
-        def inner():
-            for item in range(5):
-                yield item
-        tmpl = env.from_string('{% for item in iter %}{{ item }}{% endfor %}')
-        output = tmpl.render(iter=inner())
-        assert output == '01234'
-
-    def test_noniter(self):
-        tmpl = env.from_string('{% for item in none %}...{% endfor %}')
-        self.assert_raises(TypeError, tmpl.render)
-
-    def test_recursive(self):
-        tmpl = env.from_string('''{% for item in seq recursive -%}
-            [{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
-        {%- endfor %}''')
-        assert tmpl.render(seq=[
-            dict(a=1, b=[dict(a=1), dict(a=2)]),
-            dict(a=2, b=[dict(a=1), dict(a=2)]),
-            dict(a=3, b=[dict(a='a')])
-        ]) == '[1<[1][2]>][2<[1][2]>][3<[a]>]'
-
-    def test_recursive_depth0(self):
-        tmpl = env.from_string('''{% for item in seq recursive -%}
-            [{{ loop.depth0 }}:{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
-        {%- endfor %}''')
-        self.assertEqual(tmpl.render(seq=[
-            dict(a=1, b=[dict(a=1), dict(a=2)]),
-            dict(a=2, b=[dict(a=1), dict(a=2)]),
-            dict(a=3, b=[dict(a='a')])
-        ]), '[0:1<[1:1][1:2]>][0:2<[1:1][1:2]>][0:3<[1:a]>]')
-
-    def test_recursive_depth(self):
-        tmpl = env.from_string('''{% for item in seq recursive -%}
-            [{{ loop.depth }}:{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
-        {%- endfor %}''')
-        self.assertEqual(tmpl.render(seq=[
-            dict(a=1, b=[dict(a=1), dict(a=2)]),
-            dict(a=2, b=[dict(a=1), dict(a=2)]),
-            dict(a=3, b=[dict(a='a')])
-        ]), '[1:1<[2:1][2:2]>][1:2<[2:1][2:2]>][1:3<[2:a]>]')
-
-    def test_looploop(self):
-        tmpl = env.from_string('''{% for row in table %}
-            {%- set rowloop = loop -%}
-            {% for cell in row -%}
-                [{{ rowloop.index }}|{{ loop.index }}]
-            {%- endfor %}
-        {%- endfor %}''')
-        assert tmpl.render(table=['ab', 'cd']) == '[1|1][1|2][2|1][2|2]'
-
-    def test_reversed_bug(self):
-        tmpl = env.from_string('{% for i in items %}{{ i }}'
-                               '{% if not loop.last %}'
-                               ',{% endif %}{% endfor %}')
-        assert tmpl.render(items=reversed([3, 2, 1])) == '1,2,3'
-
-    def test_loop_errors(self):
-        tmpl = env.from_string('''{% for item in [1] if loop.index
-                                      == 0 %}...{% endfor %}''')
-        self.assert_raises(UndefinedError, tmpl.render)
-        tmpl = env.from_string('''{% for item in [] %}...{% else
-            %}{{ loop }}{% endfor %}''')
-        assert tmpl.render() == ''
-
-    def test_loop_filter(self):
-        tmpl = env.from_string('{% for item in range(10) if item '
-                               'is even %}[{{ item }}]{% endfor %}')
-        assert tmpl.render() == '[0][2][4][6][8]'
-        tmpl = env.from_string('''
-            {%- for item in range(10) if item is even %}[{{
-                loop.index }}:{{ item }}]{% endfor %}''')
-        assert tmpl.render() == '[1:0][2:2][3:4][4:6][5:8]'
-
-    def test_loop_unassignable(self):
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                           '{% for loop in seq %}...{% endfor %}')
-
-    def test_scoped_special_var(self):
-        t = env.from_string('{% for s in seq %}[{{ loop.first }}{% for c in s %}'
-                            '|{{ loop.first }}{% endfor %}]{% endfor %}')
-        assert t.render(seq=('ab', 'cd')) == '[True|True|False][False|True|False]'
-
-    def test_scoped_loop_var(self):
-        t = env.from_string('{% for x in seq %}{{ loop.first }}'
-                            '{% for y in seq %}{% endfor %}{% endfor %}')
-        assert t.render(seq='ab') == 'TrueFalse'
-        t = env.from_string('{% for x in seq %}{% for y in seq %}'
-                            '{{ loop.first }}{% endfor %}{% endfor %}')
-        assert t.render(seq='ab') == 'TrueFalseTrueFalse'
-
-    def test_recursive_empty_loop_iter(self):
-        t = env.from_string('''
-        {%- for item in foo recursive -%}{%- endfor -%}
-        ''')
-        assert t.render(dict(foo=[])) == ''
-
-    def test_call_in_loop(self):
-        t = env.from_string('''
-        {%- macro do_something() -%}
-            [{{ caller() }}]
-        {%- endmacro %}
-
-        {%- for i in [1, 2, 3] %}
-            {%- call do_something() -%}
-                {{ i }}
-            {%- endcall %}
-        {%- endfor -%}
-        ''')
-        assert t.render() == '[1][2][3]'
-
-    def test_scoping_bug(self):
-        t = env.from_string('''
-        {%- for item in foo %}...{{ item }}...{% endfor %}
-        {%- macro item(a) %}...{{ a }}...{% endmacro %}
-        {{- item(2) -}}
-        ''')
-        assert t.render(foo=(1,)) == '...1......2...'
-
-    def test_unpacking(self):
-        tmpl = env.from_string('{% for a, b, c in [[1, 2, 3]] %}'
-            '{{ a }}|{{ b }}|{{ c }}{% endfor %}')
-        assert tmpl.render() == '1|2|3'
-
-
-class IfConditionTestCase(JinjaTestCase):
-
-    def test_simple(self):
-        tmpl = env.from_string('''{% if true %}...{% endif %}''')
-        assert tmpl.render() == '...'
-
-    def test_elif(self):
-        tmpl = env.from_string('''{% if false %}XXX{% elif true
-            %}...{% else %}XXX{% endif %}''')
-        assert tmpl.render() == '...'
-
-    def test_else(self):
-        tmpl = env.from_string('{% if false %}XXX{% else %}...{% endif %}')
-        assert tmpl.render() == '...'
-
-    def test_empty(self):
-        tmpl = env.from_string('[{% if true %}{% else %}{% endif %}]')
-        assert tmpl.render() == '[]'
-
-    def test_complete(self):
-        tmpl = env.from_string('{% if a %}A{% elif b %}B{% elif c == d %}'
-                               'C{% else %}D{% endif %}')
-        assert tmpl.render(a=0, b=False, c=42, d=42.0) == 'C'
-
-    def test_no_scope(self):
-        tmpl = env.from_string('{% if a %}{% set foo = 1 %}{% endif %}{{ foo }}')
-        assert tmpl.render(a=True) == '1'
-        tmpl = env.from_string('{% if true %}{% set foo = 1 %}{% endif %}{{ foo }}')
-        assert tmpl.render() == '1'
-
-
-class MacrosTestCase(JinjaTestCase):
-    env = Environment(trim_blocks=True)
-
-    def test_simple(self):
-        tmpl = self.env.from_string('''\
-{% macro say_hello(name) %}Hello {{ name }}!{% endmacro %}
-{{ say_hello('Peter') }}''')
-        assert tmpl.render() == 'Hello Peter!'
-
-    def test_scoping(self):
-        tmpl = self.env.from_string('''\
-{% macro level1(data1) %}
-{% macro level2(data2) %}{{ data1 }}|{{ data2 }}{% endmacro %}
-{{ level2('bar') }}{% endmacro %}
-{{ level1('foo') }}''')
-        assert tmpl.render() == 'foo|bar'
-
-    def test_arguments(self):
-        tmpl = self.env.from_string('''\
-{% macro m(a, b, c='c', d='d') %}{{ a }}|{{ b }}|{{ c }}|{{ d }}{% endmacro %}
-{{ m() }}|{{ m('a') }}|{{ m('a', 'b') }}|{{ m(1, 2, 3) }}''')
-        assert tmpl.render() == '||c|d|a||c|d|a|b|c|d|1|2|3|d'
-
-    def test_varargs(self):
-        tmpl = self.env.from_string('''\
-{% macro test() %}{{ varargs|join('|') }}{% endmacro %}\
-{{ test(1, 2, 3) }}''')
-        assert tmpl.render() == '1|2|3'
-
-    def test_simple_call(self):
-        tmpl = self.env.from_string('''\
-{% macro test() %}[[{{ caller() }}]]{% endmacro %}\
-{% call test() %}data{% endcall %}''')
-        assert tmpl.render() == '[[data]]'
-
-    def test_complex_call(self):
-        tmpl = self.env.from_string('''\
-{% macro test() %}[[{{ caller('data') }}]]{% endmacro %}\
-{% call(data) test() %}{{ data }}{% endcall %}''')
-        assert tmpl.render() == '[[data]]'
-
-    def test_caller_undefined(self):
-        tmpl = self.env.from_string('''\
-{% set caller = 42 %}\
-{% macro test() %}{{ caller is not defined }}{% endmacro %}\
-{{ test() }}''')
-        assert tmpl.render() == 'True'
-
-    def test_include(self):
-        self.env = Environment(loader=DictLoader({'include':
-            '{% macro test(foo) %}[{{ foo }}]{% endmacro %}'}))
-        tmpl = self.env.from_string('{% from "include" import test %}{{ test("foo") }}')
-        assert tmpl.render() == '[foo]'
-
-    def test_macro_api(self):
-        tmpl = self.env.from_string('{% macro foo(a, b) %}{% endmacro %}'
-                               '{% macro bar() %}{{ varargs }}{{ kwargs }}{% endmacro %}'
-                               '{% macro baz() %}{{ caller() }}{% endmacro %}')
-        assert tmpl.module.foo.arguments == ('a', 'b')
-        assert tmpl.module.foo.defaults == ()
-        assert tmpl.module.foo.name == 'foo'
-        assert not tmpl.module.foo.caller
-        assert not tmpl.module.foo.catch_kwargs
-        assert not tmpl.module.foo.catch_varargs
-        assert tmpl.module.bar.arguments == ()
-        assert tmpl.module.bar.defaults == ()
-        assert not tmpl.module.bar.caller
-        assert tmpl.module.bar.catch_kwargs
-        assert tmpl.module.bar.catch_varargs
-        assert tmpl.module.baz.caller
-
-    def test_callself(self):
-        tmpl = self.env.from_string('{% macro foo(x) %}{{ x }}{% if x > 1 %}|'
-                                    '{{ foo(x - 1) }}{% endif %}{% endmacro %}'
-                                    '{{ foo(5) }}')
-        assert tmpl.render() == '5|4|3|2|1'
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(ForLoopTestCase))
-    suite.addTest(unittest.makeSuite(IfConditionTestCase))
-    suite.addTest(unittest.makeSuite(MacrosTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/debug.py b/python/ext-libs/jinja2/testsuite/debug.py
deleted file mode 100644
index 2588a83..0000000
--- a/python/ext-libs/jinja2/testsuite/debug.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.debug
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests the debug system.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase, filesystem_loader
-
-from jinja2 import Environment, TemplateSyntaxError
-
-env = Environment(loader=filesystem_loader)
-
-
-class DebugTestCase(JinjaTestCase):
-
-    def test_runtime_error(self):
-        def test():
-            tmpl.render(fail=lambda: 1 / 0)
-        tmpl = env.get_template('broken.html')
-        self.assert_traceback_matches(test, r'''
-  File ".*?broken.html", line 2, in (top-level template code|<module>)
-    \{\{ fail\(\) \}\}
-  File ".*?debug.pyc?", line \d+, in <lambda>
-    tmpl\.render\(fail=lambda: 1 / 0\)
-ZeroDivisionError: (int(eger)? )?division (or modulo )?by zero
-''')
-
-    def test_syntax_error(self):
-        # XXX: the .*? is necessary for python3 which does not hide
-        # some of the stack frames we don't want to show.  Not sure
-        # what's up with that, but that is not that critical.  Should
-        # be fixed though.
-        self.assert_traceback_matches(lambda: env.get_template('syntaxerror.html'), r'''(?sm)
-  File ".*?syntaxerror.html", line 4, in (template|<module>)
-    \{% endif %\}.*?
-(jinja2\.exceptions\.)?TemplateSyntaxError: Encountered unknown tag 'endif'. Jinja was looking for the following tags: 'endfor' or 'else'. The innermost block that needs to be closed is 'for'.
-    ''')
-
-    def test_regular_syntax_error(self):
-        def test():
-            raise TemplateSyntaxError('wtf', 42)
-        self.assert_traceback_matches(test, r'''
-  File ".*debug.pyc?", line \d+, in test
-    raise TemplateSyntaxError\('wtf', 42\)
-(jinja2\.exceptions\.)?TemplateSyntaxError: wtf
-  line 42''')
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(DebugTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/doctests.py b/python/ext-libs/jinja2/testsuite/doctests.py
deleted file mode 100644
index 616d3b6..0000000
--- a/python/ext-libs/jinja2/testsuite/doctests.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.doctests
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    The doctests.  Collects all tests we want to test from
-    the Jinja modules.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-import doctest
-
-
-def suite():
-    from jinja2 import utils, sandbox, runtime, meta, loaders, \
-        ext, environment, bccache, nodes
-    suite = unittest.TestSuite()
-    suite.addTest(doctest.DocTestSuite(utils))
-    suite.addTest(doctest.DocTestSuite(sandbox))
-    suite.addTest(doctest.DocTestSuite(runtime))
-    suite.addTest(doctest.DocTestSuite(meta))
-    suite.addTest(doctest.DocTestSuite(loaders))
-    suite.addTest(doctest.DocTestSuite(ext))
-    suite.addTest(doctest.DocTestSuite(environment))
-    suite.addTest(doctest.DocTestSuite(bccache))
-    suite.addTest(doctest.DocTestSuite(nodes))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/ext.py b/python/ext-libs/jinja2/testsuite/ext.py
deleted file mode 100644
index 0f93be9..0000000
--- a/python/ext-libs/jinja2/testsuite/ext.py
+++ /dev/null
@@ -1,459 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.ext
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Tests for the extensions.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import re
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment, DictLoader, contextfunction, nodes
-from jinja2.exceptions import TemplateAssertionError
-from jinja2.ext import Extension
-from jinja2.lexer import Token, count_newlines
-from jinja2._compat import next, BytesIO, itervalues, text_type
-
-importable_object = 23
-
-_gettext_re = re.compile(r'_\((.*?)\)(?s)')
-
-
-i18n_templates = {
-    'master.html': '<title>{{ page_title|default(_("missing")) }}</title>'
-                   '{% block body %}{% endblock %}',
-    'child.html': '{% extends "master.html" %}{% block body %}'
-                  '{% trans %}watch out{% endtrans %}{% endblock %}',
-    'plural.html': '{% trans user_count %}One user online{% pluralize %}'
-                   '{{ user_count }} users online{% endtrans %}',
-    'plural2.html': '{% trans user_count=get_user_count() %}{{ user_count }}s'
-                    '{% pluralize %}{{ user_count }}p{% endtrans %}',
-    'stringformat.html': '{{ _("User: %(num)s")|format(num=user_count) }}'
-}
-
-newstyle_i18n_templates = {
-    'master.html': '<title>{{ page_title|default(_("missing")) }}</title>'
-                   '{% block body %}{% endblock %}',
-    'child.html': '{% extends "master.html" %}{% block body %}'
-                  '{% trans %}watch out{% endtrans %}{% endblock %}',
-    'plural.html': '{% trans user_count %}One user online{% pluralize %}'
-                   '{{ user_count }} users online{% endtrans %}',
-    'stringformat.html': '{{ _("User: %(num)s", num=user_count) }}',
-    'ngettext.html': '{{ ngettext("%(num)s apple", "%(num)s apples", apples) }}',
-    'ngettext_long.html': '{% trans num=apples %}{{ num }} apple{% pluralize %}'
-                          '{{ num }} apples{% endtrans %}',
-    'transvars1.html': '{% trans %}User: {{ num }}{% endtrans %}',
-    'transvars2.html': '{% trans num=count %}User: {{ num }}{% endtrans %}',
-    'transvars3.html': '{% trans count=num %}User: {{ count }}{% endtrans %}',
-    'novars.html': '{% trans %}%(hello)s{% endtrans %}',
-    'vars.html': '{% trans %}{{ foo }}%(foo)s{% endtrans %}',
-    'explicitvars.html': '{% trans foo="42" %}%(foo)s{% endtrans %}'
-}
-
-
-languages = {
-    'de': {
-        'missing':                      u'fehlend',
-        'watch out':                    u'pass auf',
-        'One user online':              u'Ein Benutzer online',
-        '%(user_count)s users online':  u'%(user_count)s Benutzer online',
-        'User: %(num)s':                u'Benutzer: %(num)s',
-        'User: %(count)s':              u'Benutzer: %(count)s',
-        '%(num)s apple':                u'%(num)s Apfel',
-        '%(num)s apples':               u'%(num)s Äpfel'
-    }
-}
-
-
- at contextfunction
-def gettext(context, string):
-    language = context.get('LANGUAGE', 'en')
-    return languages.get(language, {}).get(string, string)
-
-
- at contextfunction
-def ngettext(context, s, p, n):
-    language = context.get('LANGUAGE', 'en')
-    if n != 1:
-        return languages.get(language, {}).get(p, p)
-    return languages.get(language, {}).get(s, s)
-
-
-i18n_env = Environment(
-    loader=DictLoader(i18n_templates),
-    extensions=['jinja2.ext.i18n']
-)
-i18n_env.globals.update({
-    '_':            gettext,
-    'gettext':      gettext,
-    'ngettext':     ngettext
-})
-
-newstyle_i18n_env = Environment(
-    loader=DictLoader(newstyle_i18n_templates),
-    extensions=['jinja2.ext.i18n']
-)
-newstyle_i18n_env.install_gettext_callables(gettext, ngettext, newstyle=True)
-
-class TestExtension(Extension):
-    tags = set(['test'])
-    ext_attr = 42
-
-    def parse(self, parser):
-        return nodes.Output([self.call_method('_dump', [
-            nodes.EnvironmentAttribute('sandboxed'),
-            self.attr('ext_attr'),
-            nodes.ImportedName(__name__ + '.importable_object'),
-            nodes.ContextReference()
-        ])]).set_lineno(next(parser.stream).lineno)
-
-    def _dump(self, sandboxed, ext_attr, imported_object, context):
-        return '%s|%s|%s|%s' % (
-            sandboxed,
-            ext_attr,
-            imported_object,
-            context.blocks
-        )
-
-
-class PreprocessorExtension(Extension):
-
-    def preprocess(self, source, name, filename=None):
-        return source.replace('[[TEST]]', '({{ foo }})')
-
-
-class StreamFilterExtension(Extension):
-
-    def filter_stream(self, stream):
-        for token in stream:
-            if token.type == 'data':
-                for t in self.interpolate(token):
-                    yield t
-            else:
-                yield token
-
-    def interpolate(self, token):
-        pos = 0
-        end = len(token.value)
-        lineno = token.lineno
-        while 1:
-            match = _gettext_re.search(token.value, pos)
-            if match is None:
-                break
-            value = token.value[pos:match.start()]
-            if value:
-                yield Token(lineno, 'data', value)
-            lineno += count_newlines(token.value)
-            yield Token(lineno, 'variable_begin', None)
-            yield Token(lineno, 'name', 'gettext')
-            yield Token(lineno, 'lparen', None)
-            yield Token(lineno, 'string', match.group(1))
-            yield Token(lineno, 'rparen', None)
-            yield Token(lineno, 'variable_end', None)
-            pos = match.end()
-        if pos < end:
-            yield Token(lineno, 'data', token.value[pos:])
-
-
-class ExtensionsTestCase(JinjaTestCase):
-
-    def test_extend_late(self):
-        env = Environment()
-        env.add_extension('jinja2.ext.autoescape')
-        t = env.from_string('{% autoescape true %}{{ "<test>" }}{% endautoescape %}')
-        assert t.render() == '<test>'
-
-    def test_loop_controls(self):
-        env = Environment(extensions=['jinja2.ext.loopcontrols'])
-
-        tmpl = env.from_string('''
-            {%- for item in [1, 2, 3, 4] %}
-                {%- if item % 2 == 0 %}{% continue %}{% endif -%}
-                {{ item }}
-            {%- endfor %}''')
-        assert tmpl.render() == '13'
-
-        tmpl = env.from_string('''
-            {%- for item in [1, 2, 3, 4] %}
-                {%- if item > 2 %}{% break %}{% endif -%}
-                {{ item }}
-            {%- endfor %}''')
-        assert tmpl.render() == '12'
-
-    def test_do(self):
-        env = Environment(extensions=['jinja2.ext.do'])
-        tmpl = env.from_string('''
-            {%- set items = [] %}
-            {%- for char in "foo" %}
-                {%- do items.append(loop.index0 ~ char) %}
-            {%- endfor %}{{ items|join(', ') }}''')
-        assert tmpl.render() == '0f, 1o, 2o'
-
-    def test_with(self):
-        env = Environment(extensions=['jinja2.ext.with_'])
-        tmpl = env.from_string('''\
-        {% with a=42, b=23 -%}
-            {{ a }} = {{ b }}
-        {% endwith -%}
-            {{ a }} = {{ b }}\
-        ''')
-        assert [x.strip() for x in tmpl.render(a=1, b=2).splitlines()] \
-            == ['42 = 23', '1 = 2']
-
-    def test_extension_nodes(self):
-        env = Environment(extensions=[TestExtension])
-        tmpl = env.from_string('{% test %}')
-        assert tmpl.render() == 'False|42|23|{}'
-
-    def test_identifier(self):
-        assert TestExtension.identifier == __name__ + '.TestExtension'
-
-    def test_rebinding(self):
-        original = Environment(extensions=[TestExtension])
-        overlay = original.overlay()
-        for env in original, overlay:
-            for ext in itervalues(env.extensions):
-                assert ext.environment is env
-
-    def test_preprocessor_extension(self):
-        env = Environment(extensions=[PreprocessorExtension])
-        tmpl = env.from_string('{[[TEST]]}')
-        assert tmpl.render(foo=42) == '{(42)}'
-
-    def test_streamfilter_extension(self):
-        env = Environment(extensions=[StreamFilterExtension])
-        env.globals['gettext'] = lambda x: x.upper()
-        tmpl = env.from_string('Foo _(bar) Baz')
-        out = tmpl.render()
-        assert out == 'Foo BAR Baz'
-
-    def test_extension_ordering(self):
-        class T1(Extension):
-            priority = 1
-        class T2(Extension):
-            priority = 2
-        env = Environment(extensions=[T1, T2])
-        ext = list(env.iter_extensions())
-        assert ext[0].__class__ is T1
-        assert ext[1].__class__ is T2
-
-
-class InternationalizationTestCase(JinjaTestCase):
-
-    def test_trans(self):
-        tmpl = i18n_env.get_template('child.html')
-        assert tmpl.render(LANGUAGE='de') == '<title>fehlend</title>pass auf'
-
-    def test_trans_plural(self):
-        tmpl = i18n_env.get_template('plural.html')
-        assert tmpl.render(LANGUAGE='de', user_count=1) == 'Ein Benutzer online'
-        assert tmpl.render(LANGUAGE='de', user_count=2) == '2 Benutzer online'
-
-    def test_trans_plural_with_functions(self):
-        tmpl = i18n_env.get_template('plural2.html')
-        def get_user_count():
-            get_user_count.called += 1
-            return 1
-        get_user_count.called = 0
-        assert tmpl.render(LANGUAGE='de', get_user_count=get_user_count) == '1s'
-        assert get_user_count.called == 1
-
-    def test_complex_plural(self):
-        tmpl = i18n_env.from_string('{% trans foo=42, count=2 %}{{ count }} item{% '
-                                    'pluralize count %}{{ count }} items{% endtrans %}')
-        assert tmpl.render() == '2 items'
-        self.assert_raises(TemplateAssertionError, i18n_env.from_string,
-                           '{% trans foo %}...{% pluralize bar %}...{% endtrans %}')
-
-    def test_trans_stringformatting(self):
-        tmpl = i18n_env.get_template('stringformat.html')
-        assert tmpl.render(LANGUAGE='de', user_count=5) == 'Benutzer: 5'
-
-    def test_extract(self):
-        from jinja2.ext import babel_extract
-        source = BytesIO('''
-        {{ gettext('Hello World') }}
-        {% trans %}Hello World{% endtrans %}
-        {% trans %}{{ users }} user{% pluralize %}{{ users }} users{% endtrans %}
-        '''.encode('ascii')) # make python 3 happy
-        assert list(babel_extract(source, ('gettext', 'ngettext', '_'), [], {})) == [
-            (2, 'gettext', u'Hello World', []),
-            (3, 'gettext', u'Hello World', []),
-            (4, 'ngettext', (u'%(users)s user', u'%(users)s users', None), [])
-        ]
-
-    def test_comment_extract(self):
-        from jinja2.ext import babel_extract
-        source = BytesIO('''
-        {# trans first #}
-        {{ gettext('Hello World') }}
-        {% trans %}Hello World{% endtrans %}{# trans second #}
-        {#: third #}
-        {% trans %}{{ users }} user{% pluralize %}{{ users }} users{% endtrans %}
-        '''.encode('utf-8')) # make python 3 happy
-        assert list(babel_extract(source, ('gettext', 'ngettext', '_'), ['trans', ':'], {})) == [
-            (3, 'gettext', u'Hello World', ['first']),
-            (4, 'gettext', u'Hello World', ['second']),
-            (6, 'ngettext', (u'%(users)s user', u'%(users)s users', None), ['third'])
-        ]
-
-
-class NewstyleInternationalizationTestCase(JinjaTestCase):
-
-    def test_trans(self):
-        tmpl = newstyle_i18n_env.get_template('child.html')
-        assert tmpl.render(LANGUAGE='de') == '<title>fehlend</title>pass auf'
-
-    def test_trans_plural(self):
-        tmpl = newstyle_i18n_env.get_template('plural.html')
-        assert tmpl.render(LANGUAGE='de', user_count=1) == 'Ein Benutzer online'
-        assert tmpl.render(LANGUAGE='de', user_count=2) == '2 Benutzer online'
-
-    def test_complex_plural(self):
-        tmpl = newstyle_i18n_env.from_string('{% trans foo=42, count=2 %}{{ count }} item{% '
-                                    'pluralize count %}{{ count }} items{% endtrans %}')
-        assert tmpl.render() == '2 items'
-        self.assert_raises(TemplateAssertionError, i18n_env.from_string,
-                           '{% trans foo %}...{% pluralize bar %}...{% endtrans %}')
-
-    def test_trans_stringformatting(self):
-        tmpl = newstyle_i18n_env.get_template('stringformat.html')
-        assert tmpl.render(LANGUAGE='de', user_count=5) == 'Benutzer: 5'
-
-    def test_newstyle_plural(self):
-        tmpl = newstyle_i18n_env.get_template('ngettext.html')
-        assert tmpl.render(LANGUAGE='de', apples=1) == '1 Apfel'
-        assert tmpl.render(LANGUAGE='de', apples=5) == u'5 Äpfel'
-
-    def test_autoescape_support(self):
-        env = Environment(extensions=['jinja2.ext.autoescape',
-                                      'jinja2.ext.i18n'])
-        env.install_gettext_callables(lambda x: u'<strong>Wert: %(name)s</strong>',
-                                      lambda s, p, n: s, newstyle=True)
-        t = env.from_string('{% autoescape ae %}{{ gettext("foo", name='
-                            '"<test>") }}{% endautoescape %}')
-        assert t.render(ae=True) == '<strong>Wert: <test></strong>'
-        assert t.render(ae=False) == '<strong>Wert: <test></strong>'
-
-    def test_num_used_twice(self):
-        tmpl = newstyle_i18n_env.get_template('ngettext_long.html')
-        assert tmpl.render(apples=5, LANGUAGE='de') == u'5 Äpfel'
-
-    def test_num_called_num(self):
-        source = newstyle_i18n_env.compile('''
-            {% trans num=3 %}{{ num }} apple{% pluralize
-            %}{{ num }} apples{% endtrans %}
-        ''', raw=True)
-        # quite hacky, but the only way to properly test that.  The idea is
-        # that the generated code does not pass num twice (although that
-        # would work) for better performance.  This only works on the
-        # newstyle gettext of course
-        assert re.search(r"l_ngettext, u?'\%\(num\)s apple', u?'\%\(num\)s "
-                         r"apples', 3", source) is not None
-
-    def test_trans_vars(self):
-        t1 = newstyle_i18n_env.get_template('transvars1.html')
-        t2 = newstyle_i18n_env.get_template('transvars2.html')
-        t3 = newstyle_i18n_env.get_template('transvars3.html')
-        assert t1.render(num=1, LANGUAGE='de') == 'Benutzer: 1'
-        assert t2.render(count=23, LANGUAGE='de') == 'Benutzer: 23'
-        assert t3.render(num=42, LANGUAGE='de') == 'Benutzer: 42'
-
-    def test_novars_vars_escaping(self):
-        t = newstyle_i18n_env.get_template('novars.html')
-        assert t.render() == '%(hello)s'
-        t = newstyle_i18n_env.get_template('vars.html')
-        assert t.render(foo='42') == '42%(foo)s'
-        t = newstyle_i18n_env.get_template('explicitvars.html')
-        assert t.render() == '%(foo)s'
-
-
-class AutoEscapeTestCase(JinjaTestCase):
-
-    def test_scoped_setting(self):
-        env = Environment(extensions=['jinja2.ext.autoescape'],
-                          autoescape=True)
-        tmpl = env.from_string('''
-            {{ "<HelloWorld>" }}
-            {% autoescape false %}
-                {{ "<HelloWorld>" }}
-            {% endautoescape %}
-            {{ "<HelloWorld>" }}
-        ''')
-        assert tmpl.render().split() == \
-            [u'<HelloWorld>', u'<HelloWorld>', u'<HelloWorld>']
-
-        env = Environment(extensions=['jinja2.ext.autoescape'],
-                          autoescape=False)
-        tmpl = env.from_string('''
-            {{ "<HelloWorld>" }}
-            {% autoescape true %}
-                {{ "<HelloWorld>" }}
-            {% endautoescape %}
-            {{ "<HelloWorld>" }}
-        ''')
-        assert tmpl.render().split() == \
-            [u'<HelloWorld>', u'<HelloWorld>', u'<HelloWorld>']
-
-    def test_nonvolatile(self):
-        env = Environment(extensions=['jinja2.ext.autoescape'],
-                          autoescape=True)
-        tmpl = env.from_string('{{ {"foo": "<test>"}|xmlattr|escape }}')
-        assert tmpl.render() == ' foo="<test>"'
-        tmpl = env.from_string('{% autoescape false %}{{ {"foo": "<test>"}'
-                               '|xmlattr|escape }}{% endautoescape %}')
-        assert tmpl.render() == ' foo="&lt;test&gt;"'
-
-    def test_volatile(self):
-        env = Environment(extensions=['jinja2.ext.autoescape'],
-                          autoescape=True)
-        tmpl = env.from_string('{% autoescape foo %}{{ {"foo": "<test>"}'
-                               '|xmlattr|escape }}{% endautoescape %}')
-        assert tmpl.render(foo=False) == ' foo="&lt;test&gt;"'
-        assert tmpl.render(foo=True) == ' foo="<test>"'
-
-    def test_scoping(self):
-        env = Environment(extensions=['jinja2.ext.autoescape'])
-        tmpl = env.from_string('{% autoescape true %}{% set x = "<x>" %}{{ x }}'
-                               '{% endautoescape %}{{ x }}{{ "<y>" }}')
-        assert tmpl.render(x=1) == '<x>1<y>'
-
-    def test_volatile_scoping(self):
-        env = Environment(extensions=['jinja2.ext.autoescape'])
-        tmplsource = '''
-        {% autoescape val %}
-            {% macro foo(x) %}
-                [{{ x }}]
-            {% endmacro %}
-            {{ foo().__class__.__name__ }}
-        {% endautoescape %}
-        {{ '<testing>' }}
-        '''
-        tmpl = env.from_string(tmplsource)
-        assert tmpl.render(val=True).split()[0] == 'Markup'
-        assert tmpl.render(val=False).split()[0] == text_type.__name__
-
-        # looking at the source we should see <testing> there in raw
-        # (and then escaped as well)
-        env = Environment(extensions=['jinja2.ext.autoescape'])
-        pysource = env.compile(tmplsource, raw=True)
-        assert '<testing>\\n' in pysource
-
-        env = Environment(extensions=['jinja2.ext.autoescape'],
-                          autoescape=True)
-        pysource = env.compile(tmplsource, raw=True)
-        assert '<testing>\\n' in pysource
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(ExtensionsTestCase))
-    suite.addTest(unittest.makeSuite(InternationalizationTestCase))
-    suite.addTest(unittest.makeSuite(NewstyleInternationalizationTestCase))
-    suite.addTest(unittest.makeSuite(AutoEscapeTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/filters.py b/python/ext-libs/jinja2/testsuite/filters.py
deleted file mode 100644
index 282dd2d..0000000
--- a/python/ext-libs/jinja2/testsuite/filters.py
+++ /dev/null
@@ -1,515 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.filters
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests for the jinja filters.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Markup, Environment
-from jinja2._compat import text_type, implements_to_string
-
-env = Environment()
-
-
-class FilterTestCase(JinjaTestCase):
-
-    def test_filter_calling(self):
-        rv = env.call_filter('sum', [1, 2, 3])
-        self.assert_equal(rv, 6)
-
-    def test_capitalize(self):
-        tmpl = env.from_string('{{ "foo bar"|capitalize }}')
-        assert tmpl.render() == 'Foo bar'
-
-    def test_center(self):
-        tmpl = env.from_string('{{ "foo"|center(9) }}')
-        assert tmpl.render() == '   foo   '
-
-    def test_default(self):
-        tmpl = env.from_string(
-            "{{ missing|default('no') }}|{{ false|default('no') }}|"
-            "{{ false|default('no', true) }}|{{ given|default('no') }}"
-        )
-        assert tmpl.render(given='yes') == 'no|False|no|yes'
-
-    def test_dictsort(self):
-        tmpl = env.from_string(
-            '{{ foo|dictsort }}|'
-            '{{ foo|dictsort(true) }}|'
-            '{{ foo|dictsort(false, "value") }}'
-        )
-        out = tmpl.render(foo={"aa": 0, "b": 1, "c": 2, "AB": 3})
-        assert out == ("[('aa', 0), ('AB', 3), ('b', 1), ('c', 2)]|"
-                       "[('AB', 3), ('aa', 0), ('b', 1), ('c', 2)]|"
-                       "[('aa', 0), ('b', 1), ('c', 2), ('AB', 3)]")
-
-    def test_batch(self):
-        tmpl = env.from_string("{{ foo|batch(3)|list }}|"
-                               "{{ foo|batch(3, 'X')|list }}")
-        out = tmpl.render(foo=list(range(10)))
-        assert out == ("[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]|"
-                       "[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 'X', 'X']]")
-
-    def test_slice(self):
-        tmpl = env.from_string('{{ foo|slice(3)|list }}|'
-                               '{{ foo|slice(3, "X")|list }}')
-        out = tmpl.render(foo=list(range(10)))
-        assert out == ("[[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]|"
-                       "[[0, 1, 2, 3], [4, 5, 6, 'X'], [7, 8, 9, 'X']]")
-
-    def test_escape(self):
-        tmpl = env.from_string('''{{ '<">&'|escape }}''')
-        out = tmpl.render()
-        assert out == '<">&'
-
-    def test_striptags(self):
-        tmpl = env.from_string('''{{ foo|striptags }}''')
-        out = tmpl.render(foo='  <p>just a small   \n <a href="#">'
-                          'example</a> link</p>\n<p>to a webpage</p> '
-                          '<!-- <p>and some commented stuff</p> -->')
-        assert out == 'just a small example link to a webpage'
-
-    def test_filesizeformat(self):
-        tmpl = env.from_string(
-            '{{ 100|filesizeformat }}|'
-            '{{ 1000|filesizeformat }}|'
-            '{{ 1000000|filesizeformat }}|'
-            '{{ 1000000000|filesizeformat }}|'
-            '{{ 1000000000000|filesizeformat }}|'
-            '{{ 100|filesizeformat(true) }}|'
-            '{{ 1000|filesizeformat(true) }}|'
-            '{{ 1000000|filesizeformat(true) }}|'
-            '{{ 1000000000|filesizeformat(true) }}|'
-            '{{ 1000000000000|filesizeformat(true) }}'
-        )
-        out = tmpl.render()
-        self.assert_equal(out, (
-            '100 Bytes|1.0 kB|1.0 MB|1.0 GB|1.0 TB|100 Bytes|'
-            '1000 Bytes|976.6 KiB|953.7 MiB|931.3 GiB'
-        ))
-
-    def test_filesizeformat_issue59(self):
-        tmpl = env.from_string(
-            '{{ 300|filesizeformat }}|'
-            '{{ 3000|filesizeformat }}|'
-            '{{ 3000000|filesizeformat }}|'
-            '{{ 3000000000|filesizeformat }}|'
-            '{{ 3000000000000|filesizeformat }}|'
-            '{{ 300|filesizeformat(true) }}|'
-            '{{ 3000|filesizeformat(true) }}|'
-            '{{ 3000000|filesizeformat(true) }}'
-        )
-        out = tmpl.render()
-        self.assert_equal(out, (
-            '300 Bytes|3.0 kB|3.0 MB|3.0 GB|3.0 TB|300 Bytes|'
-            '2.9 KiB|2.9 MiB'
-        ))
-
-
-    def test_first(self):
-        tmpl = env.from_string('{{ foo|first }}')
-        out = tmpl.render(foo=list(range(10)))
-        assert out == '0'
-
-    def test_float(self):
-        tmpl = env.from_string('{{ "42"|float }}|'
-                               '{{ "ajsghasjgd"|float }}|'
-                               '{{ "32.32"|float }}')
-        out = tmpl.render()
-        assert out == '42.0|0.0|32.32'
-
-    def test_format(self):
-        tmpl = env.from_string('''{{ "%s|%s"|format("a", "b") }}''')
-        out = tmpl.render()
-        assert out == 'a|b'
-
-    def test_indent(self):
-        tmpl = env.from_string('{{ foo|indent(2) }}|{{ foo|indent(2, true) }}')
-        text = '\n'.join([' '.join(['foo', 'bar'] * 2)] * 2)
-        out = tmpl.render(foo=text)
-        assert out == ('foo bar foo bar\n  foo bar foo bar|  '
-                       'foo bar foo bar\n  foo bar foo bar')
-
-    def test_int(self):
-        tmpl = env.from_string('{{ "42"|int }}|{{ "ajsghasjgd"|int }}|'
-                               '{{ "32.32"|int }}')
-        out = tmpl.render()
-        assert out == '42|0|32'
-
-    def test_join(self):
-        tmpl = env.from_string('{{ [1, 2, 3]|join("|") }}')
-        out = tmpl.render()
-        assert out == '1|2|3'
-
-        env2 = Environment(autoescape=True)
-        tmpl = env2.from_string('{{ ["<foo>", "<span>foo</span>"|safe]|join }}')
-        assert tmpl.render() == '<foo><span>foo</span>'
-
-    def test_join_attribute(self):
-        class User(object):
-            def __init__(self, username):
-                self.username = username
-        tmpl = env.from_string('''{{ users|join(', ', 'username') }}''')
-        assert tmpl.render(users=map(User, ['foo', 'bar'])) == 'foo, bar'
-
-    def test_last(self):
-        tmpl = env.from_string('''{{ foo|last }}''')
-        out = tmpl.render(foo=list(range(10)))
-        assert out == '9'
-
-    def test_length(self):
-        tmpl = env.from_string('''{{ "hello world"|length }}''')
-        out = tmpl.render()
-        assert out == '11'
-
-    def test_lower(self):
-        tmpl = env.from_string('''{{ "FOO"|lower }}''')
-        out = tmpl.render()
-        assert out == 'foo'
-
-    def test_pprint(self):
-        from pprint import pformat
-        tmpl = env.from_string('''{{ data|pprint }}''')
-        data = list(range(1000))
-        assert tmpl.render(data=data) == pformat(data)
-
-    def test_random(self):
-        tmpl = env.from_string('''{{ seq|random }}''')
-        seq = list(range(100))
-        for _ in range(10):
-            assert int(tmpl.render(seq=seq)) in seq
-
-    def test_reverse(self):
-        tmpl = env.from_string('{{ "foobar"|reverse|join }}|'
-                               '{{ [1, 2, 3]|reverse|list }}')
-        assert tmpl.render() == 'raboof|[3, 2, 1]'
-
-    def test_string(self):
-        x = [1, 2, 3, 4, 5]
-        tmpl = env.from_string('''{{ obj|string }}''')
-        assert tmpl.render(obj=x) == text_type(x)
-
-    def test_title(self):
-        tmpl = env.from_string('''{{ "foo bar"|title }}''')
-        assert tmpl.render() == "Foo Bar"
-        tmpl = env.from_string('''{{ "foo's bar"|title }}''')
-        assert tmpl.render() == "Foo's Bar"
-        tmpl = env.from_string('''{{ "foo   bar"|title }}''')
-        assert tmpl.render() == "Foo   Bar"
-        tmpl = env.from_string('''{{ "f bar f"|title }}''')
-        assert tmpl.render() == "F Bar F"
-        tmpl = env.from_string('''{{ "foo-bar"|title }}''')
-        assert tmpl.render() == "Foo-Bar"
-        tmpl = env.from_string('''{{ "foo\tbar"|title }}''')
-        assert tmpl.render() == "Foo\tBar"
-        tmpl = env.from_string('''{{ "FOO\tBAR"|title }}''')
-        assert tmpl.render() == "Foo\tBar"
-
-    def test_truncate(self):
-        tmpl = env.from_string(
-            '{{ data|truncate(15, true, ">>>") }}|'
-            '{{ data|truncate(15, false, ">>>") }}|'
-            '{{ smalldata|truncate(15) }}'
-        )
-        out = tmpl.render(data='foobar baz bar' * 1000,
-                          smalldata='foobar baz bar')
-        assert out == 'foobar baz barf>>>|foobar baz >>>|foobar baz bar'
-
-    def test_upper(self):
-        tmpl = env.from_string('{{ "foo"|upper }}')
-        assert tmpl.render() == 'FOO'
-
-    def test_urlize(self):
-        tmpl = env.from_string('{{ "foo http://www.example.com/ bar"|urlize }}')
-        assert tmpl.render() == 'foo <a href="http://www.example.com/">'\
-                                'http://www.example.com/</a> bar'
-
-    def test_wordcount(self):
-        tmpl = env.from_string('{{ "foo bar baz"|wordcount }}')
-        assert tmpl.render() == '3'
-
-    def test_block(self):
-        tmpl = env.from_string('{% filter lower|escape %}<HEHE>{% endfilter %}')
-        assert tmpl.render() == '<hehe>'
-
-    def test_chaining(self):
-        tmpl = env.from_string('''{{ ['<foo>', '<bar>']|first|upper|escape }}''')
-        assert tmpl.render() == '<FOO>'
-
-    def test_sum(self):
-        tmpl = env.from_string('''{{ [1, 2, 3, 4, 5, 6]|sum }}''')
-        assert tmpl.render() == '21'
-
-    def test_sum_attributes(self):
-        tmpl = env.from_string('''{{ values|sum('value') }}''')
-        assert tmpl.render(values=[
-            {'value': 23},
-            {'value': 1},
-            {'value': 18},
-        ]) == '42'
-
-    def test_sum_attributes_nested(self):
-        tmpl = env.from_string('''{{ values|sum('real.value') }}''')
-        assert tmpl.render(values=[
-            {'real': {'value': 23}},
-            {'real': {'value': 1}},
-            {'real': {'value': 18}},
-        ]) == '42'
-
-    def test_sum_attributes_tuple(self):
-        tmpl = env.from_string('''{{ values.items()|sum('1') }}''')
-        assert tmpl.render(values={
-            'foo': 23,
-            'bar': 1,
-            'baz': 18,
-        }) == '42'
-
-    def test_abs(self):
-        tmpl = env.from_string('''{{ -1|abs }}|{{ 1|abs }}''')
-        assert tmpl.render() == '1|1', tmpl.render()
-
-    def test_round_positive(self):
-        tmpl = env.from_string('{{ 2.7|round }}|{{ 2.1|round }}|'
-                               "{{ 2.1234|round(3, 'floor') }}|"
-                               "{{ 2.1|round(0, 'ceil') }}")
-        assert tmpl.render() == '3.0|2.0|2.123|3.0', tmpl.render()
-
-    def test_round_negative(self):
-        tmpl = env.from_string('{{ 21.3|round(-1)}}|'
-                               "{{ 21.3|round(-1, 'ceil')}}|"
-                               "{{ 21.3|round(-1, 'floor')}}")
-        assert tmpl.render() == '20.0|30.0|20.0',tmpl.render()
-
-    def test_xmlattr(self):
-        tmpl = env.from_string("{{ {'foo': 42, 'bar': 23, 'fish': none, "
-                               "'spam': missing, 'blub:blub': '<?>'}|xmlattr }}")
-        out = tmpl.render().split()
-        assert len(out) == 3
-        assert 'foo="42"' in out
-        assert 'bar="23"' in out
-        assert 'blub:blub="<?>"' in out
-
-    def test_sort1(self):
-        tmpl = env.from_string('{{ [2, 3, 1]|sort }}|{{ [2, 3, 1]|sort(true) }}')
-        assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
-
-    def test_sort2(self):
-        tmpl = env.from_string('{{ "".join(["c", "A", "b", "D"]|sort) }}')
-        assert tmpl.render() == 'AbcD'
-
-    def test_sort3(self):
-        tmpl = env.from_string('''{{ ['foo', 'Bar', 'blah']|sort }}''')
-        assert tmpl.render() == "['Bar', 'blah', 'foo']"
-
-    def test_sort4(self):
-        @implements_to_string
-        class Magic(object):
-            def __init__(self, value):
-                self.value = value
-            def __str__(self):
-                return text_type(self.value)
-        tmpl = env.from_string('''{{ items|sort(attribute='value')|join }}''')
-        assert tmpl.render(items=map(Magic, [3, 2, 4, 1])) == '1234'
-
-    def test_groupby(self):
-        tmpl = env.from_string('''
-        {%- for grouper, list in [{'foo': 1, 'bar': 2},
-                                  {'foo': 2, 'bar': 3},
-                                  {'foo': 1, 'bar': 1},
-                                  {'foo': 3, 'bar': 4}]|groupby('foo') -%}
-            {{ grouper }}{% for x in list %}: {{ x.foo }}, {{ x.bar }}{% endfor %}|
-        {%- endfor %}''')
-        assert tmpl.render().split('|') == [
-            "1: 1, 2: 1, 1",
-            "2: 2, 3",
-            "3: 3, 4",
-            ""
-        ]
-
-    def test_groupby_tuple_index(self):
-        tmpl = env.from_string('''
-        {%- for grouper, list in [('a', 1), ('a', 2), ('b', 1)]|groupby(0) -%}
-            {{ grouper }}{% for x in list %}:{{ x.1 }}{% endfor %}|
-        {%- endfor %}''')
-        assert tmpl.render() == 'a:1:2|b:1|'
-
-    def test_groupby_multidot(self):
-        class Date(object):
-            def __init__(self, day, month, year):
-                self.day = day
-                self.month = month
-                self.year = year
-        class Article(object):
-            def __init__(self, title, *date):
-                self.date = Date(*date)
-                self.title = title
-        articles = [
-            Article('aha', 1, 1, 1970),
-            Article('interesting', 2, 1, 1970),
-            Article('really?', 3, 1, 1970),
-            Article('totally not', 1, 1, 1971)
-        ]
-        tmpl = env.from_string('''
-        {%- for year, list in articles|groupby('date.year') -%}
-            {{ year }}{% for x in list %}[{{ x.title }}]{% endfor %}|
-        {%- endfor %}''')
-        assert tmpl.render(articles=articles).split('|') == [
-            '1970[aha][interesting][really?]',
-            '1971[totally not]',
-            ''
-        ]
-
-    def test_filtertag(self):
-        tmpl = env.from_string("{% filter upper|replace('FOO', 'foo') %}"
-                               "foobar{% endfilter %}")
-        assert tmpl.render() == 'fooBAR'
-
-    def test_replace(self):
-        env = Environment()
-        tmpl = env.from_string('{{ string|replace("o", 42) }}')
-        assert tmpl.render(string='<foo>') == '<f4242>'
-        env = Environment(autoescape=True)
-        tmpl = env.from_string('{{ string|replace("o", 42) }}')
-        assert tmpl.render(string='<foo>') == '<f4242>'
-        tmpl = env.from_string('{{ string|replace("<", 42) }}')
-        assert tmpl.render(string='<foo>') == '42foo>'
-        tmpl = env.from_string('{{ string|replace("o", ">x<") }}')
-        assert tmpl.render(string=Markup('foo')) == 'f>x<>x<'
-
-    def test_forceescape(self):
-        tmpl = env.from_string('{{ x|forceescape }}')
-        assert tmpl.render(x=Markup('<div />')) == u'<div />'
-
-    def test_safe(self):
-        env = Environment(autoescape=True)
-        tmpl = env.from_string('{{ "<div>foo</div>"|safe }}')
-        assert tmpl.render() == '<div>foo</div>'
-        tmpl = env.from_string('{{ "<div>foo</div>" }}')
-        assert tmpl.render() == '<div>foo</div>'
-
-    def test_urlencode(self):
-        env = Environment(autoescape=True)
-        tmpl = env.from_string('{{ "Hello, world!"|urlencode }}')
-        assert tmpl.render() == 'Hello%2C%20world%21'
-        tmpl = env.from_string('{{ o|urlencode }}')
-        assert tmpl.render(o=u"Hello, world\u203d") == "Hello%2C%20world%E2%80%BD"
-        assert tmpl.render(o=(("f", 1),)) == "f=1"
-        assert tmpl.render(o=(('f', 1), ("z", 2))) == "f=1&z=2"
-        assert tmpl.render(o=((u"\u203d", 1),)) == "%E2%80%BD=1"
-        assert tmpl.render(o={u"\u203d": 1}) == "%E2%80%BD=1"
-        assert tmpl.render(o={0: 1}) == "0=1"
-
-    def test_simple_map(self):
-        env = Environment()
-        tmpl = env.from_string('{{ ["1", "2", "3"]|map("int")|sum }}')
-        self.assertEqual(tmpl.render(), '6')
-
-    def test_attribute_map(self):
-        class User(object):
-            def __init__(self, name):
-                self.name = name
-        env = Environment()
-        users = [
-            User('john'),
-            User('jane'),
-            User('mike'),
-        ]
-        tmpl = env.from_string('{{ users|map(attribute="name")|join("|") }}')
-        self.assertEqual(tmpl.render(users=users), 'john|jane|mike')
-
-    def test_empty_map(self):
-        env = Environment()
-        tmpl = env.from_string('{{ none|map("upper")|list }}')
-        self.assertEqual(tmpl.render(), '[]')
-
-    def test_simple_select(self):
-        env = Environment()
-        tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|select("odd")|join("|") }}')
-        self.assertEqual(tmpl.render(), '1|3|5')
-
-    def test_bool_select(self):
-        env = Environment()
-        tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|select|join("|") }}')
-        self.assertEqual(tmpl.render(), '1|2|3|4|5')
-
-    def test_simple_reject(self):
-        env = Environment()
-        tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|reject("odd")|join("|") }}')
-        self.assertEqual(tmpl.render(), '2|4')
-
-    def test_bool_reject(self):
-        env = Environment()
-        tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|reject|join("|") }}')
-        self.assertEqual(tmpl.render(), 'None|False|0')
-
-    def test_simple_select_attr(self):
-        class User(object):
-            def __init__(self, name, is_active):
-                self.name = name
-                self.is_active = is_active
-        env = Environment()
-        users = [
-            User('john', True),
-            User('jane', True),
-            User('mike', False),
-        ]
-        tmpl = env.from_string('{{ users|selectattr("is_active")|'
-            'map(attribute="name")|join("|") }}')
-        self.assertEqual(tmpl.render(users=users), 'john|jane')
-
-    def test_simple_reject_attr(self):
-        class User(object):
-            def __init__(self, name, is_active):
-                self.name = name
-                self.is_active = is_active
-        env = Environment()
-        users = [
-            User('john', True),
-            User('jane', True),
-            User('mike', False),
-        ]
-        tmpl = env.from_string('{{ users|rejectattr("is_active")|'
-            'map(attribute="name")|join("|") }}')
-        self.assertEqual(tmpl.render(users=users), 'mike')
-
-    def test_func_select_attr(self):
-        class User(object):
-            def __init__(self, id, name):
-                self.id = id
-                self.name = name
-        env = Environment()
-        users = [
-            User(1, 'john'),
-            User(2, 'jane'),
-            User(3, 'mike'),
-        ]
-        tmpl = env.from_string('{{ users|selectattr("id", "odd")|'
-            'map(attribute="name")|join("|") }}')
-        self.assertEqual(tmpl.render(users=users), 'john|mike')
-
-    def test_func_reject_attr(self):
-        class User(object):
-            def __init__(self, id, name):
-                self.id = id
-                self.name = name
-        env = Environment()
-        users = [
-            User(1, 'john'),
-            User(2, 'jane'),
-            User(3, 'mike'),
-        ]
-        tmpl = env.from_string('{{ users|rejectattr("id", "odd")|'
-            'map(attribute="name")|join("|") }}')
-        self.assertEqual(tmpl.render(users=users), 'jane')
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(FilterTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/imports.py b/python/ext-libs/jinja2/testsuite/imports.py
deleted file mode 100644
index 3db9008..0000000
--- a/python/ext-libs/jinja2/testsuite/imports.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.imports
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests the import features (with includes).
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment, DictLoader
-from jinja2.exceptions import TemplateNotFound, TemplatesNotFound
-
-
-test_env = Environment(loader=DictLoader(dict(
-    module='{% macro test() %}[{{ foo }}|{{ bar }}]{% endmacro %}',
-    header='[{{ foo }}|{{ 23 }}]',
-    o_printer='({{ o }})'
-)))
-test_env.globals['bar'] = 23
-
-
-class ImportsTestCase(JinjaTestCase):
-
-    def test_context_imports(self):
-        t = test_env.from_string('{% import "module" as m %}{{ m.test() }}')
-        assert t.render(foo=42) == '[|23]'
-        t = test_env.from_string('{% import "module" as m without context %}{{ m.test() }}')
-        assert t.render(foo=42) == '[|23]'
-        t = test_env.from_string('{% import "module" as m with context %}{{ m.test() }}')
-        assert t.render(foo=42) == '[42|23]'
-        t = test_env.from_string('{% from "module" import test %}{{ test() }}')
-        assert t.render(foo=42) == '[|23]'
-        t = test_env.from_string('{% from "module" import test without context %}{{ test() }}')
-        assert t.render(foo=42) == '[|23]'
-        t = test_env.from_string('{% from "module" import test with context %}{{ test() }}')
-        assert t.render(foo=42) == '[42|23]'
-
-    def test_trailing_comma(self):
-        test_env.from_string('{% from "foo" import bar, baz with context %}')
-        test_env.from_string('{% from "foo" import bar, baz, with context %}')
-        test_env.from_string('{% from "foo" import bar, with context %}')
-        test_env.from_string('{% from "foo" import bar, with, context %}')
-        test_env.from_string('{% from "foo" import bar, with with context %}')
-
-    def test_exports(self):
-        m = test_env.from_string('''
-            {% macro toplevel() %}...{% endmacro %}
-            {% macro __private() %}...{% endmacro %}
-            {% set variable = 42 %}
-            {% for item in [1] %}
-                {% macro notthere() %}{% endmacro %}
-            {% endfor %}
-        ''').module
-        assert m.toplevel() == '...'
-        assert not hasattr(m, '__missing')
-        assert m.variable == 42
-        assert not hasattr(m, 'notthere')
-
-
-class IncludesTestCase(JinjaTestCase):
-
-    def test_context_include(self):
-        t = test_env.from_string('{% include "header" %}')
-        assert t.render(foo=42) == '[42|23]'
-        t = test_env.from_string('{% include "header" with context %}')
-        assert t.render(foo=42) == '[42|23]'
-        t = test_env.from_string('{% include "header" without context %}')
-        assert t.render(foo=42) == '[|23]'
-
-    def test_choice_includes(self):
-        t = test_env.from_string('{% include ["missing", "header"] %}')
-        assert t.render(foo=42) == '[42|23]'
-
-        t = test_env.from_string('{% include ["missing", "missing2"] ignore missing %}')
-        assert t.render(foo=42) == ''
-
-        t = test_env.from_string('{% include ["missing", "missing2"] %}')
-        self.assert_raises(TemplateNotFound, t.render)
-        try:
-            t.render()
-        except TemplatesNotFound as e:
-            assert e.templates == ['missing', 'missing2']
-            assert e.name == 'missing2'
-        else:
-            assert False, 'thou shalt raise'
-
-        def test_includes(t, **ctx):
-            ctx['foo'] = 42
-            assert t.render(ctx) == '[42|23]'
-
-        t = test_env.from_string('{% include ["missing", "header"] %}')
-        test_includes(t)
-        t = test_env.from_string('{% include x %}')
-        test_includes(t, x=['missing', 'header'])
-        t = test_env.from_string('{% include [x, "header"] %}')
-        test_includes(t, x='missing')
-        t = test_env.from_string('{% include x %}')
-        test_includes(t, x='header')
-        t = test_env.from_string('{% include x %}')
-        test_includes(t, x='header')
-        t = test_env.from_string('{% include [x] %}')
-        test_includes(t, x='header')
-
-    def test_include_ignoring_missing(self):
-        t = test_env.from_string('{% include "missing" %}')
-        self.assert_raises(TemplateNotFound, t.render)
-        for extra in '', 'with context', 'without context':
-            t = test_env.from_string('{% include "missing" ignore missing ' +
-                                     extra + ' %}')
-            assert t.render() == ''
-
-    def test_context_include_with_overrides(self):
-        env = Environment(loader=DictLoader(dict(
-            main="{% for item in [1, 2, 3] %}{% include 'item' %}{% endfor %}",
-            item="{{ item }}"
-        )))
-        assert env.get_template("main").render() == "123"
-
-    def test_unoptimized_scopes(self):
-        t = test_env.from_string("""
-            {% macro outer(o) %}
-            {% macro inner() %}
-            {% include "o_printer" %}
-            {% endmacro %}
-            {{ inner() }}
-            {% endmacro %}
-            {{ outer("FOO") }}
-        """)
-        assert t.render().strip() == '(FOO)'
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(ImportsTestCase))
-    suite.addTest(unittest.makeSuite(IncludesTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/inheritance.py b/python/ext-libs/jinja2/testsuite/inheritance.py
deleted file mode 100644
index e0f51cd..0000000
--- a/python/ext-libs/jinja2/testsuite/inheritance.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.inheritance
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests the template inheritance feature.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment, DictLoader, TemplateError
-
-
-LAYOUTTEMPLATE = '''\
-|{% block block1 %}block 1 from layout{% endblock %}
-|{% block block2 %}block 2 from layout{% endblock %}
-|{% block block3 %}
-{% block block4 %}nested block 4 from layout{% endblock %}
-{% endblock %}|'''
-
-LEVEL1TEMPLATE = '''\
-{% extends "layout" %}
-{% block block1 %}block 1 from level1{% endblock %}'''
-
-LEVEL2TEMPLATE = '''\
-{% extends "level1" %}
-{% block block2 %}{% block block5 %}nested block 5 from level2{%
-endblock %}{% endblock %}'''
-
-LEVEL3TEMPLATE = '''\
-{% extends "level2" %}
-{% block block5 %}block 5 from level3{% endblock %}
-{% block block4 %}block 4 from level3{% endblock %}
-'''
-
-LEVEL4TEMPLATE = '''\
-{% extends "level3" %}
-{% block block3 %}block 3 from level4{% endblock %}
-'''
-
-WORKINGTEMPLATE = '''\
-{% extends "layout" %}
-{% block block1 %}
-  {% if false %}
-    {% block block2 %}
-      this should workd
-    {% endblock %}
-  {% endif %}
-{% endblock %}
-'''
-
-DOUBLEEXTENDS = '''\
-{% extends "layout" %}
-{% extends "layout" %}
-{% block block1 %}
-  {% if false %}
-    {% block block2 %}
-      this should workd
-    {% endblock %}
-  {% endif %}
-{% endblock %}
-'''
-
-
-env = Environment(loader=DictLoader({
-    'layout':       LAYOUTTEMPLATE,
-    'level1':       LEVEL1TEMPLATE,
-    'level2':       LEVEL2TEMPLATE,
-    'level3':       LEVEL3TEMPLATE,
-    'level4':       LEVEL4TEMPLATE,
-    'working':      WORKINGTEMPLATE,
-    'doublee':      DOUBLEEXTENDS,
-}), trim_blocks=True)
-
-
-class InheritanceTestCase(JinjaTestCase):
-
-    def test_layout(self):
-        tmpl = env.get_template('layout')
-        assert tmpl.render() == ('|block 1 from layout|block 2 from '
-                                 'layout|nested block 4 from layout|')
-
-    def test_level1(self):
-        tmpl = env.get_template('level1')
-        assert tmpl.render() == ('|block 1 from level1|block 2 from '
-                                 'layout|nested block 4 from layout|')
-
-    def test_level2(self):
-        tmpl = env.get_template('level2')
-        assert tmpl.render() == ('|block 1 from level1|nested block 5 from '
-                                 'level2|nested block 4 from layout|')
-
-    def test_level3(self):
-        tmpl = env.get_template('level3')
-        assert tmpl.render() == ('|block 1 from level1|block 5 from level3|'
-                                 'block 4 from level3|')
-
-    def test_level4(sel):
-        tmpl = env.get_template('level4')
-        assert tmpl.render() == ('|block 1 from level1|block 5 from '
-                                 'level3|block 3 from level4|')
-
-    def test_super(self):
-        env = Environment(loader=DictLoader({
-            'a': '{% block intro %}INTRO{% endblock %}|'
-                 'BEFORE|{% block data %}INNER{% endblock %}|AFTER',
-            'b': '{% extends "a" %}{% block data %}({{ '
-                 'super() }}){% endblock %}',
-            'c': '{% extends "b" %}{% block intro %}--{{ '
-                 'super() }}--{% endblock %}\n{% block data '
-                 '%}[{{ super() }}]{% endblock %}'
-        }))
-        tmpl = env.get_template('c')
-        assert tmpl.render() == '--INTRO--|BEFORE|[(INNER)]|AFTER'
-
-    def test_working(self):
-        tmpl = env.get_template('working')
-
-    def test_reuse_blocks(self):
-        tmpl = env.from_string('{{ self.foo() }}|{% block foo %}42'
-                               '{% endblock %}|{{ self.foo() }}')
-        assert tmpl.render() == '42|42|42'
-
-    def test_preserve_blocks(self):
-        env = Environment(loader=DictLoader({
-            'a': '{% if false %}{% block x %}A{% endblock %}{% endif %}{{ self.x() }}',
-            'b': '{% extends "a" %}{% block x %}B{{ super() }}{% endblock %}'
-        }))
-        tmpl = env.get_template('b')
-        assert tmpl.render() == 'BA'
-
-    def test_dynamic_inheritance(self):
-        env = Environment(loader=DictLoader({
-            'master1': 'MASTER1{% block x %}{% endblock %}',
-            'master2': 'MASTER2{% block x %}{% endblock %}',
-            'child': '{% extends master %}{% block x %}CHILD{% endblock %}'
-        }))
-        tmpl = env.get_template('child')
-        for m in range(1, 3):
-            assert tmpl.render(master='master%d' % m) == 'MASTER%dCHILD' % m
-
-    def test_multi_inheritance(self):
-        env = Environment(loader=DictLoader({
-            'master1': 'MASTER1{% block x %}{% endblock %}',
-            'master2': 'MASTER2{% block x %}{% endblock %}',
-            'child': '''{% if master %}{% extends master %}{% else %}{% extends
-                        'master1' %}{% endif %}{% block x %}CHILD{% endblock %}'''
-        }))
-        tmpl = env.get_template('child')
-        assert tmpl.render(master='master2') == 'MASTER2CHILD'
-        assert tmpl.render(master='master1') == 'MASTER1CHILD'
-        assert tmpl.render() == 'MASTER1CHILD'
-
-    def test_scoped_block(self):
-        env = Environment(loader=DictLoader({
-            'master.html': '{% for item in seq %}[{% block item scoped %}'
-                           '{% endblock %}]{% endfor %}'
-        }))
-        t = env.from_string('{% extends "master.html" %}{% block item %}'
-                            '{{ item }}{% endblock %}')
-        assert t.render(seq=list(range(5))) == '[0][1][2][3][4]'
-
-    def test_super_in_scoped_block(self):
-        env = Environment(loader=DictLoader({
-            'master.html': '{% for item in seq %}[{% block item scoped %}'
-                           '{{ item }}{% endblock %}]{% endfor %}'
-        }))
-        t = env.from_string('{% extends "master.html" %}{% block item %}'
-                            '{{ super() }}|{{ item * 2 }}{% endblock %}')
-        assert t.render(seq=list(range(5))) == '[0|0][1|2][2|4][3|6][4|8]'
-
-    def test_scoped_block_after_inheritance(self):
-        env = Environment(loader=DictLoader({
-            'layout.html': '''
-            {% block useless %}{% endblock %}
-            ''',
-            'index.html': '''
-            {%- extends 'layout.html' %}
-            {% from 'helpers.html' import foo with context %}
-            {% block useless %}
-                {% for x in [1, 2, 3] %}
-                    {% block testing scoped %}
-                        {{ foo(x) }}
-                    {% endblock %}
-                {% endfor %}
-            {% endblock %}
-            ''',
-            'helpers.html': '''
-            {% macro foo(x) %}{{ the_foo + x }}{% endmacro %}
-            '''
-        }))
-        rv = env.get_template('index.html').render(the_foo=42).split()
-        assert rv == ['43', '44', '45']
-
-
-class BugFixTestCase(JinjaTestCase):
-
-    def test_fixed_macro_scoping_bug(self):
-        assert Environment(loader=DictLoader({
-            'test.html': '''\
-        {% extends 'details.html' %}
-
-        {% macro my_macro() %}
-        my_macro
-        {% endmacro %}
-
-        {% block inner_box %}
-            {{ my_macro() }}
-        {% endblock %}
-            ''',
-            'details.html': '''\
-        {% extends 'standard.html' %}
-
-        {% macro my_macro() %}
-        my_macro
-        {% endmacro %}
-
-        {% block content %}
-            {% block outer_box %}
-                outer_box
-                {% block inner_box %}
-                    inner_box
-                {% endblock %}
-            {% endblock %}
-        {% endblock %}
-        ''',
-            'standard.html': '''
-        {% block content %} {% endblock %}
-        '''
-        })).get_template("test.html").render().split() == [u'outer_box', u'my_macro']
-
-    def test_double_extends(self):
-        """Ensures that a template with more than 1 {% extends ... %} usage
-        raises a ``TemplateError``.
-        """
-        try:
-            tmpl = env.get_template('doublee')
-        except Exception as e:
-            assert isinstance(e, TemplateError)
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(InheritanceTestCase))
-    suite.addTest(unittest.makeSuite(BugFixTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/lexnparse.py b/python/ext-libs/jinja2/testsuite/lexnparse.py
deleted file mode 100644
index bd1c94c..0000000
--- a/python/ext-libs/jinja2/testsuite/lexnparse.py
+++ /dev/null
@@ -1,593 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.lexnparse
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    All the unittests regarding lexing, parsing and syntax.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment, Template, TemplateSyntaxError, \
-     UndefinedError, nodes
-from jinja2._compat import next, iteritems, text_type, PY2
-from jinja2.lexer import Token, TokenStream, TOKEN_EOF, \
-     TOKEN_BLOCK_BEGIN, TOKEN_BLOCK_END
-
-env = Environment()
-
-
-# how does a string look like in jinja syntax?
-if PY2:
-    def jinja_string_repr(string):
-        return repr(string)[1:]
-else:
-    jinja_string_repr = repr
-
-
-class TokenStreamTestCase(JinjaTestCase):
-    test_tokens = [Token(1, TOKEN_BLOCK_BEGIN, ''),
-                   Token(2, TOKEN_BLOCK_END, ''),
-                  ]
-
-    def test_simple(self):
-        ts = TokenStream(self.test_tokens, "foo", "bar")
-        assert ts.current.type is TOKEN_BLOCK_BEGIN
-        assert bool(ts)
-        assert not bool(ts.eos)
-        next(ts)
-        assert ts.current.type is TOKEN_BLOCK_END
-        assert bool(ts)
-        assert not bool(ts.eos)
-        next(ts)
-        assert ts.current.type is TOKEN_EOF
-        assert not bool(ts)
-        assert bool(ts.eos)
-
-    def test_iter(self):
-        token_types = [t.type for t in TokenStream(self.test_tokens, "foo", "bar")]
-        assert token_types == ['block_begin', 'block_end', ]
-
-
-class LexerTestCase(JinjaTestCase):
-
-    def test_raw1(self):
-        tmpl = env.from_string('{% raw %}foo{% endraw %}|'
-                               '{%raw%}{{ bar }}|{% baz %}{%       endraw    %}')
-        assert tmpl.render() == 'foo|{{ bar }}|{% baz %}'
-
-    def test_raw2(self):
-        tmpl = env.from_string('1  {%- raw -%}   2   {%- endraw -%}   3')
-        assert tmpl.render() == '123'
-
-    def test_balancing(self):
-        env = Environment('{%', '%}', '${', '}')
-        tmpl = env.from_string('''{% for item in seq
-            %}${{'foo': item}|upper}{% endfor %}''')
-        assert tmpl.render(seq=list(range(3))) == "{'FOO': 0}{'FOO': 1}{'FOO': 2}"
-
-    def test_comments(self):
-        env = Environment('<!--', '-->', '{', '}')
-        tmpl = env.from_string('''\
-<ul>
-<!--- for item in seq -->
-  <li>{item}</li>
-<!--- endfor -->
-</ul>''')
-        assert tmpl.render(seq=list(range(3))) == ("<ul>\n  <li>0</li>\n  "
-                                             "<li>1</li>\n  <li>2</li>\n</ul>")
-
-    def test_string_escapes(self):
-        for char in u'\0', u'\u2668', u'\xe4', u'\t', u'\r', u'\n':
-            tmpl = env.from_string('{{ %s }}' % jinja_string_repr(char))
-            assert tmpl.render() == char
-        assert env.from_string('{{ "\N{HOT SPRINGS}" }}').render() == u'\u2668'
-
-    def test_bytefallback(self):
-        from pprint import pformat
-        tmpl = env.from_string(u'''{{ 'foo'|pprint }}|{{ 'bär'|pprint }}''')
-        assert tmpl.render() == pformat('foo') + '|' + pformat(u'bär')
-
-    def test_operators(self):
-        from jinja2.lexer import operators
-        for test, expect in iteritems(operators):
-            if test in '([{}])':
-                continue
-            stream = env.lexer.tokenize('{{ %s }}' % test)
-            next(stream)
-            assert stream.current.type == expect
-
-    def test_normalizing(self):
-        for seq in '\r', '\r\n', '\n':
-            env = Environment(newline_sequence=seq)
-            tmpl = env.from_string('1\n2\r\n3\n4\n')
-            result = tmpl.render()
-            assert result.replace(seq, 'X') == '1X2X3X4'
-
-    def test_trailing_newline(self):
-        for keep in [True, False]:
-            env = Environment(keep_trailing_newline=keep)
-            for template,expected in [
-                    ('', {}),
-                    ('no\nnewline', {}),
-                    ('with\nnewline\n', {False: 'with\nnewline'}),
-                    ('with\nseveral\n\n\n', {False: 'with\nseveral\n\n'}),
-                    ]:
-                tmpl = env.from_string(template)
-                expect = expected.get(keep, template)
-                result = tmpl.render()
-                assert result == expect, (keep, template, result, expect)
-
-class ParserTestCase(JinjaTestCase):
-
-    def test_php_syntax(self):
-        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->')
-        tmpl = env.from_string('''\
-<!-- I'm a comment, I'm not interesting -->\
-<? for item in seq -?>
-    <?= item ?>
-<?- endfor ?>''')
-        assert tmpl.render(seq=list(range(5))) == '01234'
-
-    def test_erb_syntax(self):
-        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>')
-        tmpl = env.from_string('''\
-<%# I'm a comment, I'm not interesting %>\
-<% for item in seq -%>
-    <%= item %>
-<%- endfor %>''')
-        assert tmpl.render(seq=list(range(5))) == '01234'
-
-    def test_comment_syntax(self):
-        env = Environment('<!--', '-->', '${', '}', '<!--#', '-->')
-        tmpl = env.from_string('''\
-<!--# I'm a comment, I'm not interesting -->\
-<!-- for item in seq --->
-    ${item}
-<!--- endfor -->''')
-        assert tmpl.render(seq=list(range(5))) == '01234'
-
-    def test_balancing(self):
-        tmpl = env.from_string('''{{{'foo':'bar'}.foo}}''')
-        assert tmpl.render() == 'bar'
-
-    def test_start_comment(self):
-        tmpl = env.from_string('''{# foo comment
-and bar comment #}
-{% macro blub() %}foo{% endmacro %}
-{{ blub() }}''')
-        assert tmpl.render().strip() == 'foo'
-
-    def test_line_syntax(self):
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%')
-        tmpl = env.from_string('''\
-<%# regular comment %>
-% for item in seq:
-    ${item}
-% endfor''')
-        assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
-               list(range(5))
-
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##')
-        tmpl = env.from_string('''\
-<%# regular comment %>
-% for item in seq:
-    ${item} ## the rest of the stuff
-% endfor''')
-        assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
-                list(range(5))
-
-    def test_line_syntax_priority(self):
-        # XXX: why is the whitespace there in front of the newline?
-        env = Environment('{%', '%}', '${', '}', '/*', '*/', '##', '#')
-        tmpl = env.from_string('''\
-/* ignore me.
-   I'm a multiline comment */
-## for item in seq:
-* ${item}          # this is just extra stuff
-## endfor''')
-        assert tmpl.render(seq=[1, 2]).strip() == '* 1\n* 2'
-        env = Environment('{%', '%}', '${', '}', '/*', '*/', '#', '##')
-        tmpl = env.from_string('''\
-/* ignore me.
-   I'm a multiline comment */
-# for item in seq:
-* ${item}          ## this is just extra stuff
-    ## extra stuff i just want to ignore
-# endfor''')
-        assert tmpl.render(seq=[1, 2]).strip() == '* 1\n\n* 2'
-
-    def test_error_messages(self):
-        def assert_error(code, expected):
-            try:
-                Template(code)
-            except TemplateSyntaxError as e:
-                assert str(e) == expected, 'unexpected error message'
-            else:
-                assert False, 'that was supposed to be an error'
-
-        assert_error('{% for item in seq %}...{% endif %}',
-                     "Encountered unknown tag 'endif'. Jinja was looking "
-                     "for the following tags: 'endfor' or 'else'. The "
-                     "innermost block that needs to be closed is 'for'.")
-        assert_error('{% if foo %}{% for item in seq %}...{% endfor %}{% endfor %}',
-                     "Encountered unknown tag 'endfor'. Jinja was looking for "
-                     "the following tags: 'elif' or 'else' or 'endif'. The "
-                     "innermost block that needs to be closed is 'if'.")
-        assert_error('{% if foo %}',
-                     "Unexpected end of template. Jinja was looking for the "
-                     "following tags: 'elif' or 'else' or 'endif'. The "
-                     "innermost block that needs to be closed is 'if'.")
-        assert_error('{% for item in seq %}',
-                     "Unexpected end of template. Jinja was looking for the "
-                     "following tags: 'endfor' or 'else'. The innermost block "
-                     "that needs to be closed is 'for'.")
-        assert_error('{% block foo-bar-baz %}',
-                     "Block names in Jinja have to be valid Python identifiers "
-                     "and may not contain hyphens, use an underscore instead.")
-        assert_error('{% unknown_tag %}',
-                     "Encountered unknown tag 'unknown_tag'.")
-
-
-class SyntaxTestCase(JinjaTestCase):
-
-    def test_call(self):
-        env = Environment()
-        env.globals['foo'] = lambda a, b, c, e, g: a + b + c + e + g
-        tmpl = env.from_string("{{ foo('a', c='d', e='f', *['b'], **{'g': 'h'}) }}")
-        assert tmpl.render() == 'abdfh'
-
-    def test_slicing(self):
-        tmpl = env.from_string('{{ [1, 2, 3][:] }}|{{ [1, 2, 3][::-1] }}')
-        assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
-
-    def test_attr(self):
-        tmpl = env.from_string("{{ foo.bar }}|{{ foo['bar'] }}")
-        assert tmpl.render(foo={'bar': 42}) == '42|42'
-
-    def test_subscript(self):
-        tmpl = env.from_string("{{ foo[0] }}|{{ foo[-1] }}")
-        assert tmpl.render(foo=[0, 1, 2]) == '0|2'
-
-    def test_tuple(self):
-        tmpl = env.from_string('{{ () }}|{{ (1,) }}|{{ (1, 2) }}')
-        assert tmpl.render() == '()|(1,)|(1, 2)'
-
-    def test_math(self):
-        tmpl = env.from_string('{{ (1 + 1 * 2) - 3 / 2 }}|{{ 2**3 }}')
-        assert tmpl.render() == '1.5|8'
-
-    def test_div(self):
-        tmpl = env.from_string('{{ 3 // 2 }}|{{ 3 / 2 }}|{{ 3 % 2 }}')
-        assert tmpl.render() == '1|1.5|1'
-
-    def test_unary(self):
-        tmpl = env.from_string('{{ +3 }}|{{ -3 }}')
-        assert tmpl.render() == '3|-3'
-
-    def test_concat(self):
-        tmpl = env.from_string("{{ [1, 2] ~ 'foo' }}")
-        assert tmpl.render() == '[1, 2]foo'
-
-    def test_compare(self):
-        tmpl = env.from_string('{{ 1 > 0 }}|{{ 1 >= 1 }}|{{ 2 < 3 }}|'
-                               '{{ 2 == 2 }}|{{ 1 <= 1 }}')
-        assert tmpl.render() == 'True|True|True|True|True'
-
-    def test_inop(self):
-        tmpl = env.from_string('{{ 1 in [1, 2, 3] }}|{{ 1 not in [1, 2, 3] }}')
-        assert tmpl.render() == 'True|False'
-
-    def test_literals(self):
-        tmpl = env.from_string('{{ [] }}|{{ {} }}|{{ () }}')
-        assert tmpl.render().lower() == '[]|{}|()'
-
-    def test_bool(self):
-        tmpl = env.from_string('{{ true and false }}|{{ false '
-                               'or true }}|{{ not false }}')
-        assert tmpl.render() == 'False|True|True'
-
-    def test_grouping(self):
-        tmpl = env.from_string('{{ (true and false) or (false and true) and not false }}')
-        assert tmpl.render() == 'False'
-
-    def test_django_attr(self):
-        tmpl = env.from_string('{{ [1, 2, 3].0 }}|{{ [[1]].0.0 }}')
-        assert tmpl.render() == '1|1'
-
-    def test_conditional_expression(self):
-        tmpl = env.from_string('''{{ 0 if true else 1 }}''')
-        assert tmpl.render() == '0'
-
-    def test_short_conditional_expression(self):
-        tmpl = env.from_string('<{{ 1 if false }}>')
-        assert tmpl.render() == '<>'
-
-        tmpl = env.from_string('<{{ (1 if false).bar }}>')
-        self.assert_raises(UndefinedError, tmpl.render)
-
-    def test_filter_priority(self):
-        tmpl = env.from_string('{{ "foo"|upper + "bar"|upper }}')
-        assert tmpl.render() == 'FOOBAR'
-
-    def test_function_calls(self):
-        tests = [
-            (True, '*foo, bar'),
-            (True, '*foo, *bar'),
-            (True, '*foo, bar=42'),
-            (True, '**foo, *bar'),
-            (True, '**foo, bar'),
-            (False, 'foo, bar'),
-            (False, 'foo, bar=42'),
-            (False, 'foo, bar=23, *args'),
-            (False, 'a, b=c, *d, **e'),
-            (False, '*foo, **bar')
-        ]
-        for should_fail, sig in tests:
-            if should_fail:
-                self.assert_raises(TemplateSyntaxError,
-                    env.from_string, '{{ foo(%s) }}' % sig)
-            else:
-                env.from_string('foo(%s)' % sig)
-
-    def test_tuple_expr(self):
-        for tmpl in [
-            '{{ () }}',
-            '{{ (1, 2) }}',
-            '{{ (1, 2,) }}',
-            '{{ 1, }}',
-            '{{ 1, 2 }}',
-            '{% for foo, bar in seq %}...{% endfor %}',
-            '{% for x in foo, bar %}...{% endfor %}',
-            '{% for x in foo, %}...{% endfor %}'
-        ]:
-            assert env.from_string(tmpl)
-
-    def test_trailing_comma(self):
-        tmpl = env.from_string('{{ (1, 2,) }}|{{ [1, 2,] }}|{{ {1: 2,} }}')
-        assert tmpl.render().lower() == '(1, 2)|[1, 2]|{1: 2}'
-
-    def test_block_end_name(self):
-        env.from_string('{% block foo %}...{% endblock foo %}')
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                           '{% block x %}{% endblock y %}')
-
-    def test_constant_casing(self):
-        for const in True, False, None:
-            tmpl = env.from_string('{{ %s }}|{{ %s }}|{{ %s }}' % (
-                str(const), str(const).lower(), str(const).upper()
-            ))
-            assert tmpl.render() == '%s|%s|' % (const, const)
-
-    def test_test_chaining(self):
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                           '{{ foo is string is sequence }}')
-        assert env.from_string('{{ 42 is string or 42 is number }}'
-            ).render() == 'True'
-
-    def test_string_concatenation(self):
-        tmpl = env.from_string('{{ "foo" "bar" "baz" }}')
-        assert tmpl.render() == 'foobarbaz'
-
-    def test_notin(self):
-        bar = range(100)
-        tmpl = env.from_string('''{{ not 42 in bar }}''')
-        assert tmpl.render(bar=bar) == text_type(not 42 in bar)
-
-    def test_implicit_subscribed_tuple(self):
-        class Foo(object):
-            def __getitem__(self, x):
-                return x
-        t = env.from_string('{{ foo[1, 2] }}')
-        assert t.render(foo=Foo()) == u'(1, 2)'
-
-    def test_raw2(self):
-        tmpl = env.from_string('{% raw %}{{ FOO }} and {% BAR %}{% endraw %}')
-        assert tmpl.render() == '{{ FOO }} and {% BAR %}'
-
-    def test_const(self):
-        tmpl = env.from_string('{{ true }}|{{ false }}|{{ none }}|'
-                               '{{ none is defined }}|{{ missing is defined }}')
-        assert tmpl.render() == 'True|False|None|True|False'
-
-    def test_neg_filter_priority(self):
-        node = env.parse('{{ -1|foo }}')
-        assert isinstance(node.body[0].nodes[0], nodes.Filter)
-        assert isinstance(node.body[0].nodes[0].node, nodes.Neg)
-
-    def test_const_assign(self):
-        constass1 = '''{% set true = 42 %}'''
-        constass2 = '''{% for none in seq %}{% endfor %}'''
-        for tmpl in constass1, constass2:
-            self.assert_raises(TemplateSyntaxError, env.from_string, tmpl)
-
-    def test_localset(self):
-        tmpl = env.from_string('''{% set foo = 0 %}\
-{% for item in [1, 2] %}{% set foo = 1 %}{% endfor %}\
-{{ foo }}''')
-        assert tmpl.render() == '0'
-
-    def test_parse_unary(self):
-        tmpl = env.from_string('{{ -foo["bar"] }}')
-        assert tmpl.render(foo={'bar': 42}) == '-42'
-        tmpl = env.from_string('{{ -foo["bar"]|abs }}')
-        assert tmpl.render(foo={'bar': 42}) == '42'
-
-
-class LstripBlocksTestCase(JinjaTestCase):
-
-    def test_lstrip(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {% if True %}\n    {% endif %}''')
-        assert tmpl.render() == "\n"
-
-    def test_lstrip_trim(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''    {% if True %}\n    {% endif %}''')
-        assert tmpl.render() == ""
-
-    def test_no_lstrip(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {%+ if True %}\n    {%+ endif %}''')
-        assert tmpl.render() == "    \n    "
-
-    def test_lstrip_endline(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    hello{% if True %}\n    goodbye{% endif %}''')
-        assert tmpl.render() == "    hello\n    goodbye"
-
-    def test_lstrip_inline(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {% if True %}hello    {% endif %}''')
-        assert tmpl.render() == 'hello    '
-
-    def test_lstrip_nested(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {% if True %}a {% if True %}b {% endif %}c {% endif %}''')
-        assert tmpl.render() == 'a b c '
-
-    def test_lstrip_left_chars(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    abc {% if True %}
-        hello{% endif %}''')
-        assert tmpl.render() == '    abc \n        hello'
-
-    def test_lstrip_embeded_strings(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {% set x = " {% str %} " %}{{ x }}''')
-        assert tmpl.render() == ' {% str %} '
-
-    def test_lstrip_preserve_leading_newlines(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''\n\n\n{% set hello = 1 %}''')
-        assert tmpl.render() == '\n\n\n'
-        
-    def test_lstrip_comment(self):
-        env = Environment(lstrip_blocks=True, trim_blocks=False)
-        tmpl = env.from_string('''    {# if True #}
-hello
-    {#endif#}''')
-        assert tmpl.render() == '\nhello\n'
-
-    def test_lstrip_angle_bracket_simple(self):
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''    <% if True %>hello    <% endif %>''')
-        assert tmpl.render() == 'hello    '
-
-    def test_lstrip_angle_bracket_comment(self):
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''    <%# if True %>hello    <%# endif %>''')
-        assert tmpl.render() == 'hello    '
-
-    def test_lstrip_angle_bracket(self):
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-    <%# regular comment %>
-    <% for item in seq %>
-${item} ## the rest of the stuff
-   <% endfor %>''')
-        assert tmpl.render(seq=range(5)) == \
-                ''.join('%s\n' % x for x in range(5))
-        
-    def test_lstrip_angle_bracket_compact(self):
-        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-    <%#regular comment%>
-    <%for item in seq%>
-${item} ## the rest of the stuff
-   <%endfor%>''')
-        assert tmpl.render(seq=range(5)) == \
-                ''.join('%s\n' % x for x in range(5))
-        
-    def test_php_syntax_with_manual(self):
-        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-    <!-- I'm a comment, I'm not interesting -->
-    <? for item in seq -?>
-        <?= item ?>
-    <?- endfor ?>''')
-        assert tmpl.render(seq=range(5)) == '01234'
-
-    def test_php_syntax(self):
-        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-    <!-- I'm a comment, I'm not interesting -->
-    <? for item in seq ?>
-        <?= item ?>
-    <? endfor ?>''')
-        assert tmpl.render(seq=range(5)) == ''.join('        %s\n' % x for x in range(5))
-
-    def test_php_syntax_compact(self):
-        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-    <!-- I'm a comment, I'm not interesting -->
-    <?for item in seq?>
-        <?=item?>
-    <?endfor?>''')
-        assert tmpl.render(seq=range(5)) == ''.join('        %s\n' % x for x in range(5))
-
-    def test_erb_syntax(self):
-        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
-            lstrip_blocks=True, trim_blocks=True)
-        #env.from_string('')
-        #for n,r in env.lexer.rules.iteritems():
-        #    print n
-        #print env.lexer.rules['root'][0][0].pattern
-        #print "'%s'" % tmpl.render(seq=range(5))
-        tmpl = env.from_string('''\
-<%# I'm a comment, I'm not interesting %>
-    <% for item in seq %>
-    <%= item %>
-    <% endfor %>
-''')
-        assert tmpl.render(seq=range(5)) == ''.join('    %s\n' % x for x in range(5))
-
-    def test_erb_syntax_with_manual(self):
-        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-<%# I'm a comment, I'm not interesting %>
-    <% for item in seq -%>
-        <%= item %>
-    <%- endfor %>''')
-        assert tmpl.render(seq=range(5)) == '01234'
-
-    def test_erb_syntax_no_lstrip(self):
-        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-<%# I'm a comment, I'm not interesting %>
-    <%+ for item in seq -%>
-        <%= item %>
-    <%- endfor %>''')
-        assert tmpl.render(seq=range(5)) == '    01234'
-
-    def test_comment_syntax(self):
-        env = Environment('<!--', '-->', '${', '}', '<!--#', '-->',
-            lstrip_blocks=True, trim_blocks=True)
-        tmpl = env.from_string('''\
-<!--# I'm a comment, I'm not interesting -->\
-<!-- for item in seq --->
-    ${item}
-<!--- endfor -->''')
-        assert tmpl.render(seq=range(5)) == '01234'
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TokenStreamTestCase))
-    suite.addTest(unittest.makeSuite(LexerTestCase))
-    suite.addTest(unittest.makeSuite(ParserTestCase))
-    suite.addTest(unittest.makeSuite(SyntaxTestCase))
-    suite.addTest(unittest.makeSuite(LstripBlocksTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/loader.py b/python/ext-libs/jinja2/testsuite/loader.py
deleted file mode 100644
index a7350aa..0000000
--- a/python/ext-libs/jinja2/testsuite/loader.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.loader
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Test the loaders.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import os
-import sys
-import tempfile
-import shutil
-import unittest
-
-from jinja2.testsuite import JinjaTestCase, dict_loader, \
-     package_loader, filesystem_loader, function_loader, \
-     choice_loader, prefix_loader
-
-from jinja2 import Environment, loaders
-from jinja2._compat import PYPY, PY2
-from jinja2.loaders import split_template_path
-from jinja2.exceptions import TemplateNotFound
-
-
-class LoaderTestCase(JinjaTestCase):
-
-    def test_dict_loader(self):
-        env = Environment(loader=dict_loader)
-        tmpl = env.get_template('justdict.html')
-        assert tmpl.render().strip() == 'FOO'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-    def test_package_loader(self):
-        env = Environment(loader=package_loader)
-        tmpl = env.get_template('test.html')
-        assert tmpl.render().strip() == 'BAR'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-    def test_filesystem_loader(self):
-        env = Environment(loader=filesystem_loader)
-        tmpl = env.get_template('test.html')
-        assert tmpl.render().strip() == 'BAR'
-        tmpl = env.get_template('foo/test.html')
-        assert tmpl.render().strip() == 'FOO'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-    def test_choice_loader(self):
-        env = Environment(loader=choice_loader)
-        tmpl = env.get_template('justdict.html')
-        assert tmpl.render().strip() == 'FOO'
-        tmpl = env.get_template('test.html')
-        assert tmpl.render().strip() == 'BAR'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-    def test_function_loader(self):
-        env = Environment(loader=function_loader)
-        tmpl = env.get_template('justfunction.html')
-        assert tmpl.render().strip() == 'FOO'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
-
-    def test_prefix_loader(self):
-        env = Environment(loader=prefix_loader)
-        tmpl = env.get_template('a/test.html')
-        assert tmpl.render().strip() == 'BAR'
-        tmpl = env.get_template('b/justdict.html')
-        assert tmpl.render().strip() == 'FOO'
-        self.assert_raises(TemplateNotFound, env.get_template, 'missing')
-
-    def test_caching(self):
-        changed = False
-        class TestLoader(loaders.BaseLoader):
-            def get_source(self, environment, template):
-                return u'foo', None, lambda: not changed
-        env = Environment(loader=TestLoader(), cache_size=-1)
-        tmpl = env.get_template('template')
-        assert tmpl is env.get_template('template')
-        changed = True
-        assert tmpl is not env.get_template('template')
-        changed = False
-
-        env = Environment(loader=TestLoader(), cache_size=0)
-        assert env.get_template('template') \
-               is not env.get_template('template')
-
-        env = Environment(loader=TestLoader(), cache_size=2)
-        t1 = env.get_template('one')
-        t2 = env.get_template('two')
-        assert t2 is env.get_template('two')
-        assert t1 is env.get_template('one')
-        t3 = env.get_template('three')
-        assert 'one' in env.cache
-        assert 'two' not in env.cache
-        assert 'three' in env.cache
-
-    def test_dict_loader_cache_invalidates(self):
-        mapping = {'foo': "one"}
-        env = Environment(loader=loaders.DictLoader(mapping))
-        assert env.get_template('foo').render() == "one"
-        mapping['foo'] = "two"
-        assert env.get_template('foo').render() == "two"
-
-    def test_split_template_path(self):
-        assert split_template_path('foo/bar') == ['foo', 'bar']
-        assert split_template_path('./foo/bar') == ['foo', 'bar']
-        self.assert_raises(TemplateNotFound, split_template_path, '../foo')
-
-
-class ModuleLoaderTestCase(JinjaTestCase):
-    archive = None
-
-    def compile_down(self, zip='deflated', py_compile=False):
-        super(ModuleLoaderTestCase, self).setup()
-        log = []
-        self.reg_env = Environment(loader=prefix_loader)
-        if zip is not None:
-            self.archive = tempfile.mkstemp(suffix='.zip')[1]
-        else:
-            self.archive = tempfile.mkdtemp()
-        self.reg_env.compile_templates(self.archive, zip=zip,
-                                       log_function=log.append,
-                                       py_compile=py_compile)
-        self.mod_env = Environment(loader=loaders.ModuleLoader(self.archive))
-        return ''.join(log)
-
-    def teardown(self):
-        super(ModuleLoaderTestCase, self).teardown()
-        if hasattr(self, 'mod_env'):
-            if os.path.isfile(self.archive):
-                os.remove(self.archive)
-            else:
-                shutil.rmtree(self.archive)
-            self.archive = None
-
-    def test_log(self):
-        log = self.compile_down()
-        assert 'Compiled "a/foo/test.html" as ' \
-               'tmpl_a790caf9d669e39ea4d280d597ec891c4ef0404a' in log
-        assert 'Finished compiling templates' in log
-        assert 'Could not compile "a/syntaxerror.html": ' \
-               'Encountered unknown tag \'endif\'' in log
-
-    def _test_common(self):
-        tmpl1 = self.reg_env.get_template('a/test.html')
-        tmpl2 = self.mod_env.get_template('a/test.html')
-        assert tmpl1.render() == tmpl2.render()
-
-        tmpl1 = self.reg_env.get_template('b/justdict.html')
-        tmpl2 = self.mod_env.get_template('b/justdict.html')
-        assert tmpl1.render() == tmpl2.render()
-
-    def test_deflated_zip_compile(self):
-        self.compile_down(zip='deflated')
-        self._test_common()
-
-    def test_stored_zip_compile(self):
-        self.compile_down(zip='stored')
-        self._test_common()
-
-    def test_filesystem_compile(self):
-        self.compile_down(zip=None)
-        self._test_common()
-
-    def test_weak_references(self):
-        self.compile_down()
-        tmpl = self.mod_env.get_template('a/test.html')
-        key = loaders.ModuleLoader.get_template_key('a/test.html')
-        name = self.mod_env.loader.module.__name__
-
-        assert hasattr(self.mod_env.loader.module, key)
-        assert name in sys.modules
-
-        # unset all, ensure the module is gone from sys.modules
-        self.mod_env = tmpl = None
-
-        try:
-            import gc
-            gc.collect()
-        except:
-            pass
-
-        assert name not in sys.modules
-
-    # This test only makes sense on non-pypy python 2
-    if PY2 and not PYPY:
-        def test_byte_compilation(self):
-            log = self.compile_down(py_compile=True)
-            assert 'Byte-compiled "a/test.html"' in log
-            tmpl1 = self.mod_env.get_template('a/test.html')
-            mod = self.mod_env.loader.module. \
-                tmpl_3c4ddf650c1a73df961a6d3d2ce2752f1b8fd490
-            assert mod.__file__.endswith('.pyc')
-
-    def test_choice_loader(self):
-        log = self.compile_down()
-
-        self.mod_env.loader = loaders.ChoiceLoader([
-            self.mod_env.loader,
-            loaders.DictLoader({'DICT_SOURCE': 'DICT_TEMPLATE'})
-        ])
-
-        tmpl1 = self.mod_env.get_template('a/test.html')
-        self.assert_equal(tmpl1.render(), 'BAR')
-        tmpl2 = self.mod_env.get_template('DICT_SOURCE')
-        self.assert_equal(tmpl2.render(), 'DICT_TEMPLATE')
-
-    def test_prefix_loader(self):
-        log = self.compile_down()
-
-        self.mod_env.loader = loaders.PrefixLoader({
-            'MOD':      self.mod_env.loader,
-            'DICT':     loaders.DictLoader({'test.html': 'DICT_TEMPLATE'})
-        })
-
-        tmpl1 = self.mod_env.get_template('MOD/a/test.html')
-        self.assert_equal(tmpl1.render(), 'BAR')
-        tmpl2 = self.mod_env.get_template('DICT/test.html')
-        self.assert_equal(tmpl2.render(), 'DICT_TEMPLATE')
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(LoaderTestCase))
-    suite.addTest(unittest.makeSuite(ModuleLoaderTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/regression.py b/python/ext-libs/jinja2/testsuite/regression.py
deleted file mode 100644
index c5f7d5c..0000000
--- a/python/ext-libs/jinja2/testsuite/regression.py
+++ /dev/null
@@ -1,279 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.regression
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests corner cases and bugs.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Template, Environment, DictLoader, TemplateSyntaxError, \
-     TemplateNotFound, PrefixLoader
-from jinja2._compat import text_type
-
-env = Environment()
-
-
-class CornerTestCase(JinjaTestCase):
-
-    def test_assigned_scoping(self):
-        t = env.from_string('''
-        {%- for item in (1, 2, 3, 4) -%}
-            [{{ item }}]
-        {%- endfor %}
-        {{- item -}}
-        ''')
-        assert t.render(item=42) == '[1][2][3][4]42'
-
-        t = env.from_string('''
-        {%- for item in (1, 2, 3, 4) -%}
-            [{{ item }}]
-        {%- endfor %}
-        {%- set item = 42 %}
-        {{- item -}}
-        ''')
-        assert t.render() == '[1][2][3][4]42'
-
-        t = env.from_string('''
-        {%- set item = 42 %}
-        {%- for item in (1, 2, 3, 4) -%}
-            [{{ item }}]
-        {%- endfor %}
-        {{- item -}}
-        ''')
-        assert t.render() == '[1][2][3][4]42'
-
-    def test_closure_scoping(self):
-        t = env.from_string('''
-        {%- set wrapper = "<FOO>" %}
-        {%- for item in (1, 2, 3, 4) %}
-            {%- macro wrapper() %}[{{ item }}]{% endmacro %}
-            {{- wrapper() }}
-        {%- endfor %}
-        {{- wrapper -}}
-        ''')
-        assert t.render() == '[1][2][3][4]<FOO>'
-
-        t = env.from_string('''
-        {%- for item in (1, 2, 3, 4) %}
-            {%- macro wrapper() %}[{{ item }}]{% endmacro %}
-            {{- wrapper() }}
-        {%- endfor %}
-        {%- set wrapper = "<FOO>" %}
-        {{- wrapper -}}
-        ''')
-        assert t.render() == '[1][2][3][4]<FOO>'
-
-        t = env.from_string('''
-        {%- for item in (1, 2, 3, 4) %}
-            {%- macro wrapper() %}[{{ item }}]{% endmacro %}
-            {{- wrapper() }}
-        {%- endfor %}
-        {{- wrapper -}}
-        ''')
-        assert t.render(wrapper=23) == '[1][2][3][4]23'
-
-
-class BugTestCase(JinjaTestCase):
-
-    def test_keyword_folding(self):
-        env = Environment()
-        env.filters['testing'] = lambda value, some: value + some
-        assert env.from_string("{{ 'test'|testing(some='stuff') }}") \
-               .render() == 'teststuff'
-
-    def test_extends_output_bugs(self):
-        env = Environment(loader=DictLoader({
-            'parent.html': '(({% block title %}{% endblock %}))'
-        }))
-
-        t = env.from_string('{% if expr %}{% extends "parent.html" %}{% endif %}'
-                            '[[{% block title %}title{% endblock %}]]'
-                            '{% for item in [1, 2, 3] %}({{ item }}){% endfor %}')
-        assert t.render(expr=False) == '[[title]](1)(2)(3)'
-        assert t.render(expr=True) == '((title))'
-
-    def test_urlize_filter_escaping(self):
-        tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}')
-        assert tmpl.render() == '<a href="http://www.example.org/<foo">http://www.example.org/<foo</a>'
-
-    def test_loop_call_loop(self):
-        tmpl = env.from_string('''
-
-        {% macro test() %}
-            {{ caller() }}
-        {% endmacro %}
-
-        {% for num1 in range(5) %}
-            {% call test() %}
-                {% for num2 in range(10) %}
-                    {{ loop.index }}
-                {% endfor %}
-            {% endcall %}
-        {% endfor %}
-
-        ''')
-
-        assert tmpl.render().split() == [text_type(x) for x in range(1, 11)] * 5
-
-    def test_weird_inline_comment(self):
-        env = Environment(line_statement_prefix='%')
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                           '% for item in seq {# missing #}\n...% endfor')
-
-    def test_old_macro_loop_scoping_bug(self):
-        tmpl = env.from_string('{% for i in (1, 2) %}{{ i }}{% endfor %}'
-                               '{% macro i() %}3{% endmacro %}{{ i() }}')
-        assert tmpl.render() == '123'
-
-    def test_partial_conditional_assignments(self):
-        tmpl = env.from_string('{% if b %}{% set a = 42 %}{% endif %}{{ a }}')
-        assert tmpl.render(a=23) == '23'
-        assert tmpl.render(b=True) == '42'
-
-    def test_stacked_locals_scoping_bug(self):
-        env = Environment(line_statement_prefix='#')
-        t = env.from_string('''\
-# for j in [1, 2]:
-#   set x = 1
-#   for i in [1, 2]:
-#     print x
-#     if i % 2 == 0:
-#       set x = x + 1
-#     endif
-#   endfor
-# endfor
-# if a
-#   print 'A'
-# elif b
-#   print 'B'
-# elif c == d
-#   print 'C'
-# else
-#   print 'D'
-# endif
-    ''')
-        assert t.render(a=0, b=False, c=42, d=42.0) == '1111C'
-
-    def test_stacked_locals_scoping_bug_twoframe(self):
-        t = Template('''
-            {% set x = 1 %}
-            {% for item in foo %}
-                {% if item == 1 %}
-                    {% set x = 2 %}
-                {% endif %}
-            {% endfor %}
-            {{ x }}
-        ''')
-        rv = t.render(foo=[1]).strip()
-        assert rv == u'1'
-
-    def test_call_with_args(self):
-        t = Template("""{% macro dump_users(users) -%}
-        <ul>
-          {%- for user in users -%}
-            <li><p>{{ user.username|e }}</p>{{ caller(user) }}</li>
-          {%- endfor -%}
-          </ul>
-        {%- endmacro -%}
-
-        {% call(user) dump_users(list_of_user) -%}
-          <dl>
-            <dl>Realname</dl>
-            <dd>{{ user.realname|e }}</dd>
-            <dl>Description</dl>
-            <dd>{{ user.description }}</dd>
-          </dl>
-        {% endcall %}""")
-
-        assert [x.strip() for x in t.render(list_of_user=[{
-            'username':'apo',
-            'realname':'something else',
-            'description':'test'
-        }]).splitlines()] == [
-            u'<ul><li><p>apo</p><dl>',
-            u'<dl>Realname</dl>',
-            u'<dd>something else</dd>',
-            u'<dl>Description</dl>',
-            u'<dd>test</dd>',
-            u'</dl>',
-            u'</li></ul>'
-        ]
-
-    def test_empty_if_condition_fails(self):
-        self.assert_raises(TemplateSyntaxError, Template, '{% if %}....{% endif %}')
-        self.assert_raises(TemplateSyntaxError, Template, '{% if foo %}...{% elif %}...{% endif %}')
-        self.assert_raises(TemplateSyntaxError, Template, '{% for x in %}..{% endfor %}')
-
-    def test_recursive_loop_bug(self):
-        tpl1 = Template("""
-        {% for p in foo recursive%}
-            {{p.bar}}
-            {% for f in p.fields recursive%}
-                {{f.baz}}
-                {{p.bar}}
-                {% if f.rec %}
-                    {{ loop(f.sub) }}
-                {% endif %}
-            {% endfor %}
-        {% endfor %}
-        """)
-
-        tpl2 = Template("""
-        {% for p in foo%}
-            {{p.bar}}
-            {% for f in p.fields recursive%}
-                {{f.baz}}
-                {{p.bar}}
-                {% if f.rec %}
-                    {{ loop(f.sub) }}
-                {% endif %}
-            {% endfor %}
-        {% endfor %}
-        """)
-
-    def test_else_loop_bug(self):
-        t = Template('''
-            {% for x in y %}
-                {{ loop.index0 }}
-            {% else %}
-                {% for i in range(3) %}{{ i }}{% endfor %}
-            {% endfor %}
-        ''')
-        self.assertEqual(t.render(y=[]).strip(), '012')
-
-    def test_correct_prefix_loader_name(self):
-        env = Environment(loader=PrefixLoader({
-            'foo':  DictLoader({})
-        }))
-        try:
-            env.get_template('foo/bar.html')
-        except TemplateNotFound as e:
-            assert e.name == 'foo/bar.html'
-        else:
-            assert False, 'expected error here'
-
-    def test_contextfunction_callable_classes(self):
-        from jinja2.utils import contextfunction
-        class CallableClass(object):
-            @contextfunction
-            def __call__(self, ctx):
-                return ctx.resolve('hello')
-
-        tpl = Template("""{{ callableclass() }}""")
-        output = tpl.render(callableclass = CallableClass(), hello = 'TEST')
-        expected = 'TEST'
-
-        self.assert_equal(output, expected)
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(CornerTestCase))
-    suite.addTest(unittest.makeSuite(BugTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/res/__init__.py b/python/ext-libs/jinja2/testsuite/res/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/python/ext-libs/jinja2/testsuite/res/templates/broken.html b/python/ext-libs/jinja2/testsuite/res/templates/broken.html
deleted file mode 100644
index 77669fa..0000000
--- a/python/ext-libs/jinja2/testsuite/res/templates/broken.html
+++ /dev/null
@@ -1,3 +0,0 @@
-Before
-{{ fail() }}
-After
diff --git a/python/ext-libs/jinja2/testsuite/res/templates/foo/test.html b/python/ext-libs/jinja2/testsuite/res/templates/foo/test.html
deleted file mode 100644
index b7d6715..0000000
--- a/python/ext-libs/jinja2/testsuite/res/templates/foo/test.html
+++ /dev/null
@@ -1 +0,0 @@
-FOO
diff --git a/python/ext-libs/jinja2/testsuite/res/templates/syntaxerror.html b/python/ext-libs/jinja2/testsuite/res/templates/syntaxerror.html
deleted file mode 100644
index f21b817..0000000
--- a/python/ext-libs/jinja2/testsuite/res/templates/syntaxerror.html
+++ /dev/null
@@ -1,4 +0,0 @@
-Foo
-{% for item in broken %}
-  ...
-{% endif %}
diff --git a/python/ext-libs/jinja2/testsuite/res/templates/test.html b/python/ext-libs/jinja2/testsuite/res/templates/test.html
deleted file mode 100644
index ba578e4..0000000
--- a/python/ext-libs/jinja2/testsuite/res/templates/test.html
+++ /dev/null
@@ -1 +0,0 @@
-BAR
diff --git a/python/ext-libs/jinja2/testsuite/security.py b/python/ext-libs/jinja2/testsuite/security.py
deleted file mode 100644
index 246d0f0..0000000
--- a/python/ext-libs/jinja2/testsuite/security.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.security
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Checks the sandbox and other security features.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Environment
-from jinja2.sandbox import SandboxedEnvironment, \
-     ImmutableSandboxedEnvironment, unsafe
-from jinja2 import Markup, escape
-from jinja2.exceptions import SecurityError, TemplateSyntaxError, \
-     TemplateRuntimeError
-from jinja2._compat import text_type
-
-
-class PrivateStuff(object):
-
-    def bar(self):
-        return 23
-
-    @unsafe
-    def foo(self):
-        return 42
-
-    def __repr__(self):
-        return 'PrivateStuff'
-
-
-class PublicStuff(object):
-    bar = lambda self: 23
-    _foo = lambda self: 42
-
-    def __repr__(self):
-        return 'PublicStuff'
-
-
-class SandboxTestCase(JinjaTestCase):
-
-    def test_unsafe(self):
-        env = SandboxedEnvironment()
-        self.assert_raises(SecurityError, env.from_string("{{ foo.foo() }}").render,
-                           foo=PrivateStuff())
-        self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()), '23')
-
-        self.assert_raises(SecurityError, env.from_string("{{ foo._foo() }}").render,
-                           foo=PublicStuff())
-        self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()), '23')
-        self.assert_equal(env.from_string("{{ foo.__class__ }}").render(foo=42), '')
-        self.assert_equal(env.from_string("{{ foo.func_code }}").render(foo=lambda:None), '')
-        # security error comes from __class__ already.
-        self.assert_raises(SecurityError, env.from_string(
-            "{{ foo.__class__.__subclasses__() }}").render, foo=42)
-
-    def test_immutable_environment(self):
-        env = ImmutableSandboxedEnvironment()
-        self.assert_raises(SecurityError, env.from_string(
-            '{{ [].append(23) }}').render)
-        self.assert_raises(SecurityError, env.from_string(
-            '{{ {1:2}.clear() }}').render)
-
-    def test_restricted(self):
-        env = SandboxedEnvironment()
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                      "{% for item.attribute in seq %}...{% endfor %}")
-        self.assert_raises(TemplateSyntaxError, env.from_string,
-                      "{% for foo, bar.baz in seq %}...{% endfor %}")
-
-    def test_markup_operations(self):
-        # adding two strings should escape the unsafe one
-        unsafe = '<script type="application/x-some-script">alert("foo");</script>'
-        safe = Markup('<em>username</em>')
-        assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)
-
-        # string interpolations are safe to use too
-        assert Markup('<em>%s</em>') % '<bad user>' == \
-               '<em><bad user></em>'
-        assert Markup('<em>%(username)s</em>') % {
-            'username': '<bad user>'
-        } == '<em><bad user></em>'
-
-        # an escaped object is markup too
-        assert type(Markup('foo') + 'bar') is Markup
-
-        # and it implements __html__ by returning itself
-        x = Markup("foo")
-        assert x.__html__() is x
-
-        # it also knows how to treat __html__ objects
-        class Foo(object):
-            def __html__(self):
-                return '<em>awesome</em>'
-            def __unicode__(self):
-                return 'awesome'
-        assert Markup(Foo()) == '<em>awesome</em>'
-        assert Markup('<strong>%s</strong>') % Foo() == \
-               '<strong><em>awesome</em></strong>'
-
-        # escaping and unescaping
-        assert escape('"<>&\'') == '"<>&''
-        assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
-        assert Markup("<test>").unescape() == "<test>"
-
-    def test_template_data(self):
-        env = Environment(autoescape=True)
-        t = env.from_string('{% macro say_hello(name) %}'
-                            '<p>Hello {{ name }}!</p>{% endmacro %}'
-                            '{{ say_hello("<blink>foo</blink>") }}')
-        escaped_out = '<p>Hello <blink>foo</blink>!</p>'
-        assert t.render() == escaped_out
-        assert text_type(t.module) == escaped_out
-        assert escape(t.module) == escaped_out
-        assert t.module.say_hello('<blink>foo</blink>') == escaped_out
-        assert escape(t.module.say_hello('<blink>foo</blink>')) == escaped_out
-
-    def test_attr_filter(self):
-        env = SandboxedEnvironment()
-        tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
-        self.assert_raises(SecurityError, tmpl.render, cls=int)
-
-    def test_binary_operator_intercepting(self):
-        def disable_op(left, right):
-            raise TemplateRuntimeError('that operator so does not work')
-        for expr, ctx, rv in ('1 + 2', {}, '3'), ('a + 2', {'a': 2}, '4'):
-            env = SandboxedEnvironment()
-            env.binop_table['+'] = disable_op
-            t = env.from_string('{{ %s }}' % expr)
-            assert t.render(ctx) == rv
-            env.intercepted_binops = frozenset(['+'])
-            t = env.from_string('{{ %s }}' % expr)
-            try:
-                t.render(ctx)
-            except TemplateRuntimeError as e:
-                pass
-            else:
-                self.fail('expected runtime error')
-
-    def test_unary_operator_intercepting(self):
-        def disable_op(arg):
-            raise TemplateRuntimeError('that operator so does not work')
-        for expr, ctx, rv in ('-1', {}, '-1'), ('-a', {'a': 2}, '-2'):
-            env = SandboxedEnvironment()
-            env.unop_table['-'] = disable_op
-            t = env.from_string('{{ %s }}' % expr)
-            assert t.render(ctx) == rv
-            env.intercepted_unops = frozenset(['-'])
-            t = env.from_string('{{ %s }}' % expr)
-            try:
-                t.render(ctx)
-            except TemplateRuntimeError as e:
-                pass
-            else:
-                self.fail('expected runtime error')
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(SandboxTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/tests.py b/python/ext-libs/jinja2/testsuite/tests.py
deleted file mode 100644
index 3ece7a8..0000000
--- a/python/ext-libs/jinja2/testsuite/tests.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.tests
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Who tests the tests?
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import unittest
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2 import Markup, Environment
-
-env = Environment()
-
-
-class TestsTestCase(JinjaTestCase):
-
-    def test_defined(self):
-        tmpl = env.from_string('{{ missing is defined }}|{{ true is defined }}')
-        assert tmpl.render() == 'False|True'
-
-    def test_even(self):
-        tmpl = env.from_string('''{{ 1 is even }}|{{ 2 is even }}''')
-        assert tmpl.render() == 'False|True'
-
-    def test_odd(self):
-        tmpl = env.from_string('''{{ 1 is odd }}|{{ 2 is odd }}''')
-        assert tmpl.render() == 'True|False'
-
-    def test_lower(self):
-        tmpl = env.from_string('''{{ "foo" is lower }}|{{ "FOO" is lower }}''')
-        assert tmpl.render() == 'True|False'
-
-    def test_typechecks(self):
-        tmpl = env.from_string('''
-            {{ 42 is undefined }}
-            {{ 42 is defined }}
-            {{ 42 is none }}
-            {{ none is none }}
-            {{ 42 is number }}
-            {{ 42 is string }}
-            {{ "foo" is string }}
-            {{ "foo" is sequence }}
-            {{ [1] is sequence }}
-            {{ range is callable }}
-            {{ 42 is callable }}
-            {{ range(5) is iterable }}
-            {{ {} is mapping }}
-            {{ mydict is mapping }}
-            {{ [] is mapping }}
-        ''')
-        class MyDict(dict):
-            pass
-        assert tmpl.render(mydict=MyDict()).split() == [
-            'False', 'True', 'False', 'True', 'True', 'False',
-            'True', 'True', 'True', 'True', 'False', 'True',
-            'True', 'True', 'False'
-        ]
-
-    def test_sequence(self):
-        tmpl = env.from_string(
-            '{{ [1, 2, 3] is sequence }}|'
-            '{{ "foo" is sequence }}|'
-            '{{ 42 is sequence }}'
-        )
-        assert tmpl.render() == 'True|True|False'
-
-    def test_upper(self):
-        tmpl = env.from_string('{{ "FOO" is upper }}|{{ "foo" is upper }}')
-        assert tmpl.render() == 'True|False'
-
-    def test_sameas(self):
-        tmpl = env.from_string('{{ foo is sameas false }}|'
-                               '{{ 0 is sameas false }}')
-        assert tmpl.render(foo=False) == 'True|False'
-
-    def test_no_paren_for_arg1(self):
-        tmpl = env.from_string('{{ foo is sameas none }}')
-        assert tmpl.render(foo=None) == 'True'
-
-    def test_escaped(self):
-        env = Environment(autoescape=True)
-        tmpl = env.from_string('{{ x is escaped }}|{{ y is escaped }}')
-        assert tmpl.render(x='foo', y=Markup('foo')) == 'False|True'
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TestsTestCase))
-    return suite
diff --git a/python/ext-libs/jinja2/testsuite/utils.py b/python/ext-libs/jinja2/testsuite/utils.py
deleted file mode 100644
index cab9b09..0000000
--- a/python/ext-libs/jinja2/testsuite/utils.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.testsuite.utils
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Tests utilities jinja uses.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import gc
-import unittest
-
-import pickle
-
-from jinja2.testsuite import JinjaTestCase
-
-from jinja2.utils import LRUCache, escape, object_type_repr
-
-
-class LRUCacheTestCase(JinjaTestCase):
-
-    def test_simple(self):
-        d = LRUCache(3)
-        d["a"] = 1
-        d["b"] = 2
-        d["c"] = 3
-        d["a"]
-        d["d"] = 4
-        assert len(d) == 3
-        assert 'a' in d and 'c' in d and 'd' in d and 'b' not in d
-
-    def test_pickleable(self):
-        cache = LRUCache(2)
-        cache["foo"] = 42
-        cache["bar"] = 23
-        cache["foo"]
-
-        for protocol in range(3):
-            copy = pickle.loads(pickle.dumps(cache, protocol))
-            assert copy.capacity == cache.capacity
-            assert copy._mapping == cache._mapping
-            assert copy._queue == cache._queue
-
-
-class HelpersTestCase(JinjaTestCase):
-
-    def test_object_type_repr(self):
-        class X(object):
-            pass
-        self.assert_equal(object_type_repr(42), 'int object')
-        self.assert_equal(object_type_repr([]), 'list object')
-        self.assert_equal(object_type_repr(X()),
-                         'jinja2.testsuite.utils.X object')
-        self.assert_equal(object_type_repr(None), 'None')
-        self.assert_equal(object_type_repr(Ellipsis), 'Ellipsis')
-
-
-class MarkupLeakTestCase(JinjaTestCase):
-
-    def test_markup_leaks(self):
-        counts = set()
-        for count in range(20):
-            for item in range(1000):
-                escape("foo")
-                escape("<foo>")
-                escape(u"foo")
-                escape(u"<foo>")
-            counts.add(len(gc.get_objects()))
-        assert len(counts) == 1, 'ouch, c extension seems to leak objects'
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(LRUCacheTestCase))
-    suite.addTest(unittest.makeSuite(HelpersTestCase))
-
-    # this test only tests the c extension
-    if not hasattr(escape, 'func_code'):
-        suite.addTest(unittest.makeSuite(MarkupLeakTestCase))
-
-    return suite
diff --git a/python/ext-libs/jinja2/utils.py b/python/ext-libs/jinja2/utils.py
deleted file mode 100644
index ddc47da..0000000
--- a/python/ext-libs/jinja2/utils.py
+++ /dev/null
@@ -1,520 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.utils
-    ~~~~~~~~~~~~
-
-    Utility functions.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD, see LICENSE for more details.
-"""
-import re
-import errno
-from collections import deque
-from jinja2._compat import text_type, string_types, implements_iterator, \
-     allocate_lock, url_quote
-
-
-_word_split_re = re.compile(r'(\s+)')
-_punctuation_re = re.compile(
-    '^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (
-        '|'.join(map(re.escape, ('(', '<', '<'))),
-        '|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '>')))
-    )
-)
-_simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
-_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
-_entity_re = re.compile(r'&([^;]+);')
-_letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
-_digits = '0123456789'
-
-# special singleton representing missing values for the runtime
-missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})()
-
-# internal code
-internal_code = set()
-
-concat = u''.join
-
-
-def contextfunction(f):
-    """This decorator can be used to mark a function or method context callable.
-    A context callable is passed the active :class:`Context` as first argument when
-    called from the template.  This is useful if a function wants to get access
-    to the context or functions provided on the context object.  For example
-    a function that returns a sorted list of template variables the current
-    template exports could look like this::
-
-        @contextfunction
-        def get_exported_names(context):
-            return sorted(context.exported_vars)
-    """
-    f.contextfunction = True
-    return f
-
-
-def evalcontextfunction(f):
-    """This decorator can be used to mark a function or method as an eval
-    context callable.  This is similar to the :func:`contextfunction`
-    but instead of passing the context, an evaluation context object is
-    passed.  For more information about the eval context, see
-    :ref:`eval-context`.
-
-    .. versionadded:: 2.4
-    """
-    f.evalcontextfunction = True
-    return f
-
-
-def environmentfunction(f):
-    """This decorator can be used to mark a function or method as environment
-    callable.  This decorator works exactly like the :func:`contextfunction`
-    decorator just that the first argument is the active :class:`Environment`
-    and not context.
-    """
-    f.environmentfunction = True
-    return f
-
-
-def internalcode(f):
-    """Marks the function as internally used"""
-    internal_code.add(f.__code__)
-    return f
-
-
-def is_undefined(obj):
-    """Check if the object passed is undefined.  This does nothing more than
-    performing an instance check against :class:`Undefined` but looks nicer.
-    This can be used for custom filters or tests that want to react to
-    undefined variables.  For example a custom default filter can look like
-    this::
-
-        def default(var, default=''):
-            if is_undefined(var):
-                return default
-            return var
-    """
-    from jinja2.runtime import Undefined
-    return isinstance(obj, Undefined)
-
-
-def consume(iterable):
-    """Consumes an iterable without doing anything with it."""
-    for event in iterable:
-        pass
-
-
-def clear_caches():
-    """Jinja2 keeps internal caches for environments and lexers.  These are
-    used so that Jinja2 doesn't have to recreate environments and lexers all
-    the time.  Normally you don't have to care about that but if you are
-    messuring memory consumption you may want to clean the caches.
-    """
-    from jinja2.environment import _spontaneous_environments
-    from jinja2.lexer import _lexer_cache
-    _spontaneous_environments.clear()
-    _lexer_cache.clear()
-
-
-def import_string(import_name, silent=False):
-    """Imports an object based on a string.  This is useful if you want to
-    use import paths as endpoints or something similar.  An import path can
-    be specified either in dotted notation (``xml.sax.saxutils.escape``)
-    or with a colon as object delimiter (``xml.sax.saxutils:escape``).
-
-    If the `silent` is True the return value will be `None` if the import
-    fails.
-
-    :return: imported object
-    """
-    try:
-        if ':' in import_name:
-            module, obj = import_name.split(':', 1)
-        elif '.' in import_name:
-            items = import_name.split('.')
-            module = '.'.join(items[:-1])
-            obj = items[-1]
-        else:
-            return __import__(import_name)
-        return getattr(__import__(module, None, None, [obj]), obj)
-    except (ImportError, AttributeError):
-        if not silent:
-            raise
-
-
-def open_if_exists(filename, mode='rb'):
-    """Returns a file descriptor for the filename if that file exists,
-    otherwise `None`.
-    """
-    try:
-        return open(filename, mode)
-    except IOError as e:
-        if e.errno not in (errno.ENOENT, errno.EISDIR):
-            raise
-
-
-def object_type_repr(obj):
-    """Returns the name of the object's type.  For some recognized
-    singletons the name of the object is returned instead. (For
-    example for `None` and `Ellipsis`).
-    """
-    if obj is None:
-        return 'None'
-    elif obj is Ellipsis:
-        return 'Ellipsis'
-    # __builtin__ in 2.x, builtins in 3.x
-    if obj.__class__.__module__ in ('__builtin__', 'builtins'):
-        name = obj.__class__.__name__
-    else:
-        name = obj.__class__.__module__ + '.' + obj.__class__.__name__
-    return '%s object' % name
-
-
-def pformat(obj, verbose=False):
-    """Prettyprint an object.  Either use the `pretty` library or the
-    builtin `pprint`.
-    """
-    try:
-        from pretty import pretty
-        return pretty(obj, verbose=verbose)
-    except ImportError:
-        from pprint import pformat
-        return pformat(obj)
-
-
-def urlize(text, trim_url_limit=None, nofollow=False):
-    """Converts any URLs in text into clickable links. Works on http://,
-    https:// and www. links. Links can have trailing punctuation (periods,
-    commas, close-parens) and leading punctuation (opening parens) and
-    it'll still do the right thing.
-
-    If trim_url_limit is not None, the URLs in link text will be limited
-    to trim_url_limit characters.
-
-    If nofollow is True, the URLs in link text will get a rel="nofollow"
-    attribute.
-    """
-    trim_url = lambda x, limit=trim_url_limit: limit is not None \
-                         and (x[:limit] + (len(x) >=limit and '...'
-                         or '')) or x
-    words = _word_split_re.split(text_type(escape(text)))
-    nofollow_attr = nofollow and ' rel="nofollow"' or ''
-    for i, word in enumerate(words):
-        match = _punctuation_re.match(word)
-        if match:
-            lead, middle, trail = match.groups()
-            if middle.startswith('www.') or (
-                '@' not in middle and
-                not middle.startswith('http://') and
-                not middle.startswith('https://') and
-                len(middle) > 0 and
-                middle[0] in _letters + _digits and (
-                    middle.endswith('.org') or
-                    middle.endswith('.net') or
-                    middle.endswith('.com')
-                )):
-                middle = '<a href="http://%s"%s>%s</a>' % (middle,
-                    nofollow_attr, trim_url(middle))
-            if middle.startswith('http://') or \
-               middle.startswith('https://'):
-                middle = '<a href="%s"%s>%s</a>' % (middle,
-                    nofollow_attr, trim_url(middle))
-            if '@' in middle and not middle.startswith('www.') and \
-               not ':' in middle and _simple_email_re.match(middle):
-                middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
-            if lead + middle + trail != word:
-                words[i] = lead + middle + trail
-    return u''.join(words)
-
-
-def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
-    """Generate some lorem impsum for the template."""
-    from jinja2.constants import LOREM_IPSUM_WORDS
-    from random import choice, randrange
-    words = LOREM_IPSUM_WORDS.split()
-    result = []
-
-    for _ in range(n):
-        next_capitalized = True
-        last_comma = last_fullstop = 0
-        word = None
-        last = None
-        p = []
-
-        # each paragraph contains out of 20 to 100 words.
-        for idx, _ in enumerate(range(randrange(min, max))):
-            while True:
-                word = choice(words)
-                if word != last:
-                    last = word
-                    break
-            if next_capitalized:
-                word = word.capitalize()
-                next_capitalized = False
-            # add commas
-            if idx - randrange(3, 8) > last_comma:
-                last_comma = idx
-                last_fullstop += 2
-                word += ','
-            # add end of sentences
-            if idx - randrange(10, 20) > last_fullstop:
-                last_comma = last_fullstop = idx
-                word += '.'
-                next_capitalized = True
-            p.append(word)
-
-        # ensure that the paragraph ends with a dot.
-        p = u' '.join(p)
-        if p.endswith(','):
-            p = p[:-1] + '.'
-        elif not p.endswith('.'):
-            p += '.'
-        result.append(p)
-
-    if not html:
-        return u'\n\n'.join(result)
-    return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
-
-
-def unicode_urlencode(obj, charset='utf-8'):
-    """URL escapes a single bytestring or unicode string with the
-    given charset if applicable to URL safe quoting under all rules
-    that need to be considered under all supported Python versions.
-
-    If non strings are provided they are converted to their unicode
-    representation first.
-    """
-    if not isinstance(obj, string_types):
-        obj = text_type(obj)
-    if isinstance(obj, text_type):
-        obj = obj.encode(charset)
-    return text_type(url_quote(obj))
-
-
-class LRUCache(object):
-    """A simple LRU Cache implementation."""
-
-    # this is fast for small capacities (something below 1000) but doesn't
-    # scale.  But as long as it's only used as storage for templates this
-    # won't do any harm.
-
-    def __init__(self, capacity):
-        self.capacity = capacity
-        self._mapping = {}
-        self._queue = deque()
-        self._postinit()
-
-    def _postinit(self):
-        # alias all queue methods for faster lookup
-        self._popleft = self._queue.popleft
-        self._pop = self._queue.pop
-        self._remove = self._queue.remove
-        self._wlock = allocate_lock()
-        self._append = self._queue.append
-
-    def __getstate__(self):
-        return {
-            'capacity':     self.capacity,
-            '_mapping':     self._mapping,
-            '_queue':       self._queue
-        }
-
-    def __setstate__(self, d):
-        self.__dict__.update(d)
-        self._postinit()
-
-    def __getnewargs__(self):
-        return (self.capacity,)
-
-    def copy(self):
-        """Return a shallow copy of the instance."""
-        rv = self.__class__(self.capacity)
-        rv._mapping.update(self._mapping)
-        rv._queue = deque(self._queue)
-        return rv
-
-    def get(self, key, default=None):
-        """Return an item from the cache dict or `default`"""
-        try:
-            return self[key]
-        except KeyError:
-            return default
-
-    def setdefault(self, key, default=None):
-        """Set `default` if the key is not in the cache otherwise
-        leave unchanged. Return the value of this key.
-        """
-        self._wlock.acquire()
-        try:
-            try:
-                return self[key]
-            except KeyError:
-                self[key] = default
-                return default
-        finally:
-            self._wlock.release()
-
-    def clear(self):
-        """Clear the cache."""
-        self._wlock.acquire()
-        try:
-            self._mapping.clear()
-            self._queue.clear()
-        finally:
-            self._wlock.release()
-
-    def __contains__(self, key):
-        """Check if a key exists in this cache."""
-        return key in self._mapping
-
-    def __len__(self):
-        """Return the current size of the cache."""
-        return len(self._mapping)
-
-    def __repr__(self):
-        return '<%s %r>' % (
-            self.__class__.__name__,
-            self._mapping
-        )
-
-    def __getitem__(self, key):
-        """Get an item from the cache. Moves the item up so that it has the
-        highest priority then.
-
-        Raise a `KeyError` if it does not exist.
-        """
-        self._wlock.acquire()
-        try:
-            rv = self._mapping[key]
-            if self._queue[-1] != key:
-                try:
-                    self._remove(key)
-                except ValueError:
-                    # if something removed the key from the container
-                    # when we read, ignore the ValueError that we would
-                    # get otherwise.
-                    pass
-                self._append(key)
-            return rv
-        finally:
-            self._wlock.release()
-
-    def __setitem__(self, key, value):
-        """Sets the value for an item. Moves the item up so that it
-        has the highest priority then.
-        """
-        self._wlock.acquire()
-        try:
-            if key in self._mapping:
-                self._remove(key)
-            elif len(self._mapping) == self.capacity:
-                del self._mapping[self._popleft()]
-            self._append(key)
-            self._mapping[key] = value
-        finally:
-            self._wlock.release()
-
-    def __delitem__(self, key):
-        """Remove an item from the cache dict.
-        Raise a `KeyError` if it does not exist.
-        """
-        self._wlock.acquire()
-        try:
-            del self._mapping[key]
-            try:
-                self._remove(key)
-            except ValueError:
-                # __getitem__ is not locked, it might happen
-                pass
-        finally:
-            self._wlock.release()
-
-    def items(self):
-        """Return a list of items."""
-        result = [(key, self._mapping[key]) for key in list(self._queue)]
-        result.reverse()
-        return result
-
-    def iteritems(self):
-        """Iterate over all items."""
-        return iter(self.items())
-
-    def values(self):
-        """Return a list of all values."""
-        return [x[1] for x in self.items()]
-
-    def itervalue(self):
-        """Iterate over all values."""
-        return iter(self.values())
-
-    def keys(self):
-        """Return a list of all keys ordered by most recent usage."""
-        return list(self)
-
-    def iterkeys(self):
-        """Iterate over all keys in the cache dict, ordered by
-        the most recent usage.
-        """
-        return reversed(tuple(self._queue))
-
-    __iter__ = iterkeys
-
-    def __reversed__(self):
-        """Iterate over the values in the cache dict, oldest items
-        coming first.
-        """
-        return iter(tuple(self._queue))
-
-    __copy__ = copy
-
-
-# register the LRU cache as mutable mapping if possible
-try:
-    from collections import MutableMapping
-    MutableMapping.register(LRUCache)
-except ImportError:
-    pass
-
-
- at implements_iterator
-class Cycler(object):
-    """A cycle helper for templates."""
-
-    def __init__(self, *items):
-        if not items:
-            raise RuntimeError('at least one item has to be provided')
-        self.items = items
-        self.reset()
-
-    def reset(self):
-        """Resets the cycle."""
-        self.pos = 0
-
-    @property
-    def current(self):
-        """Returns the current item."""
-        return self.items[self.pos]
-
-    def __next__(self):
-        """Goes one item ahead and returns it."""
-        rv = self.current
-        self.pos = (self.pos + 1) % len(self.items)
-        return rv
-
-
-class Joiner(object):
-    """A joining helper for templates."""
-
-    def __init__(self, sep=u', '):
-        self.sep = sep
-        self.used = False
-
-    def __call__(self):
-        if not self.used:
-            self.used = True
-            return u''
-        return self.sep
-
-
-# Imported here because that's where it was in the past
-from markupsafe import Markup, escape, soft_unicode
diff --git a/python/ext-libs/jinja2/visitor.py b/python/ext-libs/jinja2/visitor.py
deleted file mode 100644
index 413e7c3..0000000
--- a/python/ext-libs/jinja2/visitor.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    jinja2.visitor
-    ~~~~~~~~~~~~~~
-
-    This module implements a visitor for the nodes.
-
-    :copyright: (c) 2010 by the Jinja Team.
-    :license: BSD.
-"""
-from jinja2.nodes import Node
-
-
-class NodeVisitor(object):
-    """Walks the abstract syntax tree and call visitor functions for every
-    node found.  The visitor functions may return values which will be
-    forwarded by the `visit` method.
-
-    Per default the visitor functions for the nodes are ``'visit_'`` +
-    class name of the node.  So a `TryFinally` node visit function would
-    be `visit_TryFinally`.  This behavior can be changed by overriding
-    the `get_visitor` function.  If no visitor function exists for a node
-    (return value `None`) the `generic_visit` visitor is used instead.
-    """
-
-    def get_visitor(self, node):
-        """Return the visitor function for this node or `None` if no visitor
-        exists for this node.  In that case the generic visit function is
-        used instead.
-        """
-        method = 'visit_' + node.__class__.__name__
-        return getattr(self, method, None)
-
-    def visit(self, node, *args, **kwargs):
-        """Visit a node."""
-        f = self.get_visitor(node)
-        if f is not None:
-            return f(node, *args, **kwargs)
-        return self.generic_visit(node, *args, **kwargs)
-
-    def generic_visit(self, node, *args, **kwargs):
-        """Called if no explicit visitor function exists for a node."""
-        for node in node.iter_child_nodes():
-            self.visit(node, *args, **kwargs)
-
-
-class NodeTransformer(NodeVisitor):
-    """Walks the abstract syntax tree and allows modifications of nodes.
-
-    The `NodeTransformer` will walk the AST and use the return value of the
-    visitor functions to replace or remove the old node.  If the return
-    value of the visitor function is `None` the node will be removed
-    from the previous location otherwise it's replaced with the return
-    value.  The return value may be the original node in which case no
-    replacement takes place.
-    """
-
-    def generic_visit(self, node, *args, **kwargs):
-        for field, old_value in node.iter_fields():
-            if isinstance(old_value, list):
-                new_values = []
-                for value in old_value:
-                    if isinstance(value, Node):
-                        value = self.visit(value, *args, **kwargs)
-                        if value is None:
-                            continue
-                        elif not isinstance(value, Node):
-                            new_values.extend(value)
-                            continue
-                    new_values.append(value)
-                old_value[:] = new_values
-            elif isinstance(old_value, Node):
-                new_node = self.visit(old_value, *args, **kwargs)
-                if new_node is None:
-                    delattr(node, field)
-                else:
-                    setattr(node, field, new_node)
-        return node
-
-    def visit_list(self, node, *args, **kwargs):
-        """As transformers may return lists in some places this method
-        can be used to enforce a list as return value.
-        """
-        rv = self.visit(node, *args, **kwargs)
-        if not isinstance(rv, list):
-            rv = [rv]
-        return rv
diff --git a/python/ext-libs/pygments/__init__.py b/python/ext-libs/pygments/__init__.py
deleted file mode 100644
index 2bfd8ba..0000000
--- a/python/ext-libs/pygments/__init__.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Pygments
-    ~~~~~~~~
-
-    Pygments is a syntax highlighting package written in Python.
-
-    It is a generic syntax highlighter for general use in all kinds of software
-    such as forum systems, wikis or other applications that need to prettify
-    source code. Highlights are:
-
-    * a wide range of common languages and markup formats is supported
-    * special attention is paid to details, increasing quality by a fair amount
-    * support for new languages and formats are added easily
-    * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
-      formats that PIL supports, and ANSI sequences
-    * it is usable as a command-line tool and as a library
-    * ... and it highlights even Brainfuck!
-
-    The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
-
-    .. _Pygments tip:
-       http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-__version__ = '1.6'
-__docformat__ = 'restructuredtext'
-
-__all__ = ['lex', 'format', 'highlight']
-
-
-import sys
-
-from pygments.util import StringIO, BytesIO
-
-
-def lex(code, lexer):
-    """
-    Lex ``code`` with ``lexer`` and return an iterable of tokens.
-    """
-    try:
-        return lexer.get_tokens(code)
-    except TypeError, err:
-        if isinstance(err.args[0], str) and \
-           'unbound method get_tokens' in err.args[0]:
-            raise TypeError('lex() argument must be a lexer instance, '
-                            'not a class')
-        raise
-
-
-def format(tokens, formatter, outfile=None):
-    """
-    Format a tokenlist ``tokens`` with the formatter ``formatter``.
-
-    If ``outfile`` is given and a valid file object (an object
-    with a ``write`` method), the result will be written to it, otherwise
-    it is returned as a string.
-    """
-    try:
-        if not outfile:
-            #print formatter, 'using', formatter.encoding
-            realoutfile = formatter.encoding and BytesIO() or StringIO()
-            formatter.format(tokens, realoutfile)
-            return realoutfile.getvalue()
-        else:
-            formatter.format(tokens, outfile)
-    except TypeError, err:
-        if isinstance(err.args[0], str) and \
-           'unbound method format' in err.args[0]:
-            raise TypeError('format() argument must be a formatter instance, '
-                            'not a class')
-        raise
-
-
-def highlight(code, lexer, formatter, outfile=None):
-    """
-    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
-
-    If ``outfile`` is given and a valid file object (an object
-    with a ``write`` method), the result will be written to it, otherwise
-    it is returned as a string.
-    """
-    return format(lex(code, lexer), formatter, outfile)
-
-
-if __name__ == '__main__':
-    from pygments.cmdline import main
-    sys.exit(main(sys.argv))
diff --git a/python/ext-libs/pygments/cmdline.py b/python/ext-libs/pygments/cmdline.py
deleted file mode 100644
index c25204b..0000000
--- a/python/ext-libs/pygments/cmdline.py
+++ /dev/null
@@ -1,441 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.cmdline
-    ~~~~~~~~~~~~~~~~
-
-    Command line interface.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import sys
-import getopt
-from textwrap import dedent
-
-from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline
-from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \
-     find_lexer_class, guess_lexer, TextLexer
-from pygments.formatters import get_all_formatters, get_formatter_by_name, \
-     get_formatter_for_filename, find_formatter_class, \
-     TerminalFormatter  # pylint:disable-msg=E0611
-from pygments.filters import get_all_filters, find_filter_class
-from pygments.styles import get_all_styles, get_style_by_name
-
-
-USAGE = """\
-Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
-          [-O <options>] [-P <option=value>] [-o <outfile>] [<infile>]
-
-       %s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
-       %s -L [<which> ...]
-       %s -N <filename>
-       %s -H <type> <name>
-       %s -h | -V
-
-Highlight the input file and write the result to <outfile>.
-
-If no input file is given, use stdin, if -o is not given, use stdout.
-
-<lexer> is a lexer name (query all lexer names with -L). If -l is not
-given, the lexer is guessed from the extension of the input file name
-(this obviously doesn't work if the input is stdin).  If -g is passed,
-attempt to guess the lexer from the file contents, or pass through as
-plain text if this fails (this can work for stdin).
-
-Likewise, <formatter> is a formatter name, and will be guessed from
-the extension of the output file name. If no output file is given,
-the terminal formatter will be used by default.
-
-With the -O option, you can give the lexer and formatter a comma-
-separated list of options, e.g. ``-O bg=light,python=cool``.
-
-The -P option adds lexer and formatter options like the -O option, but
-you can only give one option per -P. That way, the option value may
-contain commas and equals signs, which it can't with -O, e.g.
-``-P "heading=Pygments, the Python highlighter".
-
-With the -F option, you can add filters to the token stream, you can
-give options in the same way as for -O after a colon (note: there must
-not be spaces around the colon).
-
-The -O, -P and -F options can be given multiple times.
-
-With the -S option, print out style definitions for style <style>
-for formatter <formatter>. The argument given by -a is formatter
-dependent.
-
-The -L option lists lexers, formatters, styles or filters -- set
-`which` to the thing you want to list (e.g. "styles"), or omit it to
-list everything.
-
-The -N option guesses and prints out a lexer name based solely on
-the given filename. It does not take input or highlight anything.
-If no specific lexer can be determined "text" is returned.
-
-The -H option prints detailed help for the object <name> of type <type>,
-where <type> is one of "lexer", "formatter" or "filter".
-
-The -h option prints this help.
-The -V option prints the package version.
-"""
-
-
-def _parse_options(o_strs):
-    opts = {}
-    if not o_strs:
-        return opts
-    for o_str in o_strs:
-        if not o_str:
-            continue
-        o_args = o_str.split(',')
-        for o_arg in o_args:
-            o_arg = o_arg.strip()
-            try:
-                o_key, o_val = o_arg.split('=')
-                o_key = o_key.strip()
-                o_val = o_val.strip()
-            except ValueError:
-                opts[o_arg] = True
-            else:
-                opts[o_key] = o_val
-    return opts
-
-
-def _parse_filters(f_strs):
-    filters = []
-    if not f_strs:
-        return filters
-    for f_str in f_strs:
-        if ':' in f_str:
-            fname, fopts = f_str.split(':', 1)
-            filters.append((fname, _parse_options([fopts])))
-        else:
-            filters.append((f_str, {}))
-    return filters
-
-
-def _print_help(what, name):
-    try:
-        if what == 'lexer':
-            cls = find_lexer_class(name)
-            print "Help on the %s lexer:" % cls.name
-            print dedent(cls.__doc__)
-        elif what == 'formatter':
-            cls = find_formatter_class(name)
-            print "Help on the %s formatter:" % cls.name
-            print dedent(cls.__doc__)
-        elif what == 'filter':
-            cls = find_filter_class(name)
-            print "Help on the %s filter:" % name
-            print dedent(cls.__doc__)
-    except AttributeError:
-        print >>sys.stderr, "%s not found!" % what
-
-
-def _print_list(what):
-    if what == 'lexer':
-        print
-        print "Lexers:"
-        print "~~~~~~~"
-
-        info = []
-        for fullname, names, exts, _ in get_all_lexers():
-            tup = (', '.join(names)+':', fullname,
-                   exts and '(filenames ' + ', '.join(exts) + ')' or '')
-            info.append(tup)
-        info.sort()
-        for i in info:
-            print ('* %s\n    %s %s') % i
-
-    elif what == 'formatter':
-        print
-        print "Formatters:"
-        print "~~~~~~~~~~~"
-
-        info = []
-        for cls in get_all_formatters():
-            doc = docstring_headline(cls)
-            tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
-                   '(filenames ' + ', '.join(cls.filenames) + ')' or '')
-            info.append(tup)
-        info.sort()
-        for i in info:
-            print ('* %s\n    %s %s') % i
-
-    elif what == 'filter':
-        print
-        print "Filters:"
-        print "~~~~~~~~"
-
-        for name in get_all_filters():
-            cls = find_filter_class(name)
-            print "* " + name + ':'
-            print "    %s" % docstring_headline(cls)
-
-    elif what == 'style':
-        print
-        print "Styles:"
-        print "~~~~~~~"
-
-        for name in get_all_styles():
-            cls = get_style_by_name(name)
-            print "* " + name + ':'
-            print "    %s" % docstring_headline(cls)
-
-
-def main(args=sys.argv):
-    """
-    Main command line entry point.
-    """
-    # pylint: disable-msg=R0911,R0912,R0915
-
-    usage = USAGE % ((args[0],) * 6)
-
-    if sys.platform in ['win32', 'cygwin']:
-        try:
-            # Provide coloring under Windows, if possible
-            import colorama
-            colorama.init()
-        except ImportError:
-            pass
-
-    try:
-        popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHg")
-    except getopt.GetoptError, err:
-        print >>sys.stderr, usage
-        return 2
-    opts = {}
-    O_opts = []
-    P_opts = []
-    F_opts = []
-    for opt, arg in popts:
-        if opt == '-O':
-            O_opts.append(arg)
-        elif opt == '-P':
-            P_opts.append(arg)
-        elif opt == '-F':
-            F_opts.append(arg)
-        opts[opt] = arg
-
-    if not opts and not args:
-        print usage
-        return 0
-
-    if opts.pop('-h', None) is not None:
-        print usage
-        return 0
-
-    if opts.pop('-V', None) is not None:
-        print 'Pygments version %s, (c) 2006-2013 by Georg Brandl.' % __version__
-        return 0
-
-    # handle ``pygmentize -L``
-    L_opt = opts.pop('-L', None)
-    if L_opt is not None:
-        if opts:
-            print >>sys.stderr, usage
-            return 2
-
-        # print version
-        main(['', '-V'])
-        if not args:
-            args = ['lexer', 'formatter', 'filter', 'style']
-        for arg in args:
-            _print_list(arg.rstrip('s'))
-        return 0
-
-    # handle ``pygmentize -H``
-    H_opt = opts.pop('-H', None)
-    if H_opt is not None:
-        if opts or len(args) != 2:
-            print >>sys.stderr, usage
-            return 2
-
-        what, name = args
-        if what not in ('lexer', 'formatter', 'filter'):
-            print >>sys.stderr, usage
-            return 2
-
-        _print_help(what, name)
-        return 0
-
-    # parse -O options
-    parsed_opts = _parse_options(O_opts)
-    opts.pop('-O', None)
-
-    # parse -P options
-    for p_opt in P_opts:
-        try:
-            name, value = p_opt.split('=', 1)
-        except ValueError:
-            parsed_opts[p_opt] = True
-        else:
-            parsed_opts[name] = value
-    opts.pop('-P', None)
-
-    # handle ``pygmentize -N``
-    infn = opts.pop('-N', None)
-    if infn is not None:
-        try:
-            lexer = get_lexer_for_filename(infn, **parsed_opts)
-        except ClassNotFound, err:
-            lexer = TextLexer()
-        except OptionError, err:
-            print >>sys.stderr, 'Error:', err
-            return 1
-
-        print lexer.aliases[0]
-        return 0
-
-    # handle ``pygmentize -S``
-    S_opt = opts.pop('-S', None)
-    a_opt = opts.pop('-a', None)
-    if S_opt is not None:
-        f_opt = opts.pop('-f', None)
-        if not f_opt:
-            print >>sys.stderr, usage
-            return 2
-        if opts or args:
-            print >>sys.stderr, usage
-            return 2
-
-        try:
-            parsed_opts['style'] = S_opt
-            fmter = get_formatter_by_name(f_opt, **parsed_opts)
-        except ClassNotFound, err:
-            print >>sys.stderr, err
-            return 1
-
-        arg = a_opt or ''
-        try:
-            print fmter.get_style_defs(arg)
-        except Exception, err:
-            print >>sys.stderr, 'Error:', err
-            return 1
-        return 0
-
-    # if no -S is given, -a is not allowed
-    if a_opt is not None:
-        print >>sys.stderr, usage
-        return 2
-
-    # parse -F options
-    F_opts = _parse_filters(F_opts)
-    opts.pop('-F', None)
-
-    # select formatter
-    outfn = opts.pop('-o', None)
-    fmter = opts.pop('-f', None)
-    if fmter:
-        try:
-            fmter = get_formatter_by_name(fmter, **parsed_opts)
-        except (OptionError, ClassNotFound), err:
-            print >>sys.stderr, 'Error:', err
-            return 1
-
-    if outfn:
-        if not fmter:
-            try:
-                fmter = get_formatter_for_filename(outfn, **parsed_opts)
-            except (OptionError, ClassNotFound), err:
-                print >>sys.stderr, 'Error:', err
-                return 1
-        try:
-            outfile = open(outfn, 'wb')
-        except Exception, err:
-            print >>sys.stderr, 'Error: cannot open outfile:', err
-            return 1
-    else:
-        if not fmter:
-            fmter = TerminalFormatter(**parsed_opts)
-        outfile = sys.stdout
-
-    # select lexer
-    lexer = opts.pop('-l', None)
-    if lexer:
-        try:
-            lexer = get_lexer_by_name(lexer, **parsed_opts)
-        except (OptionError, ClassNotFound), err:
-            print >>sys.stderr, 'Error:', err
-            return 1
-
-    if args:
-        if len(args) > 1:
-            print >>sys.stderr, usage
-            return 2
-
-        infn = args[0]
-        try:
-            code = open(infn, 'rb').read()
-        except Exception, err:
-            print >>sys.stderr, 'Error: cannot read infile:', err
-            return 1
-
-        if not lexer:
-            try:
-                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
-            except ClassNotFound, err:
-                if '-g' in opts:
-                    try:
-                        lexer = guess_lexer(code, **parsed_opts)
-                    except ClassNotFound:
-                        lexer = TextLexer(**parsed_opts)
-                else:
-                    print >>sys.stderr, 'Error:', err
-                    return 1
-            except OptionError, err:
-                print >>sys.stderr, 'Error:', err
-                return 1
-
-    else:
-        if '-g' in opts:
-            code = sys.stdin.read()
-            try:
-                lexer = guess_lexer(code, **parsed_opts)
-            except ClassNotFound:
-                lexer = TextLexer(**parsed_opts)
-        elif not lexer:
-            print >>sys.stderr, 'Error: no lexer name given and reading ' + \
-                                'from stdin (try using -g or -l <lexer>)'
-            return 2
-        else:
-            code = sys.stdin.read()
-
-    # No encoding given? Use latin1 if output file given,
-    # stdin/stdout encoding otherwise.
-    # (This is a compromise, I'm not too happy with it...)
-    if 'encoding' not in parsed_opts and 'outencoding' not in parsed_opts:
-        if outfn:
-            # encoding pass-through
-            fmter.encoding = 'latin1'
-        else:
-            if sys.version_info < (3,):
-                # use terminal encoding; Python 3's terminals already do that
-                lexer.encoding = getattr(sys.stdin, 'encoding',
-                                         None) or 'ascii'
-                fmter.encoding = getattr(sys.stdout, 'encoding',
-                                         None) or 'ascii'
-    elif not outfn and sys.version_info > (3,):
-        # output to terminal with encoding -> use .buffer
-        outfile = sys.stdout.buffer
-
-    # ... and do it!
-    try:
-        # process filters
-        for fname, fopts in F_opts:
-            lexer.add_filter(fname, **fopts)
-        highlight(code, lexer, fmter, outfile)
-    except Exception, err:
-        import traceback
-        info = traceback.format_exception(*sys.exc_info())
-        msg = info[-1].strip()
-        if len(info) >= 3:
-            # extract relevant file and position info
-            msg += '\n   (f%s)' % info[-2].split('\n')[0].strip()[1:]
-        print >>sys.stderr
-        print >>sys.stderr, '*** Error while highlighting:'
-        print >>sys.stderr, msg
-        return 1
-
-    return 0
diff --git a/python/ext-libs/pygments/console.py b/python/ext-libs/pygments/console.py
deleted file mode 100644
index c8dfbd1..0000000
--- a/python/ext-libs/pygments/console.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.console
-    ~~~~~~~~~~~~~~~~
-
-    Format colored console output.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-esc = "\x1b["
-
-codes = {}
-codes[""]          = ""
-codes["reset"]     = esc + "39;49;00m"
-
-codes["bold"]      = esc + "01m"
-codes["faint"]     = esc + "02m"
-codes["standout"]  = esc + "03m"
-codes["underline"] = esc + "04m"
-codes["blink"]     = esc + "05m"
-codes["overline"]  = esc + "06m"
-
-dark_colors  = ["black", "darkred", "darkgreen", "brown", "darkblue",
-                "purple", "teal", "lightgray"]
-light_colors = ["darkgray", "red", "green", "yellow", "blue",
-                "fuchsia", "turquoise", "white"]
-
-x = 30
-for d, l in zip(dark_colors, light_colors):
-    codes[d] = esc + "%im" % x
-    codes[l] = esc + "%i;01m" % x
-    x += 1
-
-del d, l, x
-
-codes["darkteal"]   = codes["turquoise"]
-codes["darkyellow"] = codes["brown"]
-codes["fuscia"]     = codes["fuchsia"]
-codes["white"]      = codes["bold"]
-
-
-def reset_color():
-    return codes["reset"]
-
-
-def colorize(color_key, text):
-    return codes[color_key] + text + codes["reset"]
-
-
-def ansiformat(attr, text):
-    """
-    Format ``text`` with a color and/or some attributes::
-
-        color       normal color
-        *color*     bold color
-        _color_     underlined color
-        +color+     blinking color
-    """
-    result = []
-    if attr[:1] == attr[-1:] == '+':
-        result.append(codes['blink'])
-        attr = attr[1:-1]
-    if attr[:1] == attr[-1:] == '*':
-        result.append(codes['bold'])
-        attr = attr[1:-1]
-    if attr[:1] == attr[-1:] == '_':
-        result.append(codes['underline'])
-        attr = attr[1:-1]
-    result.append(codes[attr])
-    result.append(text)
-    result.append(codes['reset'])
-    return ''.join(result)
diff --git a/python/ext-libs/pygments/filter.py b/python/ext-libs/pygments/filter.py
deleted file mode 100644
index 0b9224f..0000000
--- a/python/ext-libs/pygments/filter.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.filter
-    ~~~~~~~~~~~~~~~
-
-    Module that implements the default filter.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-
-def apply_filters(stream, filters, lexer=None):
-    """
-    Use this method to apply an iterable of filters to
-    a stream. If lexer is given it's forwarded to the
-    filter, otherwise the filter receives `None`.
-    """
-    def _apply(filter_, stream):
-        for token in filter_.filter(lexer, stream):
-            yield token
-    for filter_ in filters:
-        stream = _apply(filter_, stream)
-    return stream
-
-
-def simplefilter(f):
-    """
-    Decorator that converts a function into a filter::
-
-        @simplefilter
-        def lowercase(lexer, stream, options):
-            for ttype, value in stream:
-                yield ttype, value.lower()
-    """
-    return type(f.__name__, (FunctionFilter,), {
-                'function':     f,
-                '__module__':   getattr(f, '__module__'),
-                '__doc__':      f.__doc__
-            })
-
-
-class Filter(object):
-    """
-    Default filter. Subclass this class or use the `simplefilter`
-    decorator to create own filters.
-    """
-
-    def __init__(self, **options):
-        self.options = options
-
-    def filter(self, lexer, stream):
-        raise NotImplementedError()
-
-
-class FunctionFilter(Filter):
-    """
-    Abstract class used by `simplefilter` to create simple
-    function filters on the fly. The `simplefilter` decorator
-    automatically creates subclasses of this class for
-    functions passed to it.
-    """
-    function = None
-
-    def __init__(self, **options):
-        if not hasattr(self, 'function'):
-            raise TypeError('%r used without bound function' %
-                            self.__class__.__name__)
-        Filter.__init__(self, **options)
-
-    def filter(self, lexer, stream):
-        # pylint: disable-msg=E1102
-        for ttype, value in self.function(lexer, stream, self.options):
-            yield ttype, value
diff --git a/python/ext-libs/pygments/filters/__init__.py b/python/ext-libs/pygments/filters/__init__.py
deleted file mode 100644
index f12d025..0000000
--- a/python/ext-libs/pygments/filters/__init__.py
+++ /dev/null
@@ -1,356 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.filters
-    ~~~~~~~~~~~~~~~~
-
-    Module containing filter lookup functions and default
-    filters.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
-    string_to_tokentype
-from pygments.filter import Filter
-from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
-     get_choice_opt, ClassNotFound, OptionError
-from pygments.plugin import find_plugin_filters
-
-
-def find_filter_class(filtername):
-    """
-    Lookup a filter by name. Return None if not found.
-    """
-    if filtername in FILTERS:
-        return FILTERS[filtername]
-    for name, cls in find_plugin_filters():
-        if name == filtername:
-            return cls
-    return None
-
-
-def get_filter_by_name(filtername, **options):
-    """
-    Return an instantiated filter. Options are passed to the filter
-    initializer if wanted. Raise a ClassNotFound if not found.
-    """
-    cls = find_filter_class(filtername)
-    if cls:
-        return cls(**options)
-    else:
-        raise ClassNotFound('filter %r not found' % filtername)
-
-
-def get_all_filters():
-    """
-    Return a generator of all filter names.
-    """
-    for name in FILTERS:
-        yield name
-    for name, _ in find_plugin_filters():
-        yield name
-
-
-def _replace_special(ttype, value, regex, specialttype,
-                     replacefunc=lambda x: x):
-    last = 0
-    for match in regex.finditer(value):
-        start, end = match.start(), match.end()
-        if start != last:
-            yield ttype, value[last:start]
-        yield specialttype, replacefunc(value[start:end])
-        last = end
-    if last != len(value):
-        yield ttype, value[last:]
-
-
-class CodeTagFilter(Filter):
-    """
-    Highlight special code tags in comments and docstrings.
-
-    Options accepted:
-
-    `codetags` : list of strings
-       A list of strings that are flagged as code tags.  The default is to
-       highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
-    """
-
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        tags = get_list_opt(options, 'codetags',
-                            ['XXX', 'TODO', 'BUG', 'NOTE'])
-        self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
-            re.escape(tag) for tag in tags if tag
-        ]))
-
-    def filter(self, lexer, stream):
-        regex = self.tag_re
-        for ttype, value in stream:
-            if ttype in String.Doc or \
-               ttype in Comment and \
-               ttype not in Comment.Preproc:
-                for sttype, svalue in _replace_special(ttype, value, regex,
-                                                       Comment.Special):
-                    yield sttype, svalue
-            else:
-                yield ttype, value
-
-
-class KeywordCaseFilter(Filter):
-    """
-    Convert keywords to lowercase or uppercase or capitalize them, which
-    means first letter uppercase, rest lowercase.
-
-    This can be useful e.g. if you highlight Pascal code and want to adapt the
-    code to your styleguide.
-
-    Options accepted:
-
-    `case` : string
-       The casing to convert keywords to. Must be one of ``'lower'``,
-       ``'upper'`` or ``'capitalize'``.  The default is ``'lower'``.
-    """
-
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
-        self.convert = getattr(unicode, case)
-
-    def filter(self, lexer, stream):
-        for ttype, value in stream:
-            if ttype in Keyword:
-                yield ttype, self.convert(value)
-            else:
-                yield ttype, value
-
-
-class NameHighlightFilter(Filter):
-    """
-    Highlight a normal Name token with a different token type.
-
-    Example::
-
-        filter = NameHighlightFilter(
-            names=['foo', 'bar', 'baz'],
-            tokentype=Name.Function,
-        )
-
-    This would highlight the names "foo", "bar" and "baz"
-    as functions. `Name.Function` is the default token type.
-
-    Options accepted:
-
-    `names` : list of strings
-      A list of names that should be given the different token type.
-      There is no default.
-    `tokentype` : TokenType or string
-      A token type or a string containing a token type name that is
-      used for highlighting the strings in `names`.  The default is
-      `Name.Function`.
-    """
-
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        self.names = set(get_list_opt(options, 'names', []))
-        tokentype = options.get('tokentype')
-        if tokentype:
-            self.tokentype = string_to_tokentype(tokentype)
-        else:
-            self.tokentype = Name.Function
-
-    def filter(self, lexer, stream):
-        for ttype, value in stream:
-            if ttype is Name and value in self.names:
-                yield self.tokentype, value
-            else:
-                yield ttype, value
-
-
-class ErrorToken(Exception):
-    pass
-
-class RaiseOnErrorTokenFilter(Filter):
-    """
-    Raise an exception when the lexer generates an error token.
-
-    Options accepted:
-
-    `excclass` : Exception class
-      The exception class to raise.
-      The default is `pygments.filters.ErrorToken`.
-
-    *New in Pygments 0.8.*
-    """
-
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        self.exception = options.get('excclass', ErrorToken)
-        try:
-            # issubclass() will raise TypeError if first argument is not a class
-            if not issubclass(self.exception, Exception):
-                raise TypeError
-        except TypeError:
-            raise OptionError('excclass option is not an exception class')
-
-    def filter(self, lexer, stream):
-        for ttype, value in stream:
-            if ttype is Error:
-                raise self.exception(value)
-            yield ttype, value
-
-
-class VisibleWhitespaceFilter(Filter):
-    """
-    Convert tabs, newlines and/or spaces to visible characters.
-
-    Options accepted:
-
-    `spaces` : string or bool
-      If this is a one-character string, spaces will be replaces by this string.
-      If it is another true value, spaces will be replaced by ``·`` (unicode
-      MIDDLE DOT).  If it is a false value, spaces will not be replaced.  The
-      default is ``False``.
-    `tabs` : string or bool
-      The same as for `spaces`, but the default replacement character is ``»``
-      (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK).  The default value
-      is ``False``.  Note: this will not work if the `tabsize` option for the
-      lexer is nonzero, as tabs will already have been expanded then.
-    `tabsize` : int
-      If tabs are to be replaced by this filter (see the `tabs` option), this
-      is the total number of characters that a tab should be expanded to.
-      The default is ``8``.
-    `newlines` : string or bool
-      The same as for `spaces`, but the default replacement character is ``¶``
-      (unicode PILCROW SIGN).  The default value is ``False``.
-    `wstokentype` : bool
-      If true, give whitespace the special `Whitespace` token type.  This allows
-      styling the visible whitespace differently (e.g. greyed out), but it can
-      disrupt background colors.  The default is ``True``.
-
-    *New in Pygments 0.8.*
-    """
-
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
-            opt = options.get(name, False)
-            if isinstance(opt, basestring) and len(opt) == 1:
-                setattr(self, name, opt)
-            else:
-                setattr(self, name, (opt and default or ''))
-        tabsize = get_int_opt(options, 'tabsize', 8)
-        if self.tabs:
-            self.tabs += ' '*(tabsize-1)
-        if self.newlines:
-            self.newlines += '\n'
-        self.wstt = get_bool_opt(options, 'wstokentype', True)
-
-    def filter(self, lexer, stream):
-        if self.wstt:
-            spaces = self.spaces or ' '
-            tabs = self.tabs or '\t'
-            newlines = self.newlines or '\n'
-            regex = re.compile(r'\s')
-            def replacefunc(wschar):
-                if wschar == ' ':
-                    return spaces
-                elif wschar == '\t':
-                    return tabs
-                elif wschar == '\n':
-                    return newlines
-                return wschar
-
-            for ttype, value in stream:
-                for sttype, svalue in _replace_special(ttype, value, regex,
-                                                       Whitespace, replacefunc):
-                    yield sttype, svalue
-        else:
-            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
-            # simpler processing
-            for ttype, value in stream:
-                if spaces:
-                    value = value.replace(' ', spaces)
-                if tabs:
-                    value = value.replace('\t', tabs)
-                if newlines:
-                    value = value.replace('\n', newlines)
-                yield ttype, value
-
-
-class GobbleFilter(Filter):
-    """
-    Gobbles source code lines (eats initial characters).
-
-    This filter drops the first ``n`` characters off every line of code.  This
-    may be useful when the source code fed to the lexer is indented by a fixed
-    amount of space that isn't desired in the output.
-
-    Options accepted:
-
-    `n` : int
-       The number of characters to gobble.
-
-    *New in Pygments 1.2.*
-    """
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-        self.n = get_int_opt(options, 'n', 0)
-
-    def gobble(self, value, left):
-        if left < len(value):
-            return value[left:], 0
-        else:
-            return '', left - len(value)
-
-    def filter(self, lexer, stream):
-        n = self.n
-        left = n # How many characters left to gobble.
-        for ttype, value in stream:
-            # Remove ``left`` tokens from first line, ``n`` from all others.
-            parts = value.split('\n')
-            (parts[0], left) = self.gobble(parts[0], left)
-            for i in range(1, len(parts)):
-                (parts[i], left) = self.gobble(parts[i], n)
-            value = '\n'.join(parts)
-
-            if value != '':
-                yield ttype, value
-
-
-class TokenMergeFilter(Filter):
-    """
-    Merges consecutive tokens with the same token type in the output stream of a
-    lexer.
-
-    *New in Pygments 1.2.*
-    """
-    def __init__(self, **options):
-        Filter.__init__(self, **options)
-
-    def filter(self, lexer, stream):
-        current_type = None
-        current_value = None
-        for ttype, value in stream:
-            if ttype is current_type:
-                current_value += value
-            else:
-                if current_type is not None:
-                    yield current_type, current_value
-                current_type = ttype
-                current_value = value
-        if current_type is not None:
-            yield current_type, current_value
-
-
-FILTERS = {
-    'codetagify':     CodeTagFilter,
-    'keywordcase':    KeywordCaseFilter,
-    'highlight':      NameHighlightFilter,
-    'raiseonerror':   RaiseOnErrorTokenFilter,
-    'whitespace':     VisibleWhitespaceFilter,
-    'gobble':         GobbleFilter,
-    'tokenmerge':     TokenMergeFilter,
-}
diff --git a/python/ext-libs/pygments/formatter.py b/python/ext-libs/pygments/formatter.py
deleted file mode 100644
index 4b69f2a..0000000
--- a/python/ext-libs/pygments/formatter.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatter
-    ~~~~~~~~~~~~~~~~~~
-
-    Base formatter class.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import codecs
-
-from pygments.util import get_bool_opt
-from pygments.styles import get_style_by_name
-
-__all__ = ['Formatter']
-
-
-def _lookup_style(style):
-    if isinstance(style, basestring):
-        return get_style_by_name(style)
-    return style
-
-
-class Formatter(object):
-    """
-    Converts a token stream to text.
-
-    Options accepted:
-
-    ``style``
-        The style to use, can be a string or a Style subclass
-        (default: "default"). Not used by e.g. the
-        TerminalFormatter.
-    ``full``
-        Tells the formatter to output a "full" document, i.e.
-        a complete self-contained document. This doesn't have
-        any effect for some formatters (default: false).
-    ``title``
-        If ``full`` is true, the title that should be used to
-        caption the document (default: '').
-    ``encoding``
-        If given, must be an encoding name. This will be used to
-        convert the Unicode token strings to byte strings in the
-        output. If it is "" or None, Unicode strings will be written
-        to the output file, which most file-like objects do not
-        support (default: None).
-    ``outencoding``
-        Overrides ``encoding`` if given.
-    """
-
-    #: Name of the formatter
-    name = None
-
-    #: Shortcuts for the formatter
-    aliases = []
-
-    #: fn match rules
-    filenames = []
-
-    #: If True, this formatter outputs Unicode strings when no encoding
-    #: option is given.
-    unicodeoutput = True
-
-    def __init__(self, **options):
-        self.style = _lookup_style(options.get('style', 'default'))
-        self.full  = get_bool_opt(options, 'full', False)
-        self.title = options.get('title', '')
-        self.encoding = options.get('encoding', None) or None
-        self.encoding = options.get('outencoding', None) or self.encoding
-        self.options = options
-
-    def get_style_defs(self, arg=''):
-        """
-        Return the style definitions for the current style as a string.
-
-        ``arg`` is an additional argument whose meaning depends on the
-        formatter used. Note that ``arg`` can also be a list or tuple
-        for some formatters like the html formatter.
-        """
-        return ''
-
-    def format(self, tokensource, outfile):
-        """
-        Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
-        tuples and write it into ``outfile``.
-        """
-        if self.encoding:
-            # wrap the outfile in a StreamWriter
-            outfile = codecs.lookup(self.encoding)[3](outfile)
-        return self.format_unencoded(tokensource, outfile)
diff --git a/python/ext-libs/pygments/formatters/__init__.py b/python/ext-libs/pygments/formatters/__init__.py
deleted file mode 100644
index d842b96..0000000
--- a/python/ext-libs/pygments/formatters/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters
-    ~~~~~~~~~~~~~~~~~~~
-
-    Pygments formatters.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import os.path
-import fnmatch
-
-from pygments.formatters._mapping import FORMATTERS
-from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound
-
-ns = globals()
-for fcls in FORMATTERS:
-    ns[fcls.__name__] = fcls
-del fcls
-
-__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
-           'get_all_formatters'] + [cls.__name__ for cls in FORMATTERS]
-
-
-_formatter_alias_cache = {}
-_formatter_filename_cache = []
-
-def _init_formatter_cache():
-    if _formatter_alias_cache:
-        return
-    for cls in get_all_formatters():
-        for alias in cls.aliases:
-            _formatter_alias_cache[alias] = cls
-        for fn in cls.filenames:
-            _formatter_filename_cache.append((fn, cls))
-
-
-def find_formatter_class(name):
-    _init_formatter_cache()
-    cls = _formatter_alias_cache.get(name, None)
-    return cls
-
-
-def get_formatter_by_name(name, **options):
-    _init_formatter_cache()
-    cls = _formatter_alias_cache.get(name, None)
-    if not cls:
-        raise ClassNotFound("No formatter found for name %r" % name)
-    return cls(**options)
-
-
-def get_formatter_for_filename(fn, **options):
-    _init_formatter_cache()
-    fn = os.path.basename(fn)
-    for pattern, cls in _formatter_filename_cache:
-        if fnmatch.fnmatch(fn, pattern):
-            return cls(**options)
-    raise ClassNotFound("No formatter found for file name %r" % fn)
-
-
-def get_all_formatters():
-    """Return a generator for all formatters."""
-    for formatter in FORMATTERS:
-        yield formatter
-    for _, formatter in find_plugin_formatters():
-        yield formatter
diff --git a/python/ext-libs/pygments/formatters/_mapping.py b/python/ext-libs/pygments/formatters/_mapping.py
deleted file mode 100644
index a423ba5..0000000
--- a/python/ext-libs/pygments/formatters/_mapping.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters._mapping
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter mapping defintions. This file is generated by itself. Everytime
-    you change something on a builtin formatter defintion, run this script from
-    the formatters folder to update it.
-
-    Do not alter the FORMATTERS dictionary by hand.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-# start
-from pygments.formatters.bbcode import BBCodeFormatter
-from pygments.formatters.html import HtmlFormatter
-from pygments.formatters.img import BmpImageFormatter
-from pygments.formatters.img import GifImageFormatter
-from pygments.formatters.img import ImageFormatter
-from pygments.formatters.img import JpgImageFormatter
-from pygments.formatters.latex import LatexFormatter
-from pygments.formatters.other import NullFormatter
-from pygments.formatters.other import RawTokenFormatter
-from pygments.formatters.rtf import RtfFormatter
-from pygments.formatters.svg import SvgFormatter
-from pygments.formatters.terminal import TerminalFormatter
-from pygments.formatters.terminal256 import Terminal256Formatter
-
-FORMATTERS = {
-    BBCodeFormatter: ('BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
-    BmpImageFormatter: ('img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
-    GifImageFormatter: ('img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
-    HtmlFormatter: ('HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
-    ImageFormatter: ('img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
-    JpgImageFormatter: ('img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
-    LatexFormatter: ('LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
-    NullFormatter: ('Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
-    RawTokenFormatter: ('Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
-    RtfFormatter: ('RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft\xc2\xae Word\xc2\xae documents.'),
-    SvgFormatter: ('SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file.  This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
-    Terminal256Formatter: ('Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
-    TerminalFormatter: ('Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.')
-}
-
-if __name__ == '__main__':
-    import sys
-    import os
-
-    # lookup formatters
-    found_formatters = []
-    imports = []
-    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
-    from pygments.util import docstring_headline
-
-    for filename in os.listdir('.'):
-        if filename.endswith('.py') and not filename.startswith('_'):
-            module_name = 'pygments.formatters.%s' % filename[:-3]
-            print module_name
-            module = __import__(module_name, None, None, [''])
-            for formatter_name in module.__all__:
-                imports.append((module_name, formatter_name))
-                formatter = getattr(module, formatter_name)
-                found_formatters.append(
-                    '%s: %r' % (formatter_name,
-                                (formatter.name,
-                                 tuple(formatter.aliases),
-                                 tuple(formatter.filenames),
-                                 docstring_headline(formatter))))
-    # sort them, that should make the diff files for svn smaller
-    found_formatters.sort()
-    imports.sort()
-
-    # extract useful sourcecode from this file
-    f = open(__file__)
-    try:
-        content = f.read()
-    finally:
-        f.close()
-    header = content[:content.find('# start')]
-    footer = content[content.find("if __name__ == '__main__':"):]
-
-    # write new file
-    f = open(__file__, 'w')
-    f.write(header)
-    f.write('# start\n')
-    f.write('\n'.join(['from %s import %s' % imp for imp in imports]))
-    f.write('\n\n')
-    f.write('FORMATTERS = {\n    %s\n}\n\n' % ',\n    '.join(found_formatters))
-    f.write(footer)
-    f.close()
diff --git a/python/ext-libs/pygments/formatters/bbcode.py b/python/ext-libs/pygments/formatters/bbcode.py
deleted file mode 100644
index 15faff6..0000000
--- a/python/ext-libs/pygments/formatters/bbcode.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.bbcode
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    BBcode formatter.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt
-
-__all__ = ['BBCodeFormatter']
-
-
-class BBCodeFormatter(Formatter):
-    """
-    Format tokens with BBcodes. These formatting codes are used by many
-    bulletin boards, so you can highlight your sourcecode with pygments before
-    posting it there.
-
-    This formatter has no support for background colors and borders, as there
-    are no common BBcode tags for that.
-
-    Some board systems (e.g. phpBB) don't support colors in their [code] tag,
-    so you can't use the highlighting together with that tag.
-    Text in a [code] tag usually is shown with a monospace font (which this
-    formatter can do with the ``monofont`` option) and no spaces (which you
-    need for indentation) are removed.
-
-    Additional options accepted:
-
-    `style`
-        The style to use, can be a string or a Style subclass (default:
-        ``'default'``).
-
-    `codetag`
-        If set to true, put the output into ``[code]`` tags (default:
-        ``false``)
-
-    `monofont`
-        If set to true, add a tag to show the code with a monospace font
-        (default: ``false``).
-    """
-    name = 'BBCode'
-    aliases = ['bbcode', 'bb']
-    filenames = []
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-        self._code = get_bool_opt(options, 'codetag', False)
-        self._mono = get_bool_opt(options, 'monofont', False)
-
-        self.styles = {}
-        self._make_styles()
-
-    def _make_styles(self):
-        for ttype, ndef in self.style:
-            start = end = ''
-            if ndef['color']:
-                start += '[color=#%s]' % ndef['color']
-                end = '[/color]' + end
-            if ndef['bold']:
-                start += '[b]'
-                end = '[/b]' + end
-            if ndef['italic']:
-                start += '[i]'
-                end = '[/i]' + end
-            if ndef['underline']:
-                start += '[u]'
-                end = '[/u]' + end
-            # there are no common BBcodes for background-color and border
-
-            self.styles[ttype] = start, end
-
-    def format_unencoded(self, tokensource, outfile):
-        if self._code:
-            outfile.write('[code]')
-        if self._mono:
-            outfile.write('[font=monospace]')
-
-        lastval = ''
-        lasttype = None
-
-        for ttype, value in tokensource:
-            while ttype not in self.styles:
-                ttype = ttype.parent
-            if ttype == lasttype:
-                lastval += value
-            else:
-                if lastval:
-                    start, end = self.styles[lasttype]
-                    outfile.write(''.join((start, lastval, end)))
-                lastval = value
-                lasttype = ttype
-
-        if lastval:
-            start, end = self.styles[lasttype]
-            outfile.write(''.join((start, lastval, end)))
-
-        if self._mono:
-            outfile.write('[/font]')
-        if self._code:
-            outfile.write('[/code]')
-        if self._code or self._mono:
-            outfile.write('\n')
diff --git a/python/ext-libs/pygments/formatters/html.py b/python/ext-libs/pygments/formatters/html.py
deleted file mode 100644
index 0609693..0000000
--- a/python/ext-libs/pygments/formatters/html.py
+++ /dev/null
@@ -1,821 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.html
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for HTML output.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import os
-import sys
-import os.path
-import StringIO
-
-from pygments.formatter import Formatter
-from pygments.token import Token, Text, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, bytes
-
-try:
-    import ctags
-except ImportError:
-    ctags = None
-
-__all__ = ['HtmlFormatter']
-
-
-_escape_html_table = {
-    ord('&'): u'&',
-    ord('<'): u'<',
-    ord('>'): u'>',
-    ord('"'): u'"',
-    ord("'"): u''',
-}
-
-def escape_html(text, table=_escape_html_table):
-    """Escape &, <, > as well as single and double quotes for HTML."""
-    return text.translate(table)
-
-def get_random_id():
-    """Return a random id for javascript fields."""
-    from random import random
-    from time import time
-    try:
-        from hashlib import sha1 as sha
-    except ImportError:
-        import sha
-        sha = sha.new
-    return sha('%s|%s' % (random(), time())).hexdigest()
-
-
-def _get_ttype_class(ttype):
-    fname = STANDARD_TYPES.get(ttype)
-    if fname:
-        return fname
-    aname = ''
-    while fname is None:
-        aname = '-' + ttype[-1] + aname
-        ttype = ttype.parent
-        fname = STANDARD_TYPES.get(ttype)
-    return fname + aname
-
-
-CSSFILE_TEMPLATE = '''\
-td.linenos { background-color: #f0f0f0; padding-right: 10px; }
-span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; }
-pre { line-height: 125%%; }
-%(styledefs)s
-'''
-
-DOC_HEADER = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
-   "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
-  <title>%(title)s</title>
-  <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
-  <style type="text/css">
-''' + CSSFILE_TEMPLATE + '''
-  </style>
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_HEADER_EXTERNALCSS = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
-   "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
-  <title>%(title)s</title>
-  <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
-  <link rel="stylesheet" href="%(cssfile)s" type="text/css">
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_FOOTER = '''\
-</body>
-</html>
-'''
-
-
-class HtmlFormatter(Formatter):
-    r"""
-    Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
-    in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
-    option.
-
-    If the `linenos` option is set to ``"table"``, the ``<pre>`` is
-    additionally wrapped inside a ``<table>`` which has one row and two
-    cells: one containing the line numbers and one containing the code.
-    Example:
-
-    .. sourcecode:: html
-
-        <div class="highlight" >
-        <table><tr>
-          <td class="linenos" title="click to toggle"
-            onclick="with (this.firstChild.style)
-                     { display = (display == '') ? 'none' : '' }">
-            <pre>1
-            2</pre>
-          </td>
-          <td class="code">
-            <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
-              <span class="Ke">pass</span>
-            </pre>
-          </td>
-        </tr></table></div>
-
-    (whitespace added to improve clarity).
-
-    Wrapping can be disabled using the `nowrap` option.
-
-    A list of lines can be specified using the `hl_lines` option to make these
-    lines highlighted (as of Pygments 0.11).
-
-    With the `full` option, a complete HTML 4 document is output, including
-    the style definitions inside a ``<style>`` tag, or in a separate file if
-    the `cssfile` option is given.
-
-    When `tagsfile` is set to the path of a ctags index file, it is used to
-    generate hyperlinks from names to their definition.  You must enable
-    `anchorlines` and run ctags with the `-n` option for this to work.  The
-    `python-ctags` module from PyPI must be installed to use this feature;
-    otherwise a `RuntimeError` will be raised.
-
-    The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
-    containing CSS rules for the CSS classes used by the formatter. The
-    argument `arg` can be used to specify additional CSS selectors that
-    are prepended to the classes. A call `fmter.get_style_defs('td .code')`
-    would result in the following CSS classes:
-
-    .. sourcecode:: css
-
-        td .code .kw { font-weight: bold; color: #00FF00 }
-        td .code .cm { color: #999999 }
-        ...
-
-    If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
-    `get_style_defs()` method to request multiple prefixes for the tokens:
-
-    .. sourcecode:: python
-
-        formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
-
-    The output would then look like this:
-
-    .. sourcecode:: css
-
-        div.syntax pre .kw,
-        pre.syntax .kw { font-weight: bold; color: #00FF00 }
-        div.syntax pre .cm,
-        pre.syntax .cm { color: #999999 }
-        ...
-
-    Additional options accepted:
-
-    `nowrap`
-        If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
-        tag. This disables most other options (default: ``False``).
-
-    `full`
-        Tells the formatter to output a "full" document, i.e. a complete
-        self-contained document (default: ``False``).
-
-    `title`
-        If `full` is true, the title that should be used to caption the
-        document (default: ``''``).
-
-    `style`
-        The style to use, can be a string or a Style subclass (default:
-        ``'default'``). This option has no effect if the `cssfile`
-        and `noclobber_cssfile` option are given and the file specified in
-        `cssfile` exists.
-
-    `noclasses`
-        If set to true, token ``<span>`` tags will not use CSS classes, but
-        inline styles. This is not recommended for larger pieces of code since
-        it increases output size by quite a bit (default: ``False``).
-
-    `classprefix`
-        Since the token types use relatively short class names, they may clash
-        with some of your own class names. In this case you can use the
-        `classprefix` option to give a string to prepend to all Pygments-generated
-        CSS class names for token types.
-        Note that this option also affects the output of `get_style_defs()`.
-
-    `cssclass`
-        CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
-        If you set this option, the default selector for `get_style_defs()`
-        will be this class.
-
-        *New in Pygments 0.9:* If you select the ``'table'`` line numbers, the
-        wrapping table will have a CSS class of this string plus ``'table'``,
-        the default is accordingly ``'highlighttable'``.
-
-    `cssstyles`
-        Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
-
-    `prestyles`
-        Inline CSS styles for the ``<pre>`` tag (default: ``''``).  *New in
-        Pygments 0.11.*
-
-    `cssfile`
-        If the `full` option is true and this option is given, it must be the
-        name of an external file. If the filename does not include an absolute
-        path, the file's path will be assumed to be relative to the main output
-        file's path, if the latter can be found. The stylesheet is then written
-        to this file instead of the HTML file. *New in Pygments 0.6.*
-
-    `noclobber_cssfile`
-        If `cssfile` is given and the specified file exists, the css file will
-        not be overwritten. This allows the use of the `full` option in
-        combination with a user specified css file. Default is ``False``.
-        *New in Pygments 1.1.*
-
-    `linenos`
-        If set to ``'table'``, output line numbers as a table with two cells,
-        one containing the line numbers, the other the whole code.  This is
-        copy-and-paste-friendly, but may cause alignment problems with some
-        browsers or fonts.  If set to ``'inline'``, the line numbers will be
-        integrated in the ``<pre>`` tag that contains the code (that setting
-        is *new in Pygments 0.8*).
-
-        For compatibility with Pygments 0.7 and earlier, every true value
-        except ``'inline'`` means the same as ``'table'`` (in particular, that
-        means also ``True``).
-
-        The default value is ``False``, which means no line numbers at all.
-
-        **Note:** with the default ("table") line number mechanism, the line
-        numbers and code can have different line heights in Internet Explorer
-        unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
-        CSS property (you get the default line spacing with ``line-height:
-        125%``).
-
-    `hl_lines`
-        Specify a list of lines to be highlighted.  *New in Pygments 0.11.*
-
-    `linenostart`
-        The line number for the first line (default: ``1``).
-
-    `linenostep`
-        If set to a number n > 1, only every nth line number is printed.
-
-    `linenospecial`
-        If set to a number n > 0, every nth line number is given the CSS
-        class ``"special"`` (default: ``0``).
-
-    `nobackground`
-        If set to ``True``, the formatter won't output the background color
-        for the wrapping element (this automatically defaults to ``False``
-        when there is no wrapping element [eg: no argument for the
-        `get_syntax_defs` method given]) (default: ``False``). *New in
-        Pygments 0.6.*
-
-    `lineseparator`
-        This string is output between lines of code. It defaults to ``"\n"``,
-        which is enough to break a line inside ``<pre>`` tags, but you can
-        e.g. set it to ``"<br>"`` to get HTML line breaks. *New in Pygments
-        0.7.*
-
-    `lineanchors`
-        If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
-        output line in an anchor tag with a ``name`` of ``foo-linenumber``.
-        This allows easy linking to certain lines. *New in Pygments 0.9.*
-
-    `linespans`
-        If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
-        output line in a span tag with an ``id`` of ``foo-linenumber``.
-        This allows easy access to lines via javascript. *New in Pygments 1.6.*
-
-    `anchorlinenos`
-        If set to `True`, will wrap line numbers in <a> tags. Used in
-        combination with `linenos` and `lineanchors`.
-
-    `tagsfile`
-        If set to the path of a ctags file, wrap names in anchor tags that
-        link to their definitions. `lineanchors` should be used, and the
-        tags file should specify line numbers (see the `-n` option to ctags).
-        *New in Pygments 1.6.*
-
-    `tagurlformat`
-        A string formatting pattern used to generate links to ctags definitions.
-        Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
-        Defaults to an empty string, resulting in just `#prefix-number` links.
-        *New in Pygments 1.6.*
-
-
-    **Subclassing the HTML formatter**
-
-    *New in Pygments 0.7.*
-
-    The HTML formatter is now built in a way that allows easy subclassing, thus
-    customizing the output HTML code. The `format()` method calls
-    `self._format_lines()` which returns a generator that yields tuples of ``(1,
-    line)``, where the ``1`` indicates that the ``line`` is a line of the
-    formatted source code.
-
-    If the `nowrap` option is set, the generator is the iterated over and the
-    resulting HTML is output.
-
-    Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
-    other generators. These may add some HTML code to the one generated by
-    `_format_lines()`, either by modifying the lines generated by the latter,
-    then yielding them again with ``(1, line)``, and/or by yielding other HTML
-    code before or after the lines, with ``(0, html)``. The distinction between
-    source lines and other code makes it possible to wrap the generator multiple
-    times.
-
-    The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
-
-    A custom `HtmlFormatter` subclass could look like this:
-
-    .. sourcecode:: python
-
-        class CodeHtmlFormatter(HtmlFormatter):
-
-            def wrap(self, source, outfile):
-                return self._wrap_code(source)
-
-            def _wrap_code(self, source):
-                yield 0, '<code>'
-                for i, t in source:
-                    if i == 1:
-                        # it's a line of formatted code
-                        t += '<br>'
-                    yield i, t
-                yield 0, '</code>'
-
-    This results in wrapping the formatted lines with a ``<code>`` tag, where the
-    source lines are broken using ``<br>`` tags.
-
-    After calling `wrap()`, the `format()` method also adds the "line numbers"
-    and/or "full document" wrappers if the respective options are set. Then, all
-    HTML yielded by the wrapped generator is output.
-    """
-
-    name = 'HTML'
-    aliases = ['html']
-    filenames = ['*.html', '*.htm']
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-        self.title = self._decodeifneeded(self.title)
-        self.nowrap = get_bool_opt(options, 'nowrap', False)
-        self.noclasses = get_bool_opt(options, 'noclasses', False)
-        self.classprefix = options.get('classprefix', '')
-        self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
-        self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
-        self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
-        self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
-        self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
-        self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
-        self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
-
-        if self.tagsfile:
-            if not ctags:
-                raise RuntimeError('The "ctags" package must to be installed '
-                                   'to be able to use the "tagsfile" feature.')
-            self._ctags = ctags.CTags(self.tagsfile)
-
-        linenos = options.get('linenos', False)
-        if linenos == 'inline':
-            self.linenos = 2
-        elif linenos:
-            # compatibility with <= 0.7
-            self.linenos = 1
-        else:
-            self.linenos = 0
-        self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
-        self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
-        self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
-        self.nobackground = get_bool_opt(options, 'nobackground', False)
-        self.lineseparator = options.get('lineseparator', '\n')
-        self.lineanchors = options.get('lineanchors', '')
-        self.linespans = options.get('linespans', '')
-        self.anchorlinenos = options.get('anchorlinenos', False)
-        self.hl_lines = set()
-        for lineno in get_list_opt(options, 'hl_lines', []):
-            try:
-                self.hl_lines.add(int(lineno))
-            except ValueError:
-                pass
-
-        self._create_stylesheet()
-
-    def _get_css_class(self, ttype):
-        """Return the css class of this token type prefixed with
-        the classprefix option."""
-        ttypeclass = _get_ttype_class(ttype)
-        if ttypeclass:
-            return self.classprefix + ttypeclass
-        return ''
-
-    def _create_stylesheet(self):
-        t2c = self.ttype2class = {Token: ''}
-        c2s = self.class2style = {}
-        for ttype, ndef in self.style:
-            name = self._get_css_class(ttype)
-            style = ''
-            if ndef['color']:
-                style += 'color: #%s; ' % ndef['color']
-            if ndef['bold']:
-                style += 'font-weight: bold; '
-            if ndef['italic']:
-                style += 'font-style: italic; '
-            if ndef['underline']:
-                style += 'text-decoration: underline; '
-            if ndef['bgcolor']:
-                style += 'background-color: #%s; ' % ndef['bgcolor']
-            if ndef['border']:
-                style += 'border: 1px solid #%s; ' % ndef['border']
-            if style:
-                t2c[ttype] = name
-                # save len(ttype) to enable ordering the styles by
-                # hierarchy (necessary for CSS cascading rules!)
-                c2s[name] = (style[:-2], ttype, len(ttype))
-
-    def get_style_defs(self, arg=None):
-        """
-        Return CSS style definitions for the classes produced by the current
-        highlighting style. ``arg`` can be a string or list of selectors to
-        insert before the token type classes.
-        """
-        if arg is None:
-            arg = ('cssclass' in self.options and '.'+self.cssclass or '')
-        if isinstance(arg, basestring):
-            args = [arg]
-        else:
-            args = list(arg)
-
-        def prefix(cls):
-            if cls:
-                cls = '.' + cls
-            tmp = []
-            for arg in args:
-                tmp.append((arg and arg + ' ' or '') + cls)
-            return ', '.join(tmp)
-
-        styles = [(level, ttype, cls, style)
-                  for cls, (style, ttype, level) in self.class2style.iteritems()
-                  if cls and style]
-        styles.sort()
-        lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
-                 for (level, ttype, cls, style) in styles]
-        if arg and not self.nobackground and \
-           self.style.background_color is not None:
-            text_style = ''
-            if Text in self.ttype2class:
-                text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
-            lines.insert(0, '%s { background: %s;%s }' %
-                         (prefix(''), self.style.background_color, text_style))
-        if self.style.highlight_color is not None:
-            lines.insert(0, '%s.hll { background-color: %s }' %
-                         (prefix(''), self.style.highlight_color))
-        return '\n'.join(lines)
-
-    def _decodeifneeded(self, value):
-        if isinstance(value, bytes):
-            if self.encoding:
-                return value.decode(self.encoding)
-            return value.decode()
-        return value
-
-    def _wrap_full(self, inner, outfile):
-        if self.cssfile:
-            if os.path.isabs(self.cssfile):
-                # it's an absolute filename
-                cssfilename = self.cssfile
-            else:
-                try:
-                    filename = outfile.name
-                    if not filename or filename[0] == '<':
-                        # pseudo files, e.g. name == '<fdopen>'
-                        raise AttributeError
-                    cssfilename = os.path.join(os.path.dirname(filename),
-                                               self.cssfile)
-                except AttributeError:
-                    print >>sys.stderr, 'Note: Cannot determine output file name, ' \
-                          'using current directory as base for the CSS file name'
-                    cssfilename = self.cssfile
-            # write CSS file only if noclobber_cssfile isn't given as an option.
-            try:
-                if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
-                    cf = open(cssfilename, "w")
-                    cf.write(CSSFILE_TEMPLATE %
-                            {'styledefs': self.get_style_defs('body')})
-                    cf.close()
-            except IOError, err:
-                err.strerror = 'Error writing CSS file: ' + err.strerror
-                raise
-
-            yield 0, (DOC_HEADER_EXTERNALCSS %
-                      dict(title     = self.title,
-                           cssfile   = self.cssfile,
-                           encoding  = self.encoding))
-        else:
-            yield 0, (DOC_HEADER %
-                      dict(title     = self.title,
-                           styledefs = self.get_style_defs('body'),
-                           encoding  = self.encoding))
-
-        for t, line in inner:
-            yield t, line
-        yield 0, DOC_FOOTER
-
-    def _wrap_tablelinenos(self, inner):
-        dummyoutfile = StringIO.StringIO()
-        lncount = 0
-        for t, line in inner:
-            if t:
-                lncount += 1
-            dummyoutfile.write(line)
-
-        fl = self.linenostart
-        mw = len(str(lncount + fl - 1))
-        sp = self.linenospecial
-        st = self.linenostep
-        la = self.lineanchors
-        aln = self.anchorlinenos
-        nocls = self.noclasses
-        if sp:
-            lines = []
-
-            for i in range(fl, fl+lncount):
-                if i % st == 0:
-                    if i % sp == 0:
-                        if aln:
-                            lines.append('<a href="#%s-%d" class="special">%*d</a>' %
-                                         (la, i, mw, i))
-                        else:
-                            lines.append('<span class="special">%*d</span>' % (mw, i))
-                    else:
-                        if aln:
-                            lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
-                        else:
-                            lines.append('%*d' % (mw, i))
-                else:
-                    lines.append('')
-            ls = '\n'.join(lines)
-        else:
-            lines = []
-            for i in range(fl, fl+lncount):
-                if i % st == 0:
-                    if aln:
-                        lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
-                    else:
-                        lines.append('%*d' % (mw, i))
-                else:
-                    lines.append('')
-            ls = '\n'.join(lines)
-
-        # in case you wonder about the seemingly redundant <div> here: since the
-        # content in the other cell also is wrapped in a div, some browsers in
-        # some configurations seem to mess up the formatting...
-        if nocls:
-            yield 0, ('<table class="%stable">' % self.cssclass +
-                      '<tr><td><div class="linenodiv" '
-                      'style="background-color: #f0f0f0; padding-right: 10px">'
-                      '<pre style="line-height: 125%">' +
-                      ls + '</pre></div></td><td class="code">')
-        else:
-            yield 0, ('<table class="%stable">' % self.cssclass +
-                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
-                      ls + '</pre></div></td><td class="code">')
-        yield 0, dummyoutfile.getvalue()
-        yield 0, '</td></tr></table>'
-
-    def _wrap_inlinelinenos(self, inner):
-        # need a list of lines since we need the width of a single number :(
-        lines = list(inner)
-        sp = self.linenospecial
-        st = self.linenostep
-        num = self.linenostart
-        mw = len(str(len(lines) + num - 1))
-
-        if self.noclasses:
-            if sp:
-                for t, line in lines:
-                    if num%sp == 0:
-                        style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
-                    else:
-                        style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
-                    yield 1, '<span style="%s">%*s</span> ' % (
-                        style, mw, (num%st and ' ' or num)) + line
-                    num += 1
-            else:
-                for t, line in lines:
-                    yield 1, ('<span style="background-color: #f0f0f0; '
-                              'padding: 0 5px 0 5px">%*s</span> ' % (
-                              mw, (num%st and ' ' or num)) + line)
-                    num += 1
-        elif sp:
-            for t, line in lines:
-                yield 1, '<span class="lineno%s">%*s</span> ' % (
-                    num%sp == 0 and ' special' or '', mw,
-                    (num%st and ' ' or num)) + line
-                num += 1
-        else:
-            for t, line in lines:
-                yield 1, '<span class="lineno">%*s</span> ' % (
-                    mw, (num%st and ' ' or num)) + line
-                num += 1
-
-    def _wrap_lineanchors(self, inner):
-        s = self.lineanchors
-        i = self.linenostart - 1 # subtract 1 since we have to increment i
-                                 # *before* yielding
-        for t, line in inner:
-            if t:
-                i += 1
-                yield 1, '<a name="%s-%d"></a>' % (s, i) + line
-            else:
-                yield 0, line
-
-    def _wrap_linespans(self, inner):
-        s = self.linespans
-        i = self.linenostart - 1
-        for t, line in inner:
-            if t:
-                i += 1
-                yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
-            else:
-                yield 0, line
-
-    def _wrap_div(self, inner):
-        style = []
-        if (self.noclasses and not self.nobackground and
-            self.style.background_color is not None):
-            style.append('background: %s' % (self.style.background_color,))
-        if self.cssstyles:
-            style.append(self.cssstyles)
-        style = '; '.join(style)
-
-        yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass)
-                  + (style and (' style="%s"' % style)) + '>')
-        for tup in inner:
-            yield tup
-        yield 0, '</div>\n'
-
-    def _wrap_pre(self, inner):
-        style = []
-        if self.prestyles:
-            style.append(self.prestyles)
-        if self.noclasses:
-            style.append('line-height: 125%')
-        style = '; '.join(style)
-
-        yield 0, ('<pre' + (style and ' style="%s"' % style) + '>')
-        for tup in inner:
-            yield tup
-        yield 0, '</pre>'
-
-    def _format_lines(self, tokensource):
-        """
-        Just format the tokens, without any wrapping tags.
-        Yield individual lines.
-        """
-        nocls = self.noclasses
-        lsep = self.lineseparator
-        # for <span style=""> lookup only
-        getcls = self.ttype2class.get
-        c2s = self.class2style
-        escape_table = _escape_html_table
-        tagsfile = self.tagsfile
-
-        lspan = ''
-        line = ''
-        for ttype, value in tokensource:
-            if nocls:
-                cclass = getcls(ttype)
-                while cclass is None:
-                    ttype = ttype.parent
-                    cclass = getcls(ttype)
-                cspan = cclass and '<span style="%s">' % c2s[cclass][0] or ''
-            else:
-                cls = self._get_css_class(ttype)
-                cspan = cls and '<span class="%s">' % cls or ''
-
-            parts = value.translate(escape_table).split('\n')
-
-            if tagsfile and ttype in Token.Name:
-                filename, linenumber = self._lookup_ctag(value)
-                if linenumber:
-                    base, filename = os.path.split(filename)
-                    if base:
-                        base += '/'
-                    filename, extension = os.path.splitext(filename)
-                    url = self.tagurlformat % {'path': base, 'fname': filename,
-                                               'fext': extension}
-                    parts[0] = "<a href=\"%s#%s-%d\">%s" % \
-                        (url, self.lineanchors, linenumber, parts[0])
-                    parts[-1] = parts[-1] + "</a>"
-
-            # for all but the last line
-            for part in parts[:-1]:
-                if line:
-                    if lspan != cspan:
-                        line += (lspan and '</span>') + cspan + part + \
-                                (cspan and '</span>') + lsep
-                    else: # both are the same
-                        line += part + (lspan and '</span>') + lsep
-                    yield 1, line
-                    line = ''
-                elif part:
-                    yield 1, cspan + part + (cspan and '</span>') + lsep
-                else:
-                    yield 1, lsep
-            # for the last line
-            if line and parts[-1]:
-                if lspan != cspan:
-                    line += (lspan and '</span>') + cspan + parts[-1]
-                    lspan = cspan
-                else:
-                    line += parts[-1]
-            elif parts[-1]:
-                line = cspan + parts[-1]
-                lspan = cspan
-            # else we neither have to open a new span nor set lspan
-
-        if line:
-            yield 1, line + (lspan and '</span>') + lsep
-
-    def _lookup_ctag(self, token):
-        entry = ctags.TagEntry()
-        if self._ctags.find(entry, token, 0):
-            return entry['file'], entry['lineNumber']
-        else:
-            return None, None
-
-    def _highlight_lines(self, tokensource):
-        """
-        Highlighted the lines specified in the `hl_lines` option by
-        post-processing the token stream coming from `_format_lines`.
-        """
-        hls = self.hl_lines
-
-        for i, (t, value) in enumerate(tokensource):
-            if t != 1:
-                yield t, value
-            if i + 1 in hls: # i + 1 because Python indexes start at 0
-                if self.noclasses:
-                    style = ''
-                    if self.style.highlight_color is not None:
-                        style = (' style="background-color: %s"' %
-                                 (self.style.highlight_color,))
-                    yield 1, '<span%s>%s</span>' % (style, value)
-                else:
-                    yield 1, '<span class="hll">%s</span>' % value
-            else:
-                yield 1, value
-
-    def wrap(self, source, outfile):
-        """
-        Wrap the ``source``, which is a generator yielding
-        individual lines, in custom generators. See docstring
-        for `format`. Can be overridden.
-        """
-        return self._wrap_div(self._wrap_pre(source))
-
-    def format_unencoded(self, tokensource, outfile):
-        """
-        The formatting process uses several nested generators; which of
-        them are used is determined by the user's options.
-
-        Each generator should take at least one argument, ``inner``,
-        and wrap the pieces of text generated by this.
-
-        Always yield 2-tuples: (code, text). If "code" is 1, the text
-        is part of the original tokensource being highlighted, if it's
-        0, the text is some piece of wrapping. This makes it possible to
-        use several different wrappers that process the original source
-        linewise, e.g. line number generators.
-        """
-        source = self._format_lines(tokensource)
-        if self.hl_lines:
-            source = self._highlight_lines(source)
-        if not self.nowrap:
-            if self.linenos == 2:
-                source = self._wrap_inlinelinenos(source)
-            if self.lineanchors:
-                source = self._wrap_lineanchors(source)
-            if self.linespans:
-                source = self._wrap_linespans(source)
-            source = self.wrap(source, outfile)
-            if self.linenos == 1:
-                source = self._wrap_tablelinenos(source)
-            if self.full:
-                source = self._wrap_full(source, outfile)
-
-        for t, piece in source:
-            outfile.write(piece)
diff --git a/python/ext-libs/pygments/formatters/img.py b/python/ext-libs/pygments/formatters/img.py
deleted file mode 100644
index 394c3b6..0000000
--- a/python/ext-libs/pygments/formatters/img.py
+++ /dev/null
@@ -1,553 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.img
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for Pixmap output.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt, \
-     get_list_opt, get_choice_opt
-
-# Import this carefully
-try:
-    from PIL import Image, ImageDraw, ImageFont
-    pil_available = True
-except ImportError:
-    pil_available = False
-
-try:
-    import _winreg
-except ImportError:
-    _winreg = None
-
-__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
-           'BmpImageFormatter']
-
-
-# For some unknown reason every font calls it something different
-STYLES = {
-    'NORMAL':     ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
-    'ITALIC':     ['Oblique', 'Italic'],
-    'BOLD':       ['Bold'],
-    'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
-}
-
-# A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
-DEFAULT_FONT_NAME_WIN = 'Courier New'
-
-
-class PilNotAvailable(ImportError):
-    """When Python imaging library is not available"""
-
-
-class FontNotFound(Exception):
-    """When there are no usable fonts specified"""
-
-
-class FontManager(object):
-    """
-    Manages a set of fonts: normal, italic, bold, etc...
-    """
-
-    def __init__(self, font_name, font_size=14):
-        self.font_name = font_name
-        self.font_size = font_size
-        self.fonts = {}
-        self.encoding = None
-        if sys.platform.startswith('win'):
-            if not font_name:
-                self.font_name = DEFAULT_FONT_NAME_WIN
-            self._create_win()
-        else:
-            if not font_name:
-                self.font_name = DEFAULT_FONT_NAME_NIX
-            self._create_nix()
-
-    def _get_nix_font_path(self, name, style):
-        from commands import getstatusoutput
-        exit, out = getstatusoutput('fc-list "%s:style=%s" file' %
-                                    (name, style))
-        if not exit:
-            lines = out.splitlines()
-            if lines:
-                path = lines[0].strip().strip(':')
-                return path
-
-    def _create_nix(self):
-        for name in STYLES['NORMAL']:
-            path = self._get_nix_font_path(self.font_name, name)
-            if path is not None:
-                self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
-                break
-        else:
-            raise FontNotFound('No usable fonts named: "%s"' %
-                               self.font_name)
-        for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
-            for stylename in STYLES[style]:
-                path = self._get_nix_font_path(self.font_name, stylename)
-                if path is not None:
-                    self.fonts[style] = ImageFont.truetype(path, self.font_size)
-                    break
-            else:
-                if style == 'BOLDITALIC':
-                    self.fonts[style] = self.fonts['BOLD']
-                else:
-                    self.fonts[style] = self.fonts['NORMAL']
-
-    def _lookup_win(self, key, basename, styles, fail=False):
-        for suffix in ('', ' (TrueType)'):
-            for style in styles:
-                try:
-                    valname = '%s%s%s' % (basename, style and ' '+style, suffix)
-                    val, _ = _winreg.QueryValueEx(key, valname)
-                    return val
-                except EnvironmentError:
-                    continue
-        else:
-            if fail:
-                raise FontNotFound('Font %s (%s) not found in registry' %
-                                   (basename, styles[0]))
-            return None
-
-    def _create_win(self):
-        try:
-            key = _winreg.OpenKey(
-                _winreg.HKEY_LOCAL_MACHINE,
-                r'Software\Microsoft\Windows NT\CurrentVersion\Fonts')
-        except EnvironmentError:
-            try:
-                key = _winreg.OpenKey(
-                    _winreg.HKEY_LOCAL_MACHINE,
-                    r'Software\Microsoft\Windows\CurrentVersion\Fonts')
-            except EnvironmentError:
-                raise FontNotFound('Can\'t open Windows font registry key')
-        try:
-            path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
-            self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
-            for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
-                path = self._lookup_win(key, self.font_name, STYLES[style])
-                if path:
-                    self.fonts[style] = ImageFont.truetype(path, self.font_size)
-                else:
-                    if style == 'BOLDITALIC':
-                        self.fonts[style] = self.fonts['BOLD']
-                    else:
-                        self.fonts[style] = self.fonts['NORMAL']
-        finally:
-            _winreg.CloseKey(key)
-
-    def get_char_size(self):
-        """
-        Get the character size.
-        """
-        return self.fonts['NORMAL'].getsize('M')
-
-    def get_font(self, bold, oblique):
-        """
-        Get the font based on bold and italic flags.
-        """
-        if bold and oblique:
-            return self.fonts['BOLDITALIC']
-        elif bold:
-            return self.fonts['BOLD']
-        elif oblique:
-            return self.fonts['ITALIC']
-        else:
-            return self.fonts['NORMAL']
-
-
-class ImageFormatter(Formatter):
-    """
-    Create a PNG image from source code. This uses the Python Imaging Library to
-    generate a pixmap from the source code.
-
-    *New in Pygments 0.10.*
-
-    Additional options accepted:
-
-    `image_format`
-        An image format to output to that is recognised by PIL, these include:
-
-        * "PNG" (default)
-        * "JPEG"
-        * "BMP"
-        * "GIF"
-
-    `line_pad`
-        The extra spacing (in pixels) between each line of text.
-
-        Default: 2
-
-    `font_name`
-        The font name to be used as the base font from which others, such as
-        bold and italic fonts will be generated.  This really should be a
-        monospace font to look sane.
-
-        Default: "Bitstream Vera Sans Mono"
-
-    `font_size`
-        The font size in points to be used.
-
-        Default: 14
-
-    `image_pad`
-        The padding, in pixels to be used at each edge of the resulting image.
-
-        Default: 10
-
-    `line_numbers`
-        Whether line numbers should be shown: True/False
-
-        Default: True
-
-    `line_number_start`
-        The line number of the first line.
-
-        Default: 1
-
-    `line_number_step`
-        The step used when printing line numbers.
-
-        Default: 1
-
-    `line_number_bg`
-        The background colour (in "#123456" format) of the line number bar, or
-        None to use the style background color.
-
-        Default: "#eed"
-
-    `line_number_fg`
-        The text color of the line numbers (in "#123456"-like format).
-
-        Default: "#886"
-
-    `line_number_chars`
-        The number of columns of line numbers allowable in the line number
-        margin.
-
-        Default: 2
-
-    `line_number_bold`
-        Whether line numbers will be bold: True/False
-
-        Default: False
-
-    `line_number_italic`
-        Whether line numbers will be italicized: True/False
-
-        Default: False
-
-    `line_number_separator`
-        Whether a line will be drawn between the line number area and the
-        source code area: True/False
-
-        Default: True
-
-    `line_number_pad`
-        The horizontal padding (in pixels) between the line number margin, and
-        the source code area.
-
-        Default: 6
-
-    `hl_lines`
-        Specify a list of lines to be highlighted.  *New in Pygments 1.2.*
-
-        Default: empty list
-
-    `hl_color`
-        Specify the color for highlighting lines.  *New in Pygments 1.2.*
-
-        Default: highlight color of the selected style
-    """
-
-    # Required by the pygments mapper
-    name = 'img'
-    aliases = ['img', 'IMG', 'png']
-    filenames = ['*.png']
-
-    unicodeoutput = False
-
-    default_image_format = 'png'
-
-    def __init__(self, **options):
-        """
-        See the class docstring for explanation of options.
-        """
-        if not pil_available:
-            raise PilNotAvailable(
-                'Python Imaging Library is required for this formatter')
-        Formatter.__init__(self, **options)
-        # Read the style
-        self.styles = dict(self.style)
-        if self.style.background_color is None:
-            self.background_color = '#fff'
-        else:
-            self.background_color = self.style.background_color
-        # Image options
-        self.image_format = get_choice_opt(
-            options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
-            self.default_image_format, normcase=True)
-        self.image_pad = get_int_opt(options, 'image_pad', 10)
-        self.line_pad = get_int_opt(options, 'line_pad', 2)
-        # The fonts
-        fontsize = get_int_opt(options, 'font_size', 14)
-        self.fonts = FontManager(options.get('font_name', ''), fontsize)
-        self.fontw, self.fonth = self.fonts.get_char_size()
-        # Line number options
-        self.line_number_fg = options.get('line_number_fg', '#886')
-        self.line_number_bg = options.get('line_number_bg', '#eed')
-        self.line_number_chars = get_int_opt(options,
-                                        'line_number_chars', 2)
-        self.line_number_bold = get_bool_opt(options,
-                                        'line_number_bold', False)
-        self.line_number_italic = get_bool_opt(options,
-                                        'line_number_italic', False)
-        self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
-        self.line_numbers = get_bool_opt(options, 'line_numbers', True)
-        self.line_number_separator = get_bool_opt(options,
-                                        'line_number_separator', True)
-        self.line_number_step = get_int_opt(options, 'line_number_step', 1)
-        self.line_number_start = get_int_opt(options, 'line_number_start', 1)
-        if self.line_numbers:
-            self.line_number_width = (self.fontw * self.line_number_chars +
-                                   self.line_number_pad * 2)
-        else:
-            self.line_number_width = 0
-        self.hl_lines = []
-        hl_lines_str = get_list_opt(options, 'hl_lines', [])
-        for line in hl_lines_str:
-            try:
-                self.hl_lines.append(int(line))
-            except ValueError:
-                pass
-        self.hl_color = options.get('hl_color',
-                                    self.style.highlight_color) or '#f90'
-        self.drawables = []
-
-    def get_style_defs(self, arg=''):
-        raise NotImplementedError('The -S option is meaningless for the image '
-                                  'formatter. Use -O style=<stylename> instead.')
-
-    def _get_line_height(self):
-        """
-        Get the height of a line.
-        """
-        return self.fonth + self.line_pad
-
-    def _get_line_y(self, lineno):
-        """
-        Get the Y coordinate of a line number.
-        """
-        return lineno * self._get_line_height() + self.image_pad
-
-    def _get_char_width(self):
-        """
-        Get the width of a character.
-        """
-        return self.fontw
-
-    def _get_char_x(self, charno):
-        """
-        Get the X coordinate of a character position.
-        """
-        return charno * self.fontw + self.image_pad + self.line_number_width
-
-    def _get_text_pos(self, charno, lineno):
-        """
-        Get the actual position for a character and line position.
-        """
-        return self._get_char_x(charno), self._get_line_y(lineno)
-
-    def _get_linenumber_pos(self, lineno):
-        """
-        Get the actual position for the start of a line number.
-        """
-        return (self.image_pad, self._get_line_y(lineno))
-
-    def _get_text_color(self, style):
-        """
-        Get the correct color for the token from the style.
-        """
-        if style['color'] is not None:
-            fill = '#' + style['color']
-        else:
-            fill = '#000'
-        return fill
-
-    def _get_style_font(self, style):
-        """
-        Get the correct font for the style.
-        """
-        return self.fonts.get_font(style['bold'], style['italic'])
-
-    def _get_image_size(self, maxcharno, maxlineno):
-        """
-        Get the required image size.
-        """
-        return (self._get_char_x(maxcharno) + self.image_pad,
-                self._get_line_y(maxlineno + 0) + self.image_pad)
-
-    def _draw_linenumber(self, posno, lineno):
-        """
-        Remember a line number drawable to paint later.
-        """
-        self._draw_text(
-            self._get_linenumber_pos(posno),
-            str(lineno).rjust(self.line_number_chars),
-            font=self.fonts.get_font(self.line_number_bold,
-                                     self.line_number_italic),
-            fill=self.line_number_fg,
-        )
-
-    def _draw_text(self, pos, text, font, **kw):
-        """
-        Remember a single drawable tuple to paint later.
-        """
-        self.drawables.append((pos, text, font, kw))
-
-    def _create_drawables(self, tokensource):
-        """
-        Create drawables for the token content.
-        """
-        lineno = charno = maxcharno = 0
-        for ttype, value in tokensource:
-            while ttype not in self.styles:
-                ttype = ttype.parent
-            style = self.styles[ttype]
-            # TODO: make sure tab expansion happens earlier in the chain.  It
-            # really ought to be done on the input, as to do it right here is
-            # quite complex.
-            value = value.expandtabs(4)
-            lines = value.splitlines(True)
-            #print lines
-            for i, line in enumerate(lines):
-                temp = line.rstrip('\n')
-                if temp:
-                    self._draw_text(
-                        self._get_text_pos(charno, lineno),
-                        temp,
-                        font = self._get_style_font(style),
-                        fill = self._get_text_color(style)
-                    )
-                    charno += len(temp)
-                    maxcharno = max(maxcharno, charno)
-                if line.endswith('\n'):
-                    # add a line for each extra line in the value
-                    charno = 0
-                    lineno += 1
-        self.maxcharno = maxcharno
-        self.maxlineno = lineno
-
-    def _draw_line_numbers(self):
-        """
-        Create drawables for the line numbers.
-        """
-        if not self.line_numbers:
-            return
-        for p in xrange(self.maxlineno):
-            n = p + self.line_number_start
-            if (n % self.line_number_step) == 0:
-                self._draw_linenumber(p, n)
-
-    def _paint_line_number_bg(self, im):
-        """
-        Paint the line number background on the image.
-        """
-        if not self.line_numbers:
-            return
-        if self.line_number_fg is None:
-            return
-        draw = ImageDraw.Draw(im)
-        recth = im.size[-1]
-        rectw = self.image_pad + self.line_number_width - self.line_number_pad
-        draw.rectangle([(0, 0),
-                        (rectw, recth)],
-             fill=self.line_number_bg)
-        draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
-        del draw
-
-    def format(self, tokensource, outfile):
-        """
-        Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
-        tuples and write it into ``outfile``.
-
-        This implementation calculates where it should draw each token on the
-        pixmap, then calculates the required pixmap size and draws the items.
-        """
-        self._create_drawables(tokensource)
-        self._draw_line_numbers()
-        im = Image.new(
-            'RGB',
-            self._get_image_size(self.maxcharno, self.maxlineno),
-            self.background_color
-        )
-        self._paint_line_number_bg(im)
-        draw = ImageDraw.Draw(im)
-        # Highlight
-        if self.hl_lines:
-            x = self.image_pad + self.line_number_width - self.line_number_pad + 1
-            recth = self._get_line_height()
-            rectw = im.size[0] - x
-            for linenumber in self.hl_lines:
-                y = self._get_line_y(linenumber - 1)
-                draw.rectangle([(x, y), (x + rectw, y + recth)],
-                               fill=self.hl_color)
-        for pos, value, font, kw in self.drawables:
-            draw.text(pos, value, font=font, **kw)
-        im.save(outfile, self.image_format.upper())
-
-
-# Add one formatter per format, so that the "-f gif" option gives the correct result
-# when used in pygmentize.
-
-class GifImageFormatter(ImageFormatter):
-    """
-    Create a GIF image from source code. This uses the Python Imaging Library to
-    generate a pixmap from the source code.
-
-    *New in Pygments 1.0.* (You could create GIF images before by passing a
-    suitable `image_format` option to the `ImageFormatter`.)
-    """
-
-    name = 'img_gif'
-    aliases = ['gif']
-    filenames = ['*.gif']
-    default_image_format = 'gif'
-
-
-class JpgImageFormatter(ImageFormatter):
-    """
-    Create a JPEG image from source code. This uses the Python Imaging Library to
-    generate a pixmap from the source code.
-
-    *New in Pygments 1.0.* (You could create JPEG images before by passing a
-    suitable `image_format` option to the `ImageFormatter`.)
-    """
-
-    name = 'img_jpg'
-    aliases = ['jpg', 'jpeg']
-    filenames = ['*.jpg']
-    default_image_format = 'jpeg'
-
-
-class BmpImageFormatter(ImageFormatter):
-    """
-    Create a bitmap image from source code. This uses the Python Imaging Library to
-    generate a pixmap from the source code.
-
-    *New in Pygments 1.0.* (You could create bitmap images before by passing a
-    suitable `image_format` option to the `ImageFormatter`.)
-    """
-
-    name = 'img_bmp'
-    aliases = ['bmp', 'bitmap']
-    filenames = ['*.bmp']
-    default_image_format = 'bmp'
diff --git a/python/ext-libs/pygments/formatters/latex.py b/python/ext-libs/pygments/formatters/latex.py
deleted file mode 100644
index 47fd123..0000000
--- a/python/ext-libs/pygments/formatters/latex.py
+++ /dev/null
@@ -1,378 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.latex
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for LaTeX fancyvrb output.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Token, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, StringIO
-
-
-__all__ = ['LatexFormatter']
-
-
-def escape_tex(text, commandprefix):
-    return text.replace('\\', '\x00'). \
-                replace('{', '\x01'). \
-                replace('}', '\x02'). \
-                replace('\x00', r'\%sZbs{}' % commandprefix). \
-                replace('\x01', r'\%sZob{}' % commandprefix). \
-                replace('\x02', r'\%sZcb{}' % commandprefix). \
-                replace('^', r'\%sZca{}' % commandprefix). \
-                replace('_', r'\%sZus{}' % commandprefix). \
-                replace('&', r'\%sZam{}' % commandprefix). \
-                replace('<', r'\%sZlt{}' % commandprefix). \
-                replace('>', r'\%sZgt{}' % commandprefix). \
-                replace('#', r'\%sZsh{}' % commandprefix). \
-                replace('%', r'\%sZpc{}' % commandprefix). \
-                replace('$', r'\%sZdl{}' % commandprefix). \
-                replace('-', r'\%sZhy{}' % commandprefix). \
-                replace("'", r'\%sZsq{}' % commandprefix). \
-                replace('"', r'\%sZdq{}' % commandprefix). \
-                replace('~', r'\%sZti{}' % commandprefix)
-
-
-DOC_TEMPLATE = r'''
-\documentclass{%(docclass)s}
-\usepackage{fancyvrb}
-\usepackage{color}
-\usepackage[%(encoding)s]{inputenc}
-%(preamble)s
-
-%(styledefs)s
-
-\begin{document}
-
-\section*{%(title)s}
-
-%(code)s
-\end{document}
-'''
-
-## Small explanation of the mess below :)
-#
-# The previous version of the LaTeX formatter just assigned a command to
-# each token type defined in the current style.  That obviously is
-# problematic if the highlighted code is produced for a different style
-# than the style commands themselves.
-#
-# This version works much like the HTML formatter which assigns multiple
-# CSS classes to each <span> tag, from the most specific to the least
-# specific token type, thus falling back to the parent token type if one
-# is not defined.  Here, the classes are there too and use the same short
-# forms given in token.STANDARD_TYPES.
-#
-# Highlighted code now only uses one custom command, which by default is
-# \PY and selectable by the commandprefix option (and in addition the
-# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
-# backwards compatibility purposes).
-#
-# \PY has two arguments: the classes, separated by +, and the text to
-# render in that style.  The classes are resolved into the respective
-# style commands by magic, which serves to ignore unknown classes.
-#
-# The magic macros are:
-# * \PY at it, \PY at bf, etc. are unconditionally wrapped around the text
-#   to render in \PY at do.  Their definition determines the style.
-# * \PY at reset resets \PY at it etc. to do nothing.
-# * \PY at toks parses the list of classes, using magic inspired by the
-#   keyval package (but modified to use plusses instead of commas
-#   because fancyvrb redefines commas inside its environments).
-# * \PY at tok processes one class, calling the \PY at tok@classname command
-#   if it exists.
-# * \PY at tok@classname sets the \PY at it etc. to reflect the chosen style
-#   for its class.
-# * \PY resets the style, parses the classnames and then calls \PY at do.
-#
-# Tip: to read this code, print it out in substituted form using e.g.
-# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
-
-STYLE_TEMPLATE = r'''
-\makeatletter
-\def\%(cp)s at reset{\let\%(cp)s at it=\relax \let\%(cp)s at bf=\relax%%
-    \let\%(cp)s at ul=\relax \let\%(cp)s at tc=\relax%%
-    \let\%(cp)s at bc=\relax \let\%(cp)s at ff=\relax}
-\def\%(cp)s at tok#1{\csname %(cp)s at tok@#1\endcsname}
-\def\%(cp)s at toks#1+{\ifx\relax#1\empty\else%%
-    \%(cp)s at tok{#1}\expandafter\%(cp)s at toks\fi}
-\def\%(cp)s at do#1{\%(cp)s at bc{\%(cp)s at tc{\%(cp)s at ul{%%
-    \%(cp)s at it{\%(cp)s at bf{\%(cp)s at ff{#1}}}}}}}
-\def\%(cp)s#1#2{\%(cp)s at reset\%(cp)s at toks#1+\relax+\%(cp)s at do{#2}}
-
-%(styles)s
-
-\def\%(cp)sZbs{\char`\\}
-\def\%(cp)sZus{\char`\_}
-\def\%(cp)sZob{\char`\{}
-\def\%(cp)sZcb{\char`\}}
-\def\%(cp)sZca{\char`\^}
-\def\%(cp)sZam{\char`\&}
-\def\%(cp)sZlt{\char`\<}
-\def\%(cp)sZgt{\char`\>}
-\def\%(cp)sZsh{\char`\#}
-\def\%(cp)sZpc{\char`\%%}
-\def\%(cp)sZdl{\char`\$}
-\def\%(cp)sZhy{\char`\-}
-\def\%(cp)sZsq{\char`\'}
-\def\%(cp)sZdq{\char`\"}
-\def\%(cp)sZti{\char`\~}
-%% for compatibility with earlier versions
-\def\%(cp)sZat{@}
-\def\%(cp)sZlb{[}
-\def\%(cp)sZrb{]}
-\makeatother
-'''
-
-
-def _get_ttype_name(ttype):
-    fname = STANDARD_TYPES.get(ttype)
-    if fname:
-        return fname
-    aname = ''
-    while fname is None:
-        aname = ttype[-1] + aname
-        ttype = ttype.parent
-        fname = STANDARD_TYPES.get(ttype)
-    return fname + aname
-
-
-class LatexFormatter(Formatter):
-    r"""
-    Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
-    standard packages.
-
-    Without the `full` option, code is formatted as one ``Verbatim``
-    environment, like this:
-
-    .. sourcecode:: latex
-
-        \begin{Verbatim}[commandchars=\\{\}]
-        \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
-            \PY{k}{pass}
-        \end{Verbatim}
-
-    The special command used here (``\PY``) and all the other macros it needs
-    are output by the `get_style_defs` method.
-
-    With the `full` option, a complete LaTeX document is output, including
-    the command definitions in the preamble.
-
-    The `get_style_defs()` method of a `LatexFormatter` returns a string
-    containing ``\def`` commands defining the macros needed inside the
-    ``Verbatim`` environments.
-
-    Additional options accepted:
-
-    `style`
-        The style to use, can be a string or a Style subclass (default:
-        ``'default'``).
-
-    `full`
-        Tells the formatter to output a "full" document, i.e. a complete
-        self-contained document (default: ``False``).
-
-    `title`
-        If `full` is true, the title that should be used to caption the
-        document (default: ``''``).
-
-    `docclass`
-        If the `full` option is enabled, this is the document class to use
-        (default: ``'article'``).
-
-    `preamble`
-        If the `full` option is enabled, this can be further preamble commands,
-        e.g. ``\usepackage`` (default: ``''``).
-
-    `linenos`
-        If set to ``True``, output line numbers (default: ``False``).
-
-    `linenostart`
-        The line number for the first line (default: ``1``).
-
-    `linenostep`
-        If set to a number n > 1, only every nth line number is printed.
-
-    `verboptions`
-        Additional options given to the Verbatim environment (see the *fancyvrb*
-        docs for possible values) (default: ``''``).
-
-    `commandprefix`
-        The LaTeX commands used to produce colored output are constructed
-        using this prefix and some letters (default: ``'PY'``).
-        *New in Pygments 0.7.*
-
-        *New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
-
-    `texcomments`
-        If set to ``True``, enables LaTeX comment lines.  That is, LaTex markup
-        in comment tokens is not escaped so that LaTeX can render it (default:
-        ``False``).  *New in Pygments 1.2.*
-
-    `mathescape`
-        If set to ``True``, enables LaTeX math mode escape in comments. That
-        is, ``'$...$'`` inside a comment will trigger math mode (default:
-        ``False``).  *New in Pygments 1.2.*
-    """
-    name = 'LaTeX'
-    aliases = ['latex', 'tex']
-    filenames = ['*.tex']
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-        self.docclass = options.get('docclass', 'article')
-        self.preamble = options.get('preamble', '')
-        self.linenos = get_bool_opt(options, 'linenos', False)
-        self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
-        self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
-        self.verboptions = options.get('verboptions', '')
-        self.nobackground = get_bool_opt(options, 'nobackground', False)
-        self.commandprefix = options.get('commandprefix', 'PY')
-        self.texcomments = get_bool_opt(options, 'texcomments', False)
-        self.mathescape = get_bool_opt(options, 'mathescape', False)
-
-        self._create_stylesheet()
-
-
-    def _create_stylesheet(self):
-        t2n = self.ttype2name = {Token: ''}
-        c2d = self.cmd2def = {}
-        cp = self.commandprefix
-
-        def rgbcolor(col):
-            if col:
-                return ','.join(['%.2f' %(int(col[i] + col[i + 1], 16) / 255.0)
-                                 for i in (0, 2, 4)])
-            else:
-                return '1,1,1'
-
-        for ttype, ndef in self.style:
-            name = _get_ttype_name(ttype)
-            cmndef = ''
-            if ndef['bold']:
-                cmndef += r'\let\$$@bf=\textbf'
-            if ndef['italic']:
-                cmndef += r'\let\$$@it=\textit'
-            if ndef['underline']:
-                cmndef += r'\let\$$@ul=\underline'
-            if ndef['roman']:
-                cmndef += r'\let\$$@ff=\textrm'
-            if ndef['sans']:
-                cmndef += r'\let\$$@ff=\textsf'
-            if ndef['mono']:
-                cmndef += r'\let\$$@ff=\textsf'
-            if ndef['color']:
-                cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
-                           rgbcolor(ndef['color']))
-            if ndef['border']:
-                cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
-                           r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}' %
-                           (rgbcolor(ndef['border']),
-                            rgbcolor(ndef['bgcolor'])))
-            elif ndef['bgcolor']:
-                cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
-                           r'\colorbox[rgb]{%s}{\strut ##1}}' %
-                           rgbcolor(ndef['bgcolor']))
-            if cmndef == '':
-                continue
-            cmndef = cmndef.replace('$$', cp)
-            t2n[ttype] = name
-            c2d[name] = cmndef
-
-    def get_style_defs(self, arg=''):
-        """
-        Return the command sequences needed to define the commands
-        used to format text in the verbatim environment. ``arg`` is ignored.
-        """
-        cp = self.commandprefix
-        styles = []
-        for name, definition in self.cmd2def.iteritems():
-            styles.append(r'\expandafter\def\csname %s at tok@%s\endcsname{%s}' %
-                          (cp, name, definition))
-        return STYLE_TEMPLATE % {'cp': self.commandprefix,
-                                 'styles': '\n'.join(styles)}
-
-    def format_unencoded(self, tokensource, outfile):
-        # TODO: add support for background colors
-        t2n = self.ttype2name
-        cp = self.commandprefix
-
-        if self.full:
-            realoutfile = outfile
-            outfile = StringIO()
-
-        outfile.write(ur'\begin{Verbatim}[commandchars=\\\{\}')
-        if self.linenos:
-            start, step = self.linenostart, self.linenostep
-            outfile.write(u',numbers=left' +
-                          (start and u',firstnumber=%d' % start or u'') +
-                          (step and u',stepnumber=%d' % step or u''))
-        if self.mathescape or self.texcomments:
-            outfile.write(ur',codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}')
-        if self.verboptions:
-            outfile.write(u',' + self.verboptions)
-        outfile.write(u']\n')
-
-        for ttype, value in tokensource:
-            if ttype in Token.Comment:
-                if self.texcomments:
-                    # Try to guess comment starting lexeme and escape it ...
-                    start = value[0:1]
-                    for i in xrange(1, len(value)):
-                        if start[0] != value[i]:
-                            break
-                        start += value[i]
-
-                    value = value[len(start):]
-                    start = escape_tex(start, self.commandprefix)
-
-                    # ... but do not escape inside comment.
-                    value = start + value
-                elif self.mathescape:
-                    # Only escape parts not inside a math environment.
-                    parts = value.split('$')
-                    in_math = False
-                    for i, part in enumerate(parts):
-                        if not in_math:
-                            parts[i] = escape_tex(part, self.commandprefix)
-                        in_math = not in_math
-                    value = '$'.join(parts)
-                else:
-                    value = escape_tex(value, self.commandprefix)
-            else:
-                value = escape_tex(value, self.commandprefix)
-            styles = []
-            while ttype is not Token:
-                try:
-                    styles.append(t2n[ttype])
-                except KeyError:
-                    # not in current style
-                    styles.append(_get_ttype_name(ttype))
-                ttype = ttype.parent
-            styleval = '+'.join(reversed(styles))
-            if styleval:
-                spl = value.split('\n')
-                for line in spl[:-1]:
-                    if line:
-                        outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
-                    outfile.write('\n')
-                if spl[-1]:
-                    outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
-            else:
-                outfile.write(value)
-
-        outfile.write(u'\\end{Verbatim}\n')
-
-        if self.full:
-            realoutfile.write(DOC_TEMPLATE %
-                dict(docclass  = self.docclass,
-                     preamble  = self.preamble,
-                     title     = self.title,
-                     encoding  = self.encoding or 'latin1',
-                     styledefs = self.get_style_defs(),
-                     code      = outfile.getvalue()))
diff --git a/python/ext-libs/pygments/formatters/other.py b/python/ext-libs/pygments/formatters/other.py
deleted file mode 100644
index 1029a7a..0000000
--- a/python/ext-libs/pygments/formatters/other.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.other
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Other formatters: NullFormatter, RawTokenFormatter.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import OptionError, get_choice_opt, b
-from pygments.token import Token
-from pygments.console import colorize
-
-__all__ = ['NullFormatter', 'RawTokenFormatter']
-
-
-class NullFormatter(Formatter):
-    """
-    Output the text unchanged without any formatting.
-    """
-    name = 'Text only'
-    aliases = ['text', 'null']
-    filenames = ['*.txt']
-
-    def format(self, tokensource, outfile):
-        enc = self.encoding
-        for ttype, value in tokensource:
-            if enc:
-                outfile.write(value.encode(enc))
-            else:
-                outfile.write(value)
-
-
-class RawTokenFormatter(Formatter):
-    r"""
-    Format tokens as a raw representation for storing token streams.
-
-    The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
-    be converted to a token stream with the `RawTokenLexer`, described in the
-    `lexer list <lexers.txt>`_.
-
-    Only two options are accepted:
-
-    `compress`
-        If set to ``'gz'`` or ``'bz2'``, compress the output with the given
-        compression algorithm after encoding (default: ``''``).
-    `error_color`
-        If set to a color name, highlight error tokens using that color.  If
-        set but with no value, defaults to ``'red'``.
-        *New in Pygments 0.11.*
-
-    """
-    name = 'Raw tokens'
-    aliases = ['raw', 'tokens']
-    filenames = ['*.raw']
-
-    unicodeoutput = False
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-        if self.encoding:
-            raise OptionError('the raw formatter does not support the '
-                              'encoding option')
-        self.encoding = 'ascii'  # let pygments.format() do the right thing
-        self.compress = get_choice_opt(options, 'compress',
-                                       ['', 'none', 'gz', 'bz2'], '')
-        self.error_color = options.get('error_color', None)
-        if self.error_color is True:
-            self.error_color = 'red'
-        if self.error_color is not None:
-            try:
-                colorize(self.error_color, '')
-            except KeyError:
-                raise ValueError("Invalid color %r specified" %
-                                 self.error_color)
-
-    def format(self, tokensource, outfile):
-        try:
-            outfile.write(b(''))
-        except TypeError:
-            raise TypeError('The raw tokens formatter needs a binary '
-                            'output file')
-        if self.compress == 'gz':
-            import gzip
-            outfile = gzip.GzipFile('', 'wb', 9, outfile)
-            def write(text):
-                outfile.write(text.encode())
-            flush = outfile.flush
-        elif self.compress == 'bz2':
-            import bz2
-            compressor = bz2.BZ2Compressor(9)
-            def write(text):
-                outfile.write(compressor.compress(text.encode()))
-            def flush():
-                outfile.write(compressor.flush())
-                outfile.flush()
-        else:
-            def write(text):
-                outfile.write(text.encode())
-            flush = outfile.flush
-
-        if self.error_color:
-            for ttype, value in tokensource:
-                line = "%s\t%r\n" % (ttype, value)
-                if ttype is Token.Error:
-                    write(colorize(self.error_color, line))
-                else:
-                    write(line)
-        else:
-            for ttype, value in tokensource:
-                write("%s\t%r\n" % (ttype, value))
-        flush()
diff --git a/python/ext-libs/pygments/formatters/rtf.py b/python/ext-libs/pygments/formatters/rtf.py
deleted file mode 100644
index 3efda28..0000000
--- a/python/ext-libs/pygments/formatters/rtf.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.rtf
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    A formatter that generates RTF files.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-
-
-__all__ = ['RtfFormatter']
-
-
-class RtfFormatter(Formatter):
-    """
-    Format tokens as RTF markup. This formatter automatically outputs full RTF
-    documents with color information and other useful stuff. Perfect for Copy and
-    Paste into Microsoft® Word® documents.
-
-    *New in Pygments 0.6.*
-
-    Additional options accepted:
-
-    `style`
-        The style to use, can be a string or a Style subclass (default:
-        ``'default'``).
-
-    `fontface`
-        The used font famliy, for example ``Bitstream Vera Sans``. Defaults to
-        some generic font which is supposed to have fixed width.
-    """
-    name = 'RTF'
-    aliases = ['rtf']
-    filenames = ['*.rtf']
-
-    unicodeoutput = False
-
-    def __init__(self, **options):
-        """
-        Additional options accepted:
-
-        ``fontface``
-            Name of the font used. Could for example be ``'Courier New'``
-            to further specify the default which is ``'\fmodern'``. The RTF
-            specification claims that ``\fmodern`` are "Fixed-pitch serif
-            and sans serif fonts". Hope every RTF implementation thinks
-            the same about modern...
-        """
-        Formatter.__init__(self, **options)
-        self.fontface = options.get('fontface') or ''
-
-    def _escape(self, text):
-        return text.replace('\\', '\\\\') \
-                   .replace('{', '\\{') \
-                   .replace('}', '\\}')
-
-    def _escape_text(self, text):
-        # empty strings, should give a small performance improvment
-        if not text:
-            return ''
-
-        # escape text
-        text = self._escape(text)
-        if self.encoding in ('utf-8', 'utf-16', 'utf-32'):
-            encoding = 'iso-8859-15'
-        else:
-            encoding = self.encoding or 'iso-8859-15'
-
-        buf = []
-        for c in text:
-            if ord(c) > 128:
-                ansic = c.encode(encoding, 'ignore') or '?'
-                if ord(ansic) > 128:
-                    ansic = '\\\'%x' % ord(ansic)
-                else:
-                    ansic = c
-                buf.append(r'\ud{\u%d%s}' % (ord(c), ansic))
-            else:
-                buf.append(str(c))
-
-        return ''.join(buf).replace('\n', '\\par\n')
-
-    def format_unencoded(self, tokensource, outfile):
-        # rtf 1.8 header
-        outfile.write(r'{\rtf1\ansi\deff0'
-                      r'{\fonttbl{\f0\fmodern\fprq1\fcharset0%s;}}'
-                      r'{\colortbl;' % (self.fontface and
-                                        ' ' + self._escape(self.fontface) or
-                                        ''))
-
-        # convert colors and save them in a mapping to access them later.
-        color_mapping = {}
-        offset = 1
-        for _, style in self.style:
-            for color in style['color'], style['bgcolor'], style['border']:
-                if color and color not in color_mapping:
-                    color_mapping[color] = offset
-                    outfile.write(r'\red%d\green%d\blue%d;' % (
-                        int(color[0:2], 16),
-                        int(color[2:4], 16),
-                        int(color[4:6], 16)
-                    ))
-                    offset += 1
-        outfile.write(r'}\f0')
-
-        # highlight stream
-        for ttype, value in tokensource:
-            while not self.style.styles_token(ttype) and ttype.parent:
-                ttype = ttype.parent
-            style = self.style.style_for_token(ttype)
-            buf = []
-            if style['bgcolor']:
-                buf.append(r'\cb%d' % color_mapping[style['bgcolor']])
-            if style['color']:
-                buf.append(r'\cf%d' % color_mapping[style['color']])
-            if style['bold']:
-                buf.append(r'\b')
-            if style['italic']:
-                buf.append(r'\i')
-            if style['underline']:
-                buf.append(r'\ul')
-            if style['border']:
-                buf.append(r'\chbrdr\chcfpat%d' %
-                           color_mapping[style['border']])
-            start = ''.join(buf)
-            if start:
-                outfile.write('{%s ' % start)
-            outfile.write(self._escape_text(value))
-            if start:
-                outfile.write('}')
-
-        outfile.write('}')
diff --git a/python/ext-libs/pygments/formatters/svg.py b/python/ext-libs/pygments/formatters/svg.py
deleted file mode 100644
index 271f22a..0000000
--- a/python/ext-libs/pygments/formatters/svg.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.svg
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for SVG output.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['SvgFormatter']
-
-
-def escape_html(text):
-    """Escape &, <, > as well as single and double quotes for HTML."""
-    return text.replace('&', '&').  \
-                replace('<', '<').   \
-                replace('>', '>').   \
-                replace('"', '"'). \
-                replace("'", ''')
-
-
-class2style = {}
-
-class SvgFormatter(Formatter):
-    """
-    Format tokens as an SVG graphics file.  This formatter is still experimental.
-    Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
-    coordinates containing ``<tspan>`` elements with the individual token styles.
-
-    By default, this formatter outputs a full SVG document including doctype
-    declaration and the ``<svg>`` root element.
-
-    *New in Pygments 0.9.*
-
-    Additional options accepted:
-
-    `nowrap`
-        Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
-        don't add a XML declaration and a doctype.  If true, the `fontfamily`
-        and `fontsize` options are ignored.  Defaults to ``False``.
-
-    `fontfamily`
-        The value to give the wrapping ``<g>`` element's ``font-family``
-        attribute, defaults to ``"monospace"``.
-
-    `fontsize`
-        The value to give the wrapping ``<g>`` element's ``font-size``
-        attribute, defaults to ``"14px"``.
-
-    `xoffset`
-        Starting offset in X direction, defaults to ``0``.
-
-    `yoffset`
-        Starting offset in Y direction, defaults to the font size if it is given
-        in pixels, or ``20`` else.  (This is necessary since text coordinates
-        refer to the text baseline, not the top edge.)
-
-    `ystep`
-        Offset to add to the Y coordinate for each subsequent line.  This should
-        roughly be the text size plus 5.  It defaults to that value if the text
-        size is given in pixels, or ``25`` else.
-
-    `spacehack`
-        Convert spaces in the source to `` ``, which are non-breaking
-        spaces.  SVG provides the ``xml:space`` attribute to control how
-        whitespace inside tags is handled, in theory, the ``preserve`` value
-        could be used to keep all whitespace as-is.  However, many current SVG
-        viewers don't obey that rule, so this option is provided as a workaround
-        and defaults to ``True``.
-    """
-    name = 'SVG'
-    aliases = ['svg']
-    filenames = ['*.svg']
-
-    def __init__(self, **options):
-        # XXX outencoding
-        Formatter.__init__(self, **options)
-        self.nowrap = get_bool_opt(options, 'nowrap', False)
-        self.fontfamily = options.get('fontfamily', 'monospace')
-        self.fontsize = options.get('fontsize', '14px')
-        self.xoffset = get_int_opt(options, 'xoffset', 0)
-        fs = self.fontsize.strip()
-        if fs.endswith('px'): fs = fs[:-2].strip()
-        try:
-            int_fs = int(fs)
-        except:
-            int_fs = 20
-        self.yoffset = get_int_opt(options, 'yoffset', int_fs)
-        self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
-        self.spacehack = get_bool_opt(options, 'spacehack', True)
-        self._stylecache = {}
-
-    def format_unencoded(self, tokensource, outfile):
-        """
-        Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
-        tuples and write it into ``outfile``.
-
-        For our implementation we put all lines in their own 'line group'.
-        """
-        x = self.xoffset
-        y = self.yoffset
-        if not self.nowrap:
-            if self.encoding:
-                outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
-                              self.encoding)
-            else:
-                outfile.write('<?xml version="1.0"?>\n')
-            outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
-                          '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
-                          'svg10.dtd">\n')
-            outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
-            outfile.write('<g font-family="%s" font-size="%s">\n' %
-                          (self.fontfamily, self.fontsize))
-        outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (x, y))
-        for ttype, value in tokensource:
-            style = self._get_style(ttype)
-            tspan = style and '<tspan' + style + '>' or ''
-            tspanend = tspan and '</tspan>' or ''
-            value = escape_html(value)
-            if self.spacehack:
-                value = value.expandtabs().replace(' ', ' ')
-            parts = value.split('\n')
-            for part in parts[:-1]:
-                outfile.write(tspan + part + tspanend)
-                y += self.ystep
-                outfile.write('</text>\n<text x="%s" y="%s" '
-                              'xml:space="preserve">' % (x, y))
-            outfile.write(tspan + parts[-1] + tspanend)
-        outfile.write('</text>')
-
-        if not self.nowrap:
-            outfile.write('</g></svg>\n')
-
-    def _get_style(self, tokentype):
-        if tokentype in self._stylecache:
-            return self._stylecache[tokentype]
-        otokentype = tokentype
-        while not self.style.styles_token(tokentype):
-            tokentype = tokentype.parent
-        value = self.style.style_for_token(tokentype)
-        result = ''
-        if value['color']:
-            result = ' fill="#' + value['color'] + '"'
-        if value['bold']:
-            result += ' font-weight="bold"'
-        if value['italic']:
-            result += ' font-style="italic"'
-        self._stylecache[otokentype] = result
-        return result
diff --git a/python/ext-libs/pygments/formatters/terminal.py b/python/ext-libs/pygments/formatters/terminal.py
deleted file mode 100644
index 94e078f..0000000
--- a/python/ext-libs/pygments/formatters/terminal.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.terminal
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for terminal output with ANSI sequences.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Token, Whitespace
-from pygments.console import ansiformat
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TerminalFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-TERMINAL_COLORS = {
-    Token:              ('',            ''),
-
-    Whitespace:         ('lightgray',   'darkgray'),
-    Comment:            ('lightgray',   'darkgray'),
-    Comment.Preproc:    ('teal',        'turquoise'),
-    Keyword:            ('darkblue',    'blue'),
-    Keyword.Type:       ('teal',        'turquoise'),
-    Operator.Word:      ('purple',      'fuchsia'),
-    Name.Builtin:       ('teal',        'turquoise'),
-    Name.Function:      ('darkgreen',   'green'),
-    Name.Namespace:     ('_teal_',      '_turquoise_'),
-    Name.Class:         ('_darkgreen_', '_green_'),
-    Name.Exception:     ('teal',        'turquoise'),
-    Name.Decorator:     ('darkgray',    'lightgray'),
-    Name.Variable:      ('darkred',     'red'),
-    Name.Constant:      ('darkred',     'red'),
-    Name.Attribute:     ('teal',        'turquoise'),
-    Name.Tag:           ('blue',        'blue'),
-    String:             ('brown',       'brown'),
-    Number:             ('darkblue',    'blue'),
-
-    Generic.Deleted:    ('red',        'red'),
-    Generic.Inserted:   ('darkgreen',  'green'),
-    Generic.Heading:    ('**',         '**'),
-    Generic.Subheading: ('*purple*',   '*fuchsia*'),
-    Generic.Error:      ('red',        'red'),
-
-    Error:              ('_red_',      '_red_'),
-}
-
-
-class TerminalFormatter(Formatter):
-    r"""
-    Format tokens with ANSI color sequences, for output in a text console.
-    Color sequences are terminated at newlines, so that paging the output
-    works correctly.
-
-    The `get_style_defs()` method doesn't do anything special since there is
-    no support for common styles.
-
-    Options accepted:
-
-    `bg`
-        Set to ``"light"`` or ``"dark"`` depending on the terminal's background
-        (default: ``"light"``).
-
-    `colorscheme`
-        A dictionary mapping token types to (lightbg, darkbg) color names or
-        ``None`` (default: ``None`` = use builtin colorscheme).
-    """
-    name = 'Terminal'
-    aliases = ['terminal', 'console']
-    filenames = []
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-        self.darkbg = get_choice_opt(options, 'bg',
-                                     ['light', 'dark'], 'light') == 'dark'
-        self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
-
-    def format(self, tokensource, outfile):
-        # hack: if the output is a terminal and has an encoding set,
-        # use that to avoid unicode encode problems
-        if not self.encoding and hasattr(outfile, "encoding") and \
-           hasattr(outfile, "isatty") and outfile.isatty() and \
-           sys.version_info < (3,):
-            self.encoding = outfile.encoding
-        return Formatter.format(self, tokensource, outfile)
-
-    def format_unencoded(self, tokensource, outfile):
-        for ttype, value in tokensource:
-            color = self.colorscheme.get(ttype)
-            while color is None:
-                ttype = ttype[:-1]
-                color = self.colorscheme.get(ttype)
-            if color:
-                color = color[self.darkbg]
-                spl = value.split('\n')
-                for line in spl[:-1]:
-                    if line:
-                        outfile.write(ansiformat(color, line))
-                    outfile.write('\n')
-                if spl[-1]:
-                    outfile.write(ansiformat(color, spl[-1]))
-            else:
-                outfile.write(value)
diff --git a/python/ext-libs/pygments/formatters/terminal256.py b/python/ext-libs/pygments/formatters/terminal256.py
deleted file mode 100644
index 772ed42..0000000
--- a/python/ext-libs/pygments/formatters/terminal256.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.formatters.terminal256
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Formatter for 256-color terminal output with ANSI sequences.
-
-    RGB-to-XTERM color conversion routines adapted from xterm256-conv
-    tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
-    by Wolfgang Frisch.
-
-    Formatter version 1.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-# TODO:
-#  - Options to map style's bold/underline/italic/border attributes
-#    to some ANSI attrbutes (something like 'italic=underline')
-#  - An option to output "style RGB to xterm RGB/index" conversion table
-#  - An option to indicate that we are running in "reverse background"
-#    xterm. This means that default colors are white-on-black, not
-#    black-on-while, so colors like "white background" need to be converted
-#    to "white background, black foreground", etc...
-
-import sys
-
-from pygments.formatter import Formatter
-
-
-__all__ = ['Terminal256Formatter']
-
-
-class EscapeSequence:
-    def __init__(self, fg=None, bg=None, bold=False, underline=False):
-        self.fg = fg
-        self.bg = bg
-        self.bold = bold
-        self.underline = underline
-
-    def escape(self, attrs):
-        if len(attrs):
-            return "\x1b[" + ";".join(attrs) + "m"
-        return ""
-
-    def color_string(self):
-        attrs = []
-        if self.fg is not None:
-            attrs.extend(("38", "5", "%i" % self.fg))
-        if self.bg is not None:
-            attrs.extend(("48", "5", "%i" % self.bg))
-        if self.bold:
-            attrs.append("01")
-        if self.underline:
-            attrs.append("04")
-        return self.escape(attrs)
-
-    def reset_string(self):
-        attrs = []
-        if self.fg is not None:
-            attrs.append("39")
-        if self.bg is not None:
-            attrs.append("49")
-        if self.bold or self.underline:
-            attrs.append("00")
-        return self.escape(attrs)
-
-class Terminal256Formatter(Formatter):
-    r"""
-    Format tokens with ANSI color sequences, for output in a 256-color
-    terminal or console. Like in `TerminalFormatter` color sequences
-    are terminated at newlines, so that paging the output works correctly.
-
-    The formatter takes colors from a style defined by the `style` option
-    and converts them to nearest ANSI 256-color escape sequences. Bold and
-    underline attributes from the style are preserved (and displayed).
-
-    *New in Pygments 0.9.*
-
-    Options accepted:
-
-    `style`
-        The style to use, can be a string or a Style subclass (default:
-        ``'default'``).
-    """
-    name = 'Terminal256'
-    aliases = ['terminal256', 'console256', '256']
-    filenames = []
-
-    def __init__(self, **options):
-        Formatter.__init__(self, **options)
-
-        self.xterm_colors = []
-        self.best_match = {}
-        self.style_string = {}
-
-        self.usebold = 'nobold' not in options
-        self.useunderline = 'nounderline' not in options
-
-        self._build_color_table() # build an RGB-to-256 color conversion table
-        self._setup_styles() # convert selected style's colors to term. colors
-
-    def _build_color_table(self):
-        # colors 0..15: 16 basic colors
-
-        self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
-        self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
-        self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
-        self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
-        self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
-        self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
-        self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
-        self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
-        self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
-        self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
-        self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
-        self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
-        self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
-        self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
-        self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
-        self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
-
-        # colors 16..232: the 6x6x6 color cube
-
-        valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
-
-        for i in range(217):
-            r = valuerange[(i // 36) % 6]
-            g = valuerange[(i // 6) % 6]
-            b = valuerange[i % 6]
-            self.xterm_colors.append((r, g, b))
-
-        # colors 233..253: grayscale
-
-        for i in range(1, 22):
-            v = 8 + i * 10
-            self.xterm_colors.append((v, v, v))
-
-    def _closest_color(self, r, g, b):
-        distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
-        match = 0
-
-        for i in range(0, 254):
-            values = self.xterm_colors[i]
-
-            rd = r - values[0]
-            gd = g - values[1]
-            bd = b - values[2]
-            d = rd*rd + gd*gd + bd*bd
-
-            if d < distance:
-                match = i
-                distance = d
-        return match
-
-    def _color_index(self, color):
-        index = self.best_match.get(color, None)
-        if index is None:
-            try:
-                rgb = int(str(color), 16)
-            except ValueError:
-                rgb = 0
-
-            r = (rgb >> 16) & 0xff
-            g = (rgb >> 8) & 0xff
-            b = rgb & 0xff
-            index = self._closest_color(r, g, b)
-            self.best_match[color] = index
-        return index
-
-    def _setup_styles(self):
-        for ttype, ndef in self.style:
-            escape = EscapeSequence()
-            if ndef['color']:
-                escape.fg = self._color_index(ndef['color'])
-            if ndef['bgcolor']:
-                escape.bg = self._color_index(ndef['bgcolor'])
-            if self.usebold and ndef['bold']:
-                escape.bold = True
-            if self.useunderline and ndef['underline']:
-                escape.underline = True
-            self.style_string[str(ttype)] = (escape.color_string(),
-                                             escape.reset_string())
-
-    def format(self, tokensource, outfile):
-        # hack: if the output is a terminal and has an encoding set,
-        # use that to avoid unicode encode problems
-        if not self.encoding and hasattr(outfile, "encoding") and \
-           hasattr(outfile, "isatty") and outfile.isatty() and \
-           sys.version_info < (3,):
-            self.encoding = outfile.encoding
-        return Formatter.format(self, tokensource, outfile)
-
-    def format_unencoded(self, tokensource, outfile):
-        for ttype, value in tokensource:
-            not_found = True
-            while ttype and not_found:
-                try:
-                    #outfile.write( "<" + str(ttype) + ">" )
-                    on, off = self.style_string[str(ttype)]
-
-                    # Like TerminalFormatter, add "reset colors" escape sequence
-                    # on newline.
-                    spl = value.split('\n')
-                    for line in spl[:-1]:
-                        if line:
-                            outfile.write(on + line + off)
-                        outfile.write('\n')
-                    if spl[-1]:
-                        outfile.write(on + spl[-1] + off)
-
-                    not_found = False
-                    #outfile.write( '#' + str(ttype) + '#' )
-
-                except KeyError:
-                    #ottype = ttype
-                    ttype = ttype[:-1]
-                    #outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
-
-            if not_found:
-                outfile.write(value)
diff --git a/python/ext-libs/pygments/lexer.py b/python/ext-libs/pygments/lexer.py
deleted file mode 100644
index 8f88dfd..0000000
--- a/python/ext-libs/pygments/lexer.py
+++ /dev/null
@@ -1,765 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexer
-    ~~~~~~~~~~~~~~
-
-    Base lexer classes.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import re, itertools
-
-from pygments.filter import apply_filters, Filter
-from pygments.filters import get_filter_by_name
-from pygments.token import Error, Text, Other, _TokenType
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
-     make_analysator
-
-
-__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
-           'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this']
-
-
-_encoding_map = [('\xef\xbb\xbf', 'utf-8'),
-                 ('\xff\xfe\0\0', 'utf-32'),
-                 ('\0\0\xfe\xff', 'utf-32be'),
-                 ('\xff\xfe', 'utf-16'),
-                 ('\xfe\xff', 'utf-16be')]
-
-_default_analyse = staticmethod(lambda x: 0.0)
-
-
-class LexerMeta(type):
-    """
-    This metaclass automagically converts ``analyse_text`` methods into
-    static methods which always return float values.
-    """
-
-    def __new__(cls, name, bases, d):
-        if 'analyse_text' in d:
-            d['analyse_text'] = make_analysator(d['analyse_text'])
-        return type.__new__(cls, name, bases, d)
-
-
-class Lexer(object):
-    """
-    Lexer for a specific language.
-
-    Basic options recognized:
-    ``stripnl``
-        Strip leading and trailing newlines from the input (default: True).
-    ``stripall``
-        Strip all leading and trailing whitespace from the input
-        (default: False).
-    ``ensurenl``
-        Make sure that the input ends with a newline (default: True).  This
-        is required for some lexers that consume input linewise.
-        *New in Pygments 1.3.*
-    ``tabsize``
-        If given and greater than 0, expand tabs in the input (default: 0).
-    ``encoding``
-        If given, must be an encoding name. This encoding will be used to
-        convert the input string to Unicode, if it is not already a Unicode
-        string (default: ``'latin1'``).
-        Can also be ``'guess'`` to use a simple UTF-8 / Latin1 detection, or
-        ``'chardet'`` to use the chardet library, if it is installed.
-    """
-
-    #: Name of the lexer
-    name = None
-
-    #: Shortcuts for the lexer
-    aliases = []
-
-    #: File name globs
-    filenames = []
-
-    #: Secondary file name globs
-    alias_filenames = []
-
-    #: MIME types
-    mimetypes = []
-
-    #: Priority, should multiple lexers match and no content is provided
-    priority = 0
-
-    __metaclass__ = LexerMeta
-
-    def __init__(self, **options):
-        self.options = options
-        self.stripnl = get_bool_opt(options, 'stripnl', True)
-        self.stripall = get_bool_opt(options, 'stripall', False)
-        self.ensurenl = get_bool_opt(options, 'ensurenl', True)
-        self.tabsize = get_int_opt(options, 'tabsize', 0)
-        self.encoding = options.get('encoding', 'latin1')
-        # self.encoding = options.get('inencoding', None) or self.encoding
-        self.filters = []
-        for filter_ in get_list_opt(options, 'filters', ()):
-            self.add_filter(filter_)
-
-    def __repr__(self):
-        if self.options:
-            return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
-                                                     self.options)
-        else:
-            return '<pygments.lexers.%s>' % self.__class__.__name__
-
-    def add_filter(self, filter_, **options):
-        """
-        Add a new stream filter to this lexer.
-        """
-        if not isinstance(filter_, Filter):
-            filter_ = get_filter_by_name(filter_, **options)
-        self.filters.append(filter_)
-
-    def analyse_text(text):
-        """
-        Has to return a float between ``0`` and ``1`` that indicates
-        if a lexer wants to highlight this text. Used by ``guess_lexer``.
-        If this method returns ``0`` it won't highlight it in any case, if
-        it returns ``1`` highlighting with this lexer is guaranteed.
-
-        The `LexerMeta` metaclass automatically wraps this function so
-        that it works like a static method (no ``self`` or ``cls``
-        parameter) and the return value is automatically converted to
-        `float`. If the return value is an object that is boolean `False`
-        it's the same as if the return values was ``0.0``.
-        """
-
-    def get_tokens(self, text, unfiltered=False):
-        """
-        Return an iterable of (tokentype, value) pairs generated from
-        `text`. If `unfiltered` is set to `True`, the filtering mechanism
-        is bypassed even if filters are defined.
-
-        Also preprocess the text, i.e. expand tabs and strip it if
-        wanted and applies registered filters.
-        """
-        if not isinstance(text, unicode):
-            if self.encoding == 'guess':
-                try:
-                    text = text.decode('utf-8')
-                    if text.startswith(u'\ufeff'):
-                        text = text[len(u'\ufeff'):]
-                except UnicodeDecodeError:
-                    text = text.decode('latin1')
-            elif self.encoding == 'chardet':
-                try:
-                    import chardet
-                except ImportError:
-                    raise ImportError('To enable chardet encoding guessing, '
-                                      'please install the chardet library '
-                                      'from http://chardet.feedparser.org/')
-                # check for BOM first
-                decoded = None
-                for bom, encoding in _encoding_map:
-                    if text.startswith(bom):
-                        decoded = unicode(text[len(bom):], encoding,
-                                          errors='replace')
-                        break
-                # no BOM found, so use chardet
-                if decoded is None:
-                    enc = chardet.detect(text[:1024]) # Guess using first 1KB
-                    decoded = unicode(text, enc.get('encoding') or 'utf-8',
-                                      errors='replace')
-                text = decoded
-            else:
-                text = text.decode(self.encoding)
-        else:
-            if text.startswith(u'\ufeff'):
-                text = text[len(u'\ufeff'):]
-
-        # text now *is* a unicode string
-        text = text.replace('\r\n', '\n')
-        text = text.replace('\r', '\n')
-        if self.stripall:
-            text = text.strip()
-        elif self.stripnl:
-            text = text.strip('\n')
-        if self.tabsize > 0:
-            text = text.expandtabs(self.tabsize)
-        if self.ensurenl and not text.endswith('\n'):
-            text += '\n'
-
-        def streamer():
-            for i, t, v in self.get_tokens_unprocessed(text):
-                yield t, v
-        stream = streamer()
-        if not unfiltered:
-            stream = apply_filters(stream, self.filters, self)
-        return stream
-
-    def get_tokens_unprocessed(self, text):
-        """
-        Return an iterable of (tokentype, value) pairs.
-        In subclasses, implement this method as a generator to
-        maximize effectiveness.
-        """
-        raise NotImplementedError
-
-
-class DelegatingLexer(Lexer):
-    """
-    This lexer takes two lexer as arguments. A root lexer and
-    a language lexer. First everything is scanned using the language
-    lexer, afterwards all ``Other`` tokens are lexed using the root
-    lexer.
-
-    The lexers from the ``template`` lexer package use this base lexer.
-    """
-
-    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
-        self.root_lexer = _root_lexer(**options)
-        self.language_lexer = _language_lexer(**options)
-        self.needle = _needle
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        buffered = ''
-        insertions = []
-        lng_buffer = []
-        for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
-            if t is self.needle:
-                if lng_buffer:
-                    insertions.append((len(buffered), lng_buffer))
-                    lng_buffer = []
-                buffered += v
-            else:
-                lng_buffer.append((i, t, v))
-        if lng_buffer:
-            insertions.append((len(buffered), lng_buffer))
-        return do_insertions(insertions,
-                             self.root_lexer.get_tokens_unprocessed(buffered))
-
-
-#-------------------------------------------------------------------------------
-# RegexLexer and ExtendedRegexLexer
-#
-
-
-class include(str):
-    """
-    Indicates that a state should include rules from another state.
-    """
-    pass
-
-
-class _inherit(object):
-    """
-    Indicates the a state should inherit from its superclass.
-    """
-    def __repr__(self):
-        return 'inherit'
-
-inherit = _inherit()
-
-
-class combined(tuple):
-    """
-    Indicates a state combined from multiple states.
-    """
-
-    def __new__(cls, *args):
-        return tuple.__new__(cls, args)
-
-    def __init__(self, *args):
-        # tuple.__init__ doesn't do anything
-        pass
-
-
-class _PseudoMatch(object):
-    """
-    A pseudo match object constructed from a string.
-    """
-
-    def __init__(self, start, text):
-        self._text = text
-        self._start = start
-
-    def start(self, arg=None):
-        return self._start
-
-    def end(self, arg=None):
-        return self._start + len(self._text)
-
-    def group(self, arg=None):
-        if arg:
-            raise IndexError('No such group')
-        return self._text
-
-    def groups(self):
-        return (self._text,)
-
-    def groupdict(self):
-        return {}
-
-
-def bygroups(*args):
-    """
-    Callback that yields multiple actions for each group in the match.
-    """
-    def callback(lexer, match, ctx=None):
-        for i, action in enumerate(args):
-            if action is None:
-                continue
-            elif type(action) is _TokenType:
-                data = match.group(i + 1)
-                if data:
-                    yield match.start(i + 1), action, data
-            else:
-                data = match.group(i + 1)
-                if data is not None:
-                    if ctx:
-                        ctx.pos = match.start(i + 1)
-                    for item in action(lexer, _PseudoMatch(match.start(i + 1),
-                                       data), ctx):
-                        if item:
-                            yield item
-        if ctx:
-            ctx.pos = match.end()
-    return callback
-
-
-class _This(object):
-    """
-    Special singleton used for indicating the caller class.
-    Used by ``using``.
-    """
-this = _This()
-
-
-def using(_other, **kwargs):
-    """
-    Callback that processes the match with a different lexer.
-
-    The keyword arguments are forwarded to the lexer, except `state` which
-    is handled separately.
-
-    `state` specifies the state that the new lexer will start in, and can
-    be an enumerable such as ('root', 'inline', 'string') or a simple
-    string which is assumed to be on top of the root state.
-
-    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
-    """
-    gt_kwargs = {}
-    if 'state' in kwargs:
-        s = kwargs.pop('state')
-        if isinstance(s, (list, tuple)):
-            gt_kwargs['stack'] = s
-        else:
-            gt_kwargs['stack'] = ('root', s)
-
-    if _other is this:
-        def callback(lexer, match, ctx=None):
-            # if keyword arguments are given the callback
-            # function has to create a new lexer instance
-            if kwargs:
-                # XXX: cache that somehow
-                kwargs.update(lexer.options)
-                lx = lexer.__class__(**kwargs)
-            else:
-                lx = lexer
-            s = match.start()
-            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
-                yield i + s, t, v
-            if ctx:
-                ctx.pos = match.end()
-    else:
-        def callback(lexer, match, ctx=None):
-            # XXX: cache that somehow
-            kwargs.update(lexer.options)
-            lx = _other(**kwargs)
-
-            s = match.start()
-            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
-                yield i + s, t, v
-            if ctx:
-                ctx.pos = match.end()
-    return callback
-
-
-class RegexLexerMeta(LexerMeta):
-    """
-    Metaclass for RegexLexer, creates the self._tokens attribute from
-    self.tokens on the first instantiation.
-    """
-
-    def _process_regex(cls, regex, rflags):
-        """Preprocess the regular expression component of a token definition."""
-        return re.compile(regex, rflags).match
-
-    def _process_token(cls, token):
-        """Preprocess the token component of a token definition."""
-        assert type(token) is _TokenType or callable(token), \
-               'token type must be simple type or callable, not %r' % (token,)
-        return token
-
-    def _process_new_state(cls, new_state, unprocessed, processed):
-        """Preprocess the state transition action of a token definition."""
-        if isinstance(new_state, str):
-            # an existing state
-            if new_state == '#pop':
-                return -1
-            elif new_state in unprocessed:
-                return (new_state,)
-            elif new_state == '#push':
-                return new_state
-            elif new_state[:5] == '#pop:':
-                return -int(new_state[5:])
-            else:
-                assert False, 'unknown new state %r' % new_state
-        elif isinstance(new_state, combined):
-            # combine a new state from existing ones
-            tmp_state = '_tmp_%d' % cls._tmpname
-            cls._tmpname += 1
-            itokens = []
-            for istate in new_state:
-                assert istate != new_state, 'circular state ref %r' % istate
-                itokens.extend(cls._process_state(unprocessed,
-                                                  processed, istate))
-            processed[tmp_state] = itokens
-            return (tmp_state,)
-        elif isinstance(new_state, tuple):
-            # push more than one state
-            for istate in new_state:
-                assert (istate in unprocessed or
-                        istate in ('#pop', '#push')), \
-                       'unknown new state ' + istate
-            return new_state
-        else:
-            assert False, 'unknown new state def %r' % new_state
-
-    def _process_state(cls, unprocessed, processed, state):
-        """Preprocess a single state definition."""
-        assert type(state) is str, "wrong state name %r" % state
-        assert state[0] != '#', "invalid state name %r" % state
-        if state in processed:
-            return processed[state]
-        tokens = processed[state] = []
-        rflags = cls.flags
-        for tdef in unprocessed[state]:
-            if isinstance(tdef, include):
-                # it's a state reference
-                assert tdef != state, "circular state reference %r" % state
-                tokens.extend(cls._process_state(unprocessed, processed,
-                                                 str(tdef)))
-                continue
-            if isinstance(tdef, _inherit):
-                # processed already
-                continue
-
-            assert type(tdef) is tuple, "wrong rule def %r" % tdef
-
-            try:
-                rex = cls._process_regex(tdef[0], rflags)
-            except Exception, err:
-                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
-                                 (tdef[0], state, cls, err))
-
-            token = cls._process_token(tdef[1])
-
-            if len(tdef) == 2:
-                new_state = None
-            else:
-                new_state = cls._process_new_state(tdef[2],
-                                                   unprocessed, processed)
-
-            tokens.append((rex, token, new_state))
-        return tokens
-
-    def process_tokendef(cls, name, tokendefs=None):
-        """Preprocess a dictionary of token definitions."""
-        processed = cls._all_tokens[name] = {}
-        tokendefs = tokendefs or cls.tokens[name]
-        for state in tokendefs.keys():
-            cls._process_state(tokendefs, processed, state)
-        return processed
-
-    def get_tokendefs(cls):
-        """
-        Merge tokens from superclasses in MRO order, returning a single tokendef
-        dictionary.
-
-        Any state that is not defined by a subclass will be inherited
-        automatically.  States that *are* defined by subclasses will, by
-        default, override that state in the superclass.  If a subclass wishes to
-        inherit definitions from a superclass, it can use the special value
-        "inherit", which will cause the superclass' state definition to be
-        included at that point in the state.
-        """
-        tokens = {}
-        inheritable = {}
-        for c in itertools.chain((cls,), cls.__mro__):
-            toks = c.__dict__.get('tokens', {})
-
-            for state, items in toks.iteritems():
-                curitems = tokens.get(state)
-                if curitems is None:
-                    tokens[state] = items
-                    try:
-                        inherit_ndx = items.index(inherit)
-                    except ValueError:
-                        continue
-                    inheritable[state] = inherit_ndx
-                    continue
-
-                inherit_ndx = inheritable.pop(state, None)
-                if inherit_ndx is None:
-                    continue
-
-                # Replace the "inherit" value with the items
-                curitems[inherit_ndx:inherit_ndx+1] = items
-                try:
-                    new_inh_ndx = items.index(inherit)
-                except ValueError:
-                    pass
-                else:
-                    inheritable[state] = inherit_ndx + new_inh_ndx
-
-        return tokens
-
-    def __call__(cls, *args, **kwds):
-        """Instantiate cls after preprocessing its token definitions."""
-        if '_tokens' not in cls.__dict__:
-            cls._all_tokens = {}
-            cls._tmpname = 0
-            if hasattr(cls, 'token_variants') and cls.token_variants:
-                # don't process yet
-                pass
-            else:
-                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
-
-        return type.__call__(cls, *args, **kwds)
-
-
-class RegexLexer(Lexer):
-    """
-    Base for simple stateful regular expression-based lexers.
-    Simplifies the lexing process so that you need only
-    provide a list of states and regular expressions.
-    """
-    __metaclass__ = RegexLexerMeta
-
-    #: Flags for compiling the regular expressions.
-    #: Defaults to MULTILINE.
-    flags = re.MULTILINE
-
-    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
-    #:
-    #: The initial state is 'root'.
-    #: ``new_state`` can be omitted to signify no state transition.
-    #: If it is a string, the state is pushed on the stack and changed.
-    #: If it is a tuple of strings, all states are pushed on the stack and
-    #: the current state will be the topmost.
-    #: It can also be ``combined('state1', 'state2', ...)``
-    #: to signify a new, anonymous state combined from the rules of two
-    #: or more existing ones.
-    #: Furthermore, it can be '#pop' to signify going back one step in
-    #: the state stack, or '#push' to push the current state on the stack
-    #: again.
-    #:
-    #: The tuple can also be replaced with ``include('state')``, in which
-    #: case the rules from the state named by the string are included in the
-    #: current one.
-    tokens = {}
-
-    def get_tokens_unprocessed(self, text, stack=('root',)):
-        """
-        Split ``text`` into (tokentype, text) pairs.
-
-        ``stack`` is the inital stack (default: ``['root']``)
-        """
-        pos = 0
-        tokendefs = self._tokens
-        statestack = list(stack)
-        statetokens = tokendefs[statestack[-1]]
-        while 1:
-            for rexmatch, action, new_state in statetokens:
-                m = rexmatch(text, pos)
-                if m:
-                    if type(action) is _TokenType:
-                        yield pos, action, m.group()
-                    else:
-                        for item in action(self, m):
-                            yield item
-                    pos = m.end()
-                    if new_state is not None:
-                        # state transition
-                        if isinstance(new_state, tuple):
-                            for state in new_state:
-                                if state == '#pop':
-                                    statestack.pop()
-                                elif state == '#push':
-                                    statestack.append(statestack[-1])
-                                else:
-                                    statestack.append(state)
-                        elif isinstance(new_state, int):
-                            # pop
-                            del statestack[new_state:]
-                        elif new_state == '#push':
-                            statestack.append(statestack[-1])
-                        else:
-                            assert False, "wrong state def: %r" % new_state
-                        statetokens = tokendefs[statestack[-1]]
-                    break
-            else:
-                try:
-                    if text[pos] == '\n':
-                        # at EOL, reset state to "root"
-                        statestack = ['root']
-                        statetokens = tokendefs['root']
-                        yield pos, Text, u'\n'
-                        pos += 1
-                        continue
-                    yield pos, Error, text[pos]
-                    pos += 1
-                except IndexError:
-                    break
-
-
-class LexerContext(object):
-    """
-    A helper object that holds lexer position data.
-    """
-
-    def __init__(self, text, pos, stack=None, end=None):
-        self.text = text
-        self.pos = pos
-        self.end = end or len(text) # end=0 not supported ;-)
-        self.stack = stack or ['root']
-
-    def __repr__(self):
-        return 'LexerContext(%r, %r, %r)' % (
-            self.text, self.pos, self.stack)
-
-
-class ExtendedRegexLexer(RegexLexer):
-    """
-    A RegexLexer that uses a context object to store its state.
-    """
-
-    def get_tokens_unprocessed(self, text=None, context=None):
-        """
-        Split ``text`` into (tokentype, text) pairs.
-        If ``context`` is given, use this lexer context instead.
-        """
-        tokendefs = self._tokens
-        if not context:
-            ctx = LexerContext(text, 0)
-            statetokens = tokendefs['root']
-        else:
-            ctx = context
-            statetokens = tokendefs[ctx.stack[-1]]
-            text = ctx.text
-        while 1:
-            for rexmatch, action, new_state in statetokens:
-                m = rexmatch(text, ctx.pos, ctx.end)
-                if m:
-                    if type(action) is _TokenType:
-                        yield ctx.pos, action, m.group()
-                        ctx.pos = m.end()
-                    else:
-                        for item in action(self, m, ctx):
-                            yield item
-                        if not new_state:
-                            # altered the state stack?
-                            statetokens = tokendefs[ctx.stack[-1]]
-                    # CAUTION: callback must set ctx.pos!
-                    if new_state is not None:
-                        # state transition
-                        if isinstance(new_state, tuple):
-                            for state in new_state:
-                                if state == '#pop':
-                                    ctx.stack.pop()
-                                elif state == '#push':
-                                    ctx.stack.append(statestack[-1])
-                                else:
-                                    ctx.stack.append(state)
-                        elif isinstance(new_state, int):
-                            # pop
-                            del ctx.stack[new_state:]
-                        elif new_state == '#push':
-                            ctx.stack.append(ctx.stack[-1])
-                        else:
-                            assert False, "wrong state def: %r" % new_state
-                        statetokens = tokendefs[ctx.stack[-1]]
-                    break
-            else:
-                try:
-                    if ctx.pos >= ctx.end:
-                        break
-                    if text[ctx.pos] == '\n':
-                        # at EOL, reset state to "root"
-                        ctx.stack = ['root']
-                        statetokens = tokendefs['root']
-                        yield ctx.pos, Text, u'\n'
-                        ctx.pos += 1
-                        continue
-                    yield ctx.pos, Error, text[ctx.pos]
-                    ctx.pos += 1
-                except IndexError:
-                    break
-
-
-def do_insertions(insertions, tokens):
-    """
-    Helper for lexers which must combine the results of several
-    sublexers.
-
-    ``insertions`` is a list of ``(index, itokens)`` pairs.
-    Each ``itokens`` iterable should be inserted at position
-    ``index`` into the token stream given by the ``tokens``
-    argument.
-
-    The result is a combined token stream.
-
-    TODO: clean up the code here.
-    """
-    insertions = iter(insertions)
-    try:
-        index, itokens = insertions.next()
-    except StopIteration:
-        # no insertions
-        for item in tokens:
-            yield item
-        return
-
-    realpos = None
-    insleft = True
-
-    # iterate over the token stream where we want to insert
-    # the tokens from the insertion list.
-    for i, t, v in tokens:
-        # first iteration. store the postition of first item
-        if realpos is None:
-            realpos = i
-        oldi = 0
-        while insleft and i + len(v) >= index:
-            tmpval = v[oldi:index - i]
-            yield realpos, t, tmpval
-            realpos += len(tmpval)
-            for it_index, it_token, it_value in itokens:
-                yield realpos, it_token, it_value
-                realpos += len(it_value)
-            oldi = index - i
-            try:
-                index, itokens = insertions.next()
-            except StopIteration:
-                insleft = False
-                break  # not strictly necessary
-        yield realpos, t, v[oldi:]
-        realpos += len(v) - oldi
-
-    # leftover tokens
-    while insleft:
-        # no normal tokens, set realpos to zero
-        realpos = realpos or 0
-        for p, t, v in itokens:
-            yield realpos, t, v
-            realpos += len(v)
-        try:
-            index, itokens = insertions.next()
-        except StopIteration:
-            insleft = False
-            break  # not strictly necessary
diff --git a/python/ext-libs/pygments/lexers/__init__.py b/python/ext-libs/pygments/lexers/__init__.py
deleted file mode 100644
index 9af6ce6..0000000
--- a/python/ext-libs/pygments/lexers/__init__.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers
-    ~~~~~~~~~~~~~~~
-
-    Pygments lexers.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.lexers._mapping import LEXERS
-from pygments.plugin import find_plugin_lexers
-from pygments.util import ClassNotFound, bytes
-
-
-__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
-           'guess_lexer'] + LEXERS.keys()
-
-_lexer_cache = {}
-
-
-def _load_lexers(module_name):
-    """
-    Load a lexer (and all others in the module too).
-    """
-    mod = __import__(module_name, None, None, ['__all__'])
-    for lexer_name in mod.__all__:
-        cls = getattr(mod, lexer_name)
-        _lexer_cache[cls.name] = cls
-
-
-def get_all_lexers():
-    """
-    Return a generator of tuples in the form ``(name, aliases,
-    filenames, mimetypes)`` of all know lexers.
-    """
-    for item in LEXERS.itervalues():
-        yield item[1:]
-    for lexer in find_plugin_lexers():
-        yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
-
-
-def find_lexer_class(name):
-    """
-    Lookup a lexer class by name. Return None if not found.
-    """
-    if name in _lexer_cache:
-        return _lexer_cache[name]
-    # lookup builtin lexers
-    for module_name, lname, aliases, _, _ in LEXERS.itervalues():
-        if name == lname:
-            _load_lexers(module_name)
-            return _lexer_cache[name]
-    # continue with lexers from setuptools entrypoints
-    for cls in find_plugin_lexers():
-        if cls.name == name:
-            return cls
-
-
-def get_lexer_by_name(_alias, **options):
-    """
-    Get a lexer by an alias.
-    """
-    # lookup builtin lexers
-    for module_name, name, aliases, _, _ in LEXERS.itervalues():
-        if _alias in aliases:
-            if name not in _lexer_cache:
-                _load_lexers(module_name)
-            return _lexer_cache[name](**options)
-    # continue with lexers from setuptools entrypoints
-    for cls in find_plugin_lexers():
-        if _alias in cls.aliases:
-            return cls(**options)
-    raise ClassNotFound('no lexer for alias %r found' % _alias)
-
-
-def get_lexer_for_filename(_fn, code=None, **options):
-    """
-    Get a lexer for a filename.  If multiple lexers match the filename
-    pattern, use ``analyze_text()`` to figure out which one is more
-    appropriate.
-    """
-    matches = []
-    fn = basename(_fn)
-    for modname, name, _, filenames, _ in LEXERS.itervalues():
-        for filename in filenames:
-            if fnmatch.fnmatch(fn, filename):
-                if name not in _lexer_cache:
-                    _load_lexers(modname)
-                matches.append((_lexer_cache[name], filename))
-    for cls in find_plugin_lexers():
-        for filename in cls.filenames:
-            if fnmatch.fnmatch(fn, filename):
-                matches.append((cls, filename))
-
-    if sys.version_info > (3,) and isinstance(code, bytes):
-        # decode it, since all analyse_text functions expect unicode
-        code = code.decode('latin1')
-
-    def get_rating(info):
-        cls, filename = info
-        # explicit patterns get a bonus
-        bonus = '*' not in filename and 0.5 or 0
-        # The class _always_ defines analyse_text because it's included in
-        # the Lexer class.  The default implementation returns None which
-        # gets turned into 0.0.  Run scripts/detect_missing_analyse_text.py
-        # to find lexers which need it overridden.
-        if code:
-            return cls.analyse_text(code) + bonus
-        return cls.priority + bonus
-
-    if matches:
-        matches.sort(key=get_rating)
-        #print "Possible lexers, after sort:", matches
-        return matches[-1][0](**options)
-    raise ClassNotFound('no lexer for filename %r found' % _fn)
-
-
-def get_lexer_for_mimetype(_mime, **options):
-    """
-    Get a lexer for a mimetype.
-    """
-    for modname, name, _, _, mimetypes in LEXERS.itervalues():
-        if _mime in mimetypes:
-            if name not in _lexer_cache:
-                _load_lexers(modname)
-            return _lexer_cache[name](**options)
-    for cls in find_plugin_lexers():
-        if _mime in cls.mimetypes:
-            return cls(**options)
-    raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-
-
-def _iter_lexerclasses():
-    """
-    Return an iterator over all lexer classes.
-    """
-    for key in sorted(LEXERS):
-        module_name, name = LEXERS[key][:2]
-        if name not in _lexer_cache:
-            _load_lexers(module_name)
-        yield _lexer_cache[name]
-    for lexer in find_plugin_lexers():
-        yield lexer
-
-
-def guess_lexer_for_filename(_fn, _text, **options):
-    """
-    Lookup all lexers that handle those filenames primary (``filenames``)
-    or secondary (``alias_filenames``). Then run a text analysis for those
-    lexers and choose the best result.
-
-    usage::
-
-        >>> from pygments.lexers import guess_lexer_for_filename
-        >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
-        <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
-        >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
-        <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
-        >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
-        <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
-    """
-    fn = basename(_fn)
-    primary = None
-    matching_lexers = set()
-    for lexer in _iter_lexerclasses():
-        for filename in lexer.filenames:
-            if fnmatch.fnmatch(fn, filename):
-                matching_lexers.add(lexer)
-                primary = lexer
-        for filename in lexer.alias_filenames:
-            if fnmatch.fnmatch(fn, filename):
-                matching_lexers.add(lexer)
-    if not matching_lexers:
-        raise ClassNotFound('no lexer for filename %r found' % fn)
-    if len(matching_lexers) == 1:
-        return matching_lexers.pop()(**options)
-    result = []
-    for lexer in matching_lexers:
-        rv = lexer.analyse_text(_text)
-        if rv == 1.0:
-            return lexer(**options)
-        result.append((rv, lexer))
-    result.sort()
-    if not result[-1][0] and primary is not None:
-        return primary(**options)
-    return result[-1][1](**options)
-
-
-def guess_lexer(_text, **options):
-    """
-    Guess a lexer by strong distinctions in the text (eg, shebang).
-    """
-    best_lexer = [0.0, None]
-    for lexer in _iter_lexerclasses():
-        rv = lexer.analyse_text(_text)
-        if rv == 1.0:
-            return lexer(**options)
-        if rv > best_lexer[0]:
-            best_lexer[:] = (rv, lexer)
-    if not best_lexer[0] or best_lexer[1] is None:
-        raise ClassNotFound('no lexer matching the text found')
-    return best_lexer[1](**options)
-
-
-class _automodule(types.ModuleType):
-    """Automatically import lexers."""
-
-    def __getattr__(self, name):
-        info = LEXERS.get(name)
-        if info:
-            _load_lexers(info[0])
-            cls = _lexer_cache[info[1]]
-            setattr(self, name, cls)
-            return cls
-        raise AttributeError(name)
-
-
-oldmod = sys.modules['pygments.lexers']
-newmod = _automodule('pygments.lexers')
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules['pygments.lexers'] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
diff --git a/python/ext-libs/pygments/lexers/_asybuiltins.py b/python/ext-libs/pygments/lexers/_asybuiltins.py
deleted file mode 100644
index 108fa19..0000000
--- a/python/ext-libs/pygments/lexers/_asybuiltins.py
+++ /dev/null
@@ -1,1645 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._asybuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file contains the asy-function names and asy-variable names of
-    Asymptote.
-
-    Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
-    TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
-    for function and variable names.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-ASYFUNCNAME = set([
-    'AND',
-    'Arc',
-    'ArcArrow',
-    'ArcArrows',
-    'Arrow',
-    'Arrows',
-    'Automatic',
-    'AvantGarde',
-    'BBox',
-    'BWRainbow',
-    'BWRainbow2',
-    'Bar',
-    'Bars',
-    'BeginArcArrow',
-    'BeginArrow',
-    'BeginBar',
-    'BeginDotMargin',
-    'BeginMargin',
-    'BeginPenMargin',
-    'Blank',
-    'Bookman',
-    'Bottom',
-    'BottomTop',
-    'Bounds',
-    'Break',
-    'Broken',
-    'BrokenLog',
-    'Ceil',
-    'Circle',
-    'CircleBarIntervalMarker',
-    'Cos',
-    'Courier',
-    'CrossIntervalMarker',
-    'DefaultFormat',
-    'DefaultLogFormat',
-    'Degrees',
-    'Dir',
-    'DotMargin',
-    'DotMargins',
-    'Dotted',
-    'Draw',
-    'Drawline',
-    'Embed',
-    'EndArcArrow',
-    'EndArrow',
-    'EndBar',
-    'EndDotMargin',
-    'EndMargin',
-    'EndPenMargin',
-    'Fill',
-    'FillDraw',
-    'Floor',
-    'Format',
-    'Full',
-    'Gaussian',
-    'Gaussrand',
-    'Gaussrandpair',
-    'Gradient',
-    'Grayscale',
-    'Helvetica',
-    'Hermite',
-    'HookHead',
-    'InOutTicks',
-    'InTicks',
-    'J',
-    'Label',
-    'Landscape',
-    'Left',
-    'LeftRight',
-    'LeftTicks',
-    'Legend',
-    'Linear',
-    'Link',
-    'Log',
-    'LogFormat',
-    'Margin',
-    'Margins',
-    'Mark',
-    'MidArcArrow',
-    'MidArrow',
-    'NOT',
-    'NewCenturySchoolBook',
-    'NoBox',
-    'NoMargin',
-    'NoModifier',
-    'NoTicks',
-    'NoTicks3',
-    'NoZero',
-    'NoZeroFormat',
-    'None',
-    'OR',
-    'OmitFormat',
-    'OmitTick',
-    'OutTicks',
-    'Ox',
-    'Oy',
-    'Palatino',
-    'PaletteTicks',
-    'Pen',
-    'PenMargin',
-    'PenMargins',
-    'Pentype',
-    'Portrait',
-    'RadialShade',
-    'Rainbow',
-    'Range',
-    'Relative',
-    'Right',
-    'RightTicks',
-    'Rotate',
-    'Round',
-    'SQR',
-    'Scale',
-    'ScaleX',
-    'ScaleY',
-    'ScaleZ',
-    'Seascape',
-    'Shift',
-    'Sin',
-    'Slant',
-    'Spline',
-    'StickIntervalMarker',
-    'Straight',
-    'Symbol',
-    'Tan',
-    'TeXify',
-    'Ticks',
-    'Ticks3',
-    'TildeIntervalMarker',
-    'TimesRoman',
-    'Top',
-    'TrueMargin',
-    'UnFill',
-    'UpsideDown',
-    'Wheel',
-    'X',
-    'XEquals',
-    'XOR',
-    'XY',
-    'XYEquals',
-    'XYZero',
-    'XYgrid',
-    'XZEquals',
-    'XZZero',
-    'XZero',
-    'XZgrid',
-    'Y',
-    'YEquals',
-    'YXgrid',
-    'YZ',
-    'YZEquals',
-    'YZZero',
-    'YZero',
-    'YZgrid',
-    'Z',
-    'ZX',
-    'ZXgrid',
-    'ZYgrid',
-    'ZapfChancery',
-    'ZapfDingbats',
-    '_cputime',
-    '_draw',
-    '_eval',
-    '_image',
-    '_labelpath',
-    '_projection',
-    '_strokepath',
-    '_texpath',
-    'aCos',
-    'aSin',
-    'aTan',
-    'abort',
-    'abs',
-    'accel',
-    'acos',
-    'acosh',
-    'acot',
-    'acsc',
-    'add',
-    'addArrow',
-    'addMargins',
-    'addSaveFunction',
-    'addnode',
-    'addnodes',
-    'addpenarc',
-    'addpenline',
-    'addseg',
-    'adjust',
-    'alias',
-    'align',
-    'all',
-    'altitude',
-    'angabscissa',
-    'angle',
-    'angpoint',
-    'animate',
-    'annotate',
-    'anticomplementary',
-    'antipedal',
-    'apply',
-    'approximate',
-    'arc',
-    'arcarrowsize',
-    'arccircle',
-    'arcdir',
-    'arcfromcenter',
-    'arcfromfocus',
-    'arclength',
-    'arcnodesnumber',
-    'arcpoint',
-    'arcsubtended',
-    'arcsubtendedcenter',
-    'arctime',
-    'arctopath',
-    'array',
-    'arrow',
-    'arrow2',
-    'arrowbase',
-    'arrowbasepoints',
-    'arrowsize',
-    'asec',
-    'asin',
-    'asinh',
-    'ask',
-    'assert',
-    'asy',
-    'asycode',
-    'asydir',
-    'asyfigure',
-    'asyfilecode',
-    'asyinclude',
-    'asywrite',
-    'atan',
-    'atan2',
-    'atanh',
-    'atbreakpoint',
-    'atexit',
-    'atime',
-    'attach',
-    'attract',
-    'atupdate',
-    'autoformat',
-    'autoscale',
-    'autoscale3',
-    'axes',
-    'axes3',
-    'axialshade',
-    'axis',
-    'axiscoverage',
-    'azimuth',
-    'babel',
-    'background',
-    'bangles',
-    'bar',
-    'barmarksize',
-    'barsize',
-    'basealign',
-    'baseline',
-    'bbox',
-    'beep',
-    'begin',
-    'beginclip',
-    'begingroup',
-    'beginpoint',
-    'between',
-    'bevel',
-    'bezier',
-    'bezierP',
-    'bezierPP',
-    'bezierPPP',
-    'bezulate',
-    'bibliography',
-    'bibliographystyle',
-    'binarytree',
-    'binarytreeNode',
-    'binomial',
-    'binput',
-    'bins',
-    'bisector',
-    'bisectorpoint',
-    'blend',
-    'boutput',
-    'box',
-    'bqe',
-    'breakpoint',
-    'breakpoints',
-    'brick',
-    'buildRestoreDefaults',
-    'buildRestoreThunk',
-    'buildcycle',
-    'bulletcolor',
-    'canonical',
-    'canonicalcartesiansystem',
-    'cartesiansystem',
-    'case1',
-    'case2',
-    'case3',
-    'cbrt',
-    'cd',
-    'ceil',
-    'center',
-    'centerToFocus',
-    'centroid',
-    'cevian',
-    'change2',
-    'changecoordsys',
-    'checkSegment',
-    'checkconditionlength',
-    'checker',
-    'checklengths',
-    'checkposition',
-    'checktriangle',
-    'choose',
-    'circle',
-    'circlebarframe',
-    'circlemarkradius',
-    'circlenodesnumber',
-    'circumcenter',
-    'circumcircle',
-    'clamped',
-    'clear',
-    'clip',
-    'clipdraw',
-    'close',
-    'cmyk',
-    'code',
-    'colatitude',
-    'collect',
-    'collinear',
-    'color',
-    'colorless',
-    'colors',
-    'colorspace',
-    'comma',
-    'compassmark',
-    'complement',
-    'complementary',
-    'concat',
-    'concurrent',
-    'cone',
-    'conic',
-    'conicnodesnumber',
-    'conictype',
-    'conj',
-    'connect',
-    'containmentTree',
-    'contains',
-    'contour',
-    'contour3',
-    'controlSpecifier',
-    'convert',
-    'coordinates',
-    'coordsys',
-    'copy',
-    'cos',
-    'cosh',
-    'cot',
-    'countIntersections',
-    'cputime',
-    'crop',
-    'cropcode',
-    'cross',
-    'crossframe',
-    'crosshatch',
-    'crossmarksize',
-    'csc',
-    'cubicroots',
-    'curabscissa',
-    'curlSpecifier',
-    'curpoint',
-    'currentarrow',
-    'currentexitfunction',
-    'currentmomarrow',
-    'currentpolarconicroutine',
-    'curve',
-    'cut',
-    'cutafter',
-    'cutbefore',
-    'cyclic',
-    'cylinder',
-    'debugger',
-    'deconstruct',
-    'defaultdir',
-    'defaultformat',
-    'defaultpen',
-    'defined',
-    'degenerate',
-    'degrees',
-    'delete',
-    'deletepreamble',
-    'determinant',
-    'diagonal',
-    'diamond',
-    'diffdiv',
-    'dir',
-    'dirSpecifier',
-    'dirtime',
-    'display',
-    'distance',
-    'divisors',
-    'do_overpaint',
-    'dot',
-    'dotframe',
-    'dotsize',
-    'downcase',
-    'draw',
-    'drawAll',
-    'drawDoubleLine',
-    'drawFermion',
-    'drawGhost',
-    'drawGluon',
-    'drawMomArrow',
-    'drawPhoton',
-    'drawScalar',
-    'drawVertex',
-    'drawVertexBox',
-    'drawVertexBoxO',
-    'drawVertexBoxX',
-    'drawVertexO',
-    'drawVertexOX',
-    'drawVertexTriangle',
-    'drawVertexTriangleO',
-    'drawVertexX',
-    'drawarrow',
-    'drawarrow2',
-    'drawline',
-    'drawtick',
-    'duplicate',
-    'elle',
-    'ellipse',
-    'ellipsenodesnumber',
-    'embed',
-    'embed3',
-    'empty',
-    'enclose',
-    'end',
-    'endScript',
-    'endclip',
-    'endgroup',
-    'endl',
-    'endpoint',
-    'endpoints',
-    'eof',
-    'eol',
-    'equation',
-    'equations',
-    'erase',
-    'erasestep',
-    'erf',
-    'erfc',
-    'error',
-    'errorbar',
-    'errorbars',
-    'eval',
-    'excenter',
-    'excircle',
-    'exit',
-    'exitXasyMode',
-    'exitfunction',
-    'exp',
-    'expfactors',
-    'expi',
-    'expm1',
-    'exradius',
-    'extend',
-    'extension',
-    'extouch',
-    'fabs',
-    'factorial',
-    'fermat',
-    'fft',
-    'fhorner',
-    'figure',
-    'file',
-    'filecode',
-    'fill',
-    'filldraw',
-    'filloutside',
-    'fillrule',
-    'filltype',
-    'find',
-    'finite',
-    'finiteDifferenceJacobian',
-    'firstcut',
-    'firstframe',
-    'fit',
-    'fit2',
-    'fixedscaling',
-    'floor',
-    'flush',
-    'fmdefaults',
-    'fmod',
-    'focusToCenter',
-    'font',
-    'fontcommand',
-    'fontsize',
-    'foot',
-    'format',
-    'frac',
-    'frequency',
-    'fromCenter',
-    'fromFocus',
-    'fspline',
-    'functionshade',
-    'gamma',
-    'generate_random_backtrace',
-    'generateticks',
-    'gergonne',
-    'getc',
-    'getint',
-    'getpair',
-    'getreal',
-    'getstring',
-    'gettriple',
-    'gluon',
-    'gouraudshade',
-    'graph',
-    'graphic',
-    'gray',
-    'grestore',
-    'grid',
-    'grid3',
-    'gsave',
-    'halfbox',
-    'hatch',
-    'hdiffdiv',
-    'hermite',
-    'hex',
-    'histogram',
-    'history',
-    'hline',
-    'hprojection',
-    'hsv',
-    'hyperbola',
-    'hyperbolanodesnumber',
-    'hyperlink',
-    'hypot',
-    'identity',
-    'image',
-    'incenter',
-    'incentral',
-    'incircle',
-    'increasing',
-    'incrementposition',
-    'indexedTransform',
-    'indexedfigure',
-    'initXasyMode',
-    'initdefaults',
-    'input',
-    'inradius',
-    'insert',
-    'inside',
-    'integrate',
-    'interactive',
-    'interior',
-    'interp',
-    'interpolate',
-    'intersect',
-    'intersection',
-    'intersectionpoint',
-    'intersectionpoints',
-    'intersections',
-    'intouch',
-    'inverse',
-    'inversion',
-    'invisible',
-    'is3D',
-    'isDuplicate',
-    'isogonal',
-    'isogonalconjugate',
-    'isotomic',
-    'isotomicconjugate',
-    'isparabola',
-    'italic',
-    'item',
-    'key',
-    'kurtosis',
-    'kurtosisexcess',
-    'label',
-    'labelaxis',
-    'labelmargin',
-    'labelpath',
-    'labels',
-    'labeltick',
-    'labelx',
-    'labelx3',
-    'labely',
-    'labely3',
-    'labelz',
-    'labelz3',
-    'lastcut',
-    'latex',
-    'latitude',
-    'latticeshade',
-    'layer',
-    'layout',
-    'ldexp',
-    'leastsquares',
-    'legend',
-    'legenditem',
-    'length',
-    'lift',
-    'light',
-    'limits',
-    'line',
-    'linear',
-    'linecap',
-    'lineinversion',
-    'linejoin',
-    'linemargin',
-    'lineskip',
-    'linetype',
-    'linewidth',
-    'link',
-    'list',
-    'lm_enorm',
-    'lm_evaluate_default',
-    'lm_lmdif',
-    'lm_lmpar',
-    'lm_minimize',
-    'lm_print_default',
-    'lm_print_quiet',
-    'lm_qrfac',
-    'lm_qrsolv',
-    'locale',
-    'locate',
-    'locatefile',
-    'location',
-    'log',
-    'log10',
-    'log1p',
-    'logaxiscoverage',
-    'longitude',
-    'lookup',
-    'magnetize',
-    'makeNode',
-    'makedraw',
-    'makepen',
-    'map',
-    'margin',
-    'markangle',
-    'markangleradius',
-    'markanglespace',
-    'markarc',
-    'marker',
-    'markinterval',
-    'marknodes',
-    'markrightangle',
-    'markuniform',
-    'mass',
-    'masscenter',
-    'massformat',
-    'math',
-    'max',
-    'max3',
-    'maxbezier',
-    'maxbound',
-    'maxcoords',
-    'maxlength',
-    'maxratio',
-    'maxtimes',
-    'mean',
-    'medial',
-    'median',
-    'midpoint',
-    'min',
-    'min3',
-    'minbezier',
-    'minbound',
-    'minipage',
-    'minratio',
-    'mintimes',
-    'miterlimit',
-    'momArrowPath',
-    'momarrowsize',
-    'monotonic',
-    'multifigure',
-    'nativeformat',
-    'natural',
-    'needshipout',
-    'newl',
-    'newpage',
-    'newslide',
-    'newton',
-    'newtree',
-    'nextframe',
-    'nextnormal',
-    'nextpage',
-    'nib',
-    'nodabscissa',
-    'none',
-    'norm',
-    'normalvideo',
-    'notaknot',
-    'nowarn',
-    'numberpage',
-    'nurb',
-    'object',
-    'offset',
-    'onpath',
-    'opacity',
-    'opposite',
-    'orientation',
-    'orig_circlenodesnumber',
-    'orig_circlenodesnumber1',
-    'orig_draw',
-    'orig_ellipsenodesnumber',
-    'orig_ellipsenodesnumber1',
-    'orig_hyperbolanodesnumber',
-    'orig_parabolanodesnumber',
-    'origin',
-    'orthic',
-    'orthocentercenter',
-    'outformat',
-    'outline',
-    'outprefix',
-    'output',
-    'overloadedMessage',
-    'overwrite',
-    'pack',
-    'pad',
-    'pairs',
-    'palette',
-    'parabola',
-    'parabolanodesnumber',
-    'parallel',
-    'partialsum',
-    'path',
-    'path3',
-    'pattern',
-    'pause',
-    'pdf',
-    'pedal',
-    'periodic',
-    'perp',
-    'perpendicular',
-    'perpendicularmark',
-    'phantom',
-    'phi1',
-    'phi2',
-    'phi3',
-    'photon',
-    'piecewisestraight',
-    'point',
-    'polar',
-    'polarconicroutine',
-    'polargraph',
-    'polygon',
-    'postcontrol',
-    'postscript',
-    'pow10',
-    'ppoint',
-    'prc',
-    'prc0',
-    'precision',
-    'precontrol',
-    'prepend',
-    'print_random_addresses',
-    'project',
-    'projection',
-    'purge',
-    'pwhermite',
-    'quadrant',
-    'quadraticroots',
-    'quantize',
-    'quarticroots',
-    'quotient',
-    'radialshade',
-    'radians',
-    'radicalcenter',
-    'radicalline',
-    'radius',
-    'rand',
-    'randompath',
-    'rd',
-    'readline',
-    'realmult',
-    'realquarticroots',
-    'rectangle',
-    'rectangular',
-    'rectify',
-    'reflect',
-    'relabscissa',
-    'relative',
-    'relativedistance',
-    'reldir',
-    'relpoint',
-    'reltime',
-    'remainder',
-    'remark',
-    'removeDuplicates',
-    'rename',
-    'replace',
-    'report',
-    'resetdefaultpen',
-    'restore',
-    'restoredefaults',
-    'reverse',
-    'reversevideo',
-    'rf',
-    'rfind',
-    'rgb',
-    'rgba',
-    'rgbint',
-    'rms',
-    'rotate',
-    'rotateO',
-    'rotation',
-    'round',
-    'roundbox',
-    'roundedpath',
-    'roundrectangle',
-    'samecoordsys',
-    'sameside',
-    'sample',
-    'save',
-    'savedefaults',
-    'saveline',
-    'scale',
-    'scale3',
-    'scaleO',
-    'scaleT',
-    'scaleless',
-    'scientific',
-    'search',
-    'searchtree',
-    'sec',
-    'secondaryX',
-    'secondaryY',
-    'seconds',
-    'section',
-    'sector',
-    'seek',
-    'seekeof',
-    'segment',
-    'sequence',
-    'setpens',
-    'sgn',
-    'sgnd',
-    'sharpangle',
-    'sharpdegrees',
-    'shift',
-    'shiftless',
-    'shipout',
-    'shipout3',
-    'show',
-    'side',
-    'simeq',
-    'simpson',
-    'sin',
-    'single',
-    'sinh',
-    'size',
-    'size3',
-    'skewness',
-    'skip',
-    'slant',
-    'sleep',
-    'slope',
-    'slopefield',
-    'solve',
-    'solveBVP',
-    'sort',
-    'sourceline',
-    'sphere',
-    'split',
-    'sqrt',
-    'square',
-    'srand',
-    'standardizecoordsys',
-    'startScript',
-    'startTrembling',
-    'stdev',
-    'step',
-    'stickframe',
-    'stickmarksize',
-    'stickmarkspace',
-    'stop',
-    'straight',
-    'straightness',
-    'string',
-    'stripdirectory',
-    'stripextension',
-    'stripfile',
-    'strokepath',
-    'subdivide',
-    'subitem',
-    'subpath',
-    'substr',
-    'sum',
-    'surface',
-    'symmedial',
-    'symmedian',
-    'system',
-    'tab',
-    'tableau',
-    'tan',
-    'tangent',
-    'tangential',
-    'tangents',
-    'tanh',
-    'tell',
-    'tensionSpecifier',
-    'tensorshade',
-    'tex',
-    'texcolor',
-    'texify',
-    'texpath',
-    'texpreamble',
-    'texreset',
-    'texshipout',
-    'texsize',
-    'textpath',
-    'thick',
-    'thin',
-    'tick',
-    'tickMax',
-    'tickMax3',
-    'tickMin',
-    'tickMin3',
-    'ticklabelshift',
-    'ticklocate',
-    'tildeframe',
-    'tildemarksize',
-    'tile',
-    'tiling',
-    'time',
-    'times',
-    'title',
-    'titlepage',
-    'topbox',
-    'transform',
-    'transformation',
-    'transpose',
-    'tremble',
-    'trembleFuzz',
-    'tremble_circlenodesnumber',
-    'tremble_circlenodesnumber1',
-    'tremble_draw',
-    'tremble_ellipsenodesnumber',
-    'tremble_ellipsenodesnumber1',
-    'tremble_hyperbolanodesnumber',
-    'tremble_marknodes',
-    'tremble_markuniform',
-    'tremble_parabolanodesnumber',
-    'triangle',
-    'triangleAbc',
-    'triangleabc',
-    'triangulate',
-    'tricoef',
-    'tridiagonal',
-    'trilinear',
-    'trim',
-    'trueMagnetize',
-    'truepoint',
-    'tube',
-    'uncycle',
-    'unfill',
-    'uniform',
-    'unit',
-    'unitrand',
-    'unitsize',
-    'unityroot',
-    'unstraighten',
-    'upcase',
-    'updatefunction',
-    'uperiodic',
-    'upscale',
-    'uptodate',
-    'usepackage',
-    'usersetting',
-    'usetypescript',
-    'usleep',
-    'value',
-    'variance',
-    'variancebiased',
-    'vbox',
-    'vector',
-    'vectorfield',
-    'verbatim',
-    'view',
-    'vline',
-    'vperiodic',
-    'vprojection',
-    'warn',
-    'warning',
-    'windingnumber',
-    'write',
-    'xaxis',
-    'xaxis3',
-    'xaxis3At',
-    'xaxisAt',
-    'xequals',
-    'xinput',
-    'xlimits',
-    'xoutput',
-    'xpart',
-    'xscale',
-    'xscaleO',
-    'xtick',
-    'xtick3',
-    'xtrans',
-    'yaxis',
-    'yaxis3',
-    'yaxis3At',
-    'yaxisAt',
-    'yequals',
-    'ylimits',
-    'ypart',
-    'yscale',
-    'yscaleO',
-    'ytick',
-    'ytick3',
-    'ytrans',
-    'zaxis3',
-    'zaxis3At',
-    'zero',
-    'zero3',
-    'zlimits',
-    'zpart',
-    'ztick',
-    'ztick3',
-    'ztrans'
-])
-
-ASYVARNAME = set([
-    'AliceBlue',
-    'Align',
-    'Allow',
-    'AntiqueWhite',
-    'Apricot',
-    'Aqua',
-    'Aquamarine',
-    'Aspect',
-    'Azure',
-    'BeginPoint',
-    'Beige',
-    'Bisque',
-    'Bittersweet',
-    'Black',
-    'BlanchedAlmond',
-    'Blue',
-    'BlueGreen',
-    'BlueViolet',
-    'Both',
-    'Break',
-    'BrickRed',
-    'Brown',
-    'BurlyWood',
-    'BurntOrange',
-    'CCW',
-    'CW',
-    'CadetBlue',
-    'CarnationPink',
-    'Center',
-    'Centered',
-    'Cerulean',
-    'Chartreuse',
-    'Chocolate',
-    'Coeff',
-    'Coral',
-    'CornflowerBlue',
-    'Cornsilk',
-    'Crimson',
-    'Crop',
-    'Cyan',
-    'Dandelion',
-    'DarkBlue',
-    'DarkCyan',
-    'DarkGoldenrod',
-    'DarkGray',
-    'DarkGreen',
-    'DarkKhaki',
-    'DarkMagenta',
-    'DarkOliveGreen',
-    'DarkOrange',
-    'DarkOrchid',
-    'DarkRed',
-    'DarkSalmon',
-    'DarkSeaGreen',
-    'DarkSlateBlue',
-    'DarkSlateGray',
-    'DarkTurquoise',
-    'DarkViolet',
-    'DeepPink',
-    'DeepSkyBlue',
-    'DefaultHead',
-    'DimGray',
-    'DodgerBlue',
-    'Dotted',
-    'Draw',
-    'E',
-    'ENE',
-    'EPS',
-    'ESE',
-    'E_Euler',
-    'E_PC',
-    'E_RK2',
-    'E_RK3BS',
-    'Emerald',
-    'EndPoint',
-    'Euler',
-    'Fill',
-    'FillDraw',
-    'FireBrick',
-    'FloralWhite',
-    'ForestGreen',
-    'Fuchsia',
-    'Gainsboro',
-    'GhostWhite',
-    'Gold',
-    'Goldenrod',
-    'Gray',
-    'Green',
-    'GreenYellow',
-    'Honeydew',
-    'HookHead',
-    'Horizontal',
-    'HotPink',
-    'I',
-    'IgnoreAspect',
-    'IndianRed',
-    'Indigo',
-    'Ivory',
-    'JOIN_IN',
-    'JOIN_OUT',
-    'JungleGreen',
-    'Khaki',
-    'LM_DWARF',
-    'LM_MACHEP',
-    'LM_SQRT_DWARF',
-    'LM_SQRT_GIANT',
-    'LM_USERTOL',
-    'Label',
-    'Lavender',
-    'LavenderBlush',
-    'LawnGreen',
-    'LeftJustified',
-    'LeftSide',
-    'LemonChiffon',
-    'LightBlue',
-    'LightCoral',
-    'LightCyan',
-    'LightGoldenrodYellow',
-    'LightGreen',
-    'LightGrey',
-    'LightPink',
-    'LightSalmon',
-    'LightSeaGreen',
-    'LightSkyBlue',
-    'LightSlateGray',
-    'LightSteelBlue',
-    'LightYellow',
-    'Lime',
-    'LimeGreen',
-    'Linear',
-    'Linen',
-    'Log',
-    'Logarithmic',
-    'Magenta',
-    'Mahogany',
-    'Mark',
-    'MarkFill',
-    'Maroon',
-    'Max',
-    'MediumAquamarine',
-    'MediumBlue',
-    'MediumOrchid',
-    'MediumPurple',
-    'MediumSeaGreen',
-    'MediumSlateBlue',
-    'MediumSpringGreen',
-    'MediumTurquoise',
-    'MediumVioletRed',
-    'Melon',
-    'MidPoint',
-    'MidnightBlue',
-    'Min',
-    'MintCream',
-    'MistyRose',
-    'Moccasin',
-    'Move',
-    'MoveQuiet',
-    'Mulberry',
-    'N',
-    'NE',
-    'NNE',
-    'NNW',
-    'NW',
-    'NavajoWhite',
-    'Navy',
-    'NavyBlue',
-    'NoAlign',
-    'NoCrop',
-    'NoFill',
-    'NoSide',
-    'OldLace',
-    'Olive',
-    'OliveDrab',
-    'OliveGreen',
-    'Orange',
-    'OrangeRed',
-    'Orchid',
-    'Ox',
-    'Oy',
-    'PC',
-    'PaleGoldenrod',
-    'PaleGreen',
-    'PaleTurquoise',
-    'PaleVioletRed',
-    'PapayaWhip',
-    'Peach',
-    'PeachPuff',
-    'Periwinkle',
-    'Peru',
-    'PineGreen',
-    'Pink',
-    'Plum',
-    'PowderBlue',
-    'ProcessBlue',
-    'Purple',
-    'RK2',
-    'RK3',
-    'RK3BS',
-    'RK4',
-    'RK5',
-    'RK5DP',
-    'RK5F',
-    'RawSienna',
-    'Red',
-    'RedOrange',
-    'RedViolet',
-    'Rhodamine',
-    'RightJustified',
-    'RightSide',
-    'RosyBrown',
-    'RoyalBlue',
-    'RoyalPurple',
-    'RubineRed',
-    'S',
-    'SE',
-    'SSE',
-    'SSW',
-    'SW',
-    'SaddleBrown',
-    'Salmon',
-    'SandyBrown',
-    'SeaGreen',
-    'Seashell',
-    'Sepia',
-    'Sienna',
-    'Silver',
-    'SimpleHead',
-    'SkyBlue',
-    'SlateBlue',
-    'SlateGray',
-    'Snow',
-    'SpringGreen',
-    'SteelBlue',
-    'Suppress',
-    'SuppressQuiet',
-    'Tan',
-    'TeXHead',
-    'Teal',
-    'TealBlue',
-    'Thistle',
-    'Ticksize',
-    'Tomato',
-    'Turquoise',
-    'UnFill',
-    'VERSION',
-    'Value',
-    'Vertical',
-    'Violet',
-    'VioletRed',
-    'W',
-    'WNW',
-    'WSW',
-    'Wheat',
-    'White',
-    'WhiteSmoke',
-    'WildStrawberry',
-    'XYAlign',
-    'YAlign',
-    'Yellow',
-    'YellowGreen',
-    'YellowOrange',
-    'addpenarc',
-    'addpenline',
-    'align',
-    'allowstepping',
-    'angularsystem',
-    'animationdelay',
-    'appendsuffix',
-    'arcarrowangle',
-    'arcarrowfactor',
-    'arrow2sizelimit',
-    'arrowangle',
-    'arrowbarb',
-    'arrowdir',
-    'arrowfactor',
-    'arrowhookfactor',
-    'arrowlength',
-    'arrowsizelimit',
-    'arrowtexfactor',
-    'authorpen',
-    'axis',
-    'axiscoverage',
-    'axislabelfactor',
-    'background',
-    'backgroundcolor',
-    'backgroundpen',
-    'barfactor',
-    'barmarksizefactor',
-    'basealign',
-    'baselinetemplate',
-    'beveljoin',
-    'bigvertexpen',
-    'bigvertexsize',
-    'black',
-    'blue',
-    'bm',
-    'bottom',
-    'bp',
-    'brown',
-    'bullet',
-    'byfoci',
-    'byvertices',
-    'camerafactor',
-    'chartreuse',
-    'circlemarkradiusfactor',
-    'circlenodesnumberfactor',
-    'circleprecision',
-    'circlescale',
-    'cm',
-    'codefile',
-    'codepen',
-    'codeskip',
-    'colorPen',
-    'coloredNodes',
-    'coloredSegments',
-    'conditionlength',
-    'conicnodesfactor',
-    'count',
-    'cputimeformat',
-    'crossmarksizefactor',
-    'currentcoordsys',
-    'currentlight',
-    'currentpatterns',
-    'currentpen',
-    'currentpicture',
-    'currentposition',
-    'currentprojection',
-    'curvilinearsystem',
-    'cuttings',
-    'cyan',
-    'darkblue',
-    'darkbrown',
-    'darkcyan',
-    'darkgray',
-    'darkgreen',
-    'darkgrey',
-    'darkmagenta',
-    'darkolive',
-    'darkred',
-    'dashdotted',
-    'dashed',
-    'datepen',
-    'dateskip',
-    'debuggerlines',
-    'debugging',
-    'deepblue',
-    'deepcyan',
-    'deepgray',
-    'deepgreen',
-    'deepgrey',
-    'deepmagenta',
-    'deepred',
-    'default',
-    'defaultControl',
-    'defaultS',
-    'defaultbackpen',
-    'defaultcoordsys',
-    'defaultfilename',
-    'defaultformat',
-    'defaultmassformat',
-    'defaultpen',
-    'diagnostics',
-    'differentlengths',
-    'dot',
-    'dotfactor',
-    'dotframe',
-    'dotted',
-    'doublelinepen',
-    'doublelinespacing',
-    'down',
-    'duplicateFuzz',
-    'ellipsenodesnumberfactor',
-    'eps',
-    'epsgeo',
-    'epsilon',
-    'evenodd',
-    'extendcap',
-    'fermionpen',
-    'figureborder',
-    'figuremattpen',
-    'firstnode',
-    'firststep',
-    'foregroundcolor',
-    'fuchsia',
-    'fuzz',
-    'gapfactor',
-    'ghostpen',
-    'gluonamplitude',
-    'gluonpen',
-    'gluonratio',
-    'gray',
-    'green',
-    'grey',
-    'hatchepsilon',
-    'havepagenumber',
-    'heavyblue',
-    'heavycyan',
-    'heavygray',
-    'heavygreen',
-    'heavygrey',
-    'heavymagenta',
-    'heavyred',
-    'hline',
-    'hwratio',
-    'hyperbolanodesnumberfactor',
-    'identity4',
-    'ignore',
-    'inXasyMode',
-    'inch',
-    'inches',
-    'includegraphicscommand',
-    'inf',
-    'infinity',
-    'institutionpen',
-    'intMax',
-    'intMin',
-    'invert',
-    'invisible',
-    'itempen',
-    'itemskip',
-    'itemstep',
-    'labelmargin',
-    'landscape',
-    'lastnode',
-    'left',
-    'legendhskip',
-    'legendlinelength',
-    'legendmargin',
-    'legendmarkersize',
-    'legendmaxrelativewidth',
-    'legendvskip',
-    'lightblue',
-    'lightcyan',
-    'lightgray',
-    'lightgreen',
-    'lightgrey',
-    'lightmagenta',
-    'lightolive',
-    'lightred',
-    'lightyellow',
-    'linemargin',
-    'lm_infmsg',
-    'lm_shortmsg',
-    'longdashdotted',
-    'longdashed',
-    'magenta',
-    'magneticPoints',
-    'magneticRadius',
-    'mantissaBits',
-    'markangleradius',
-    'markangleradiusfactor',
-    'markanglespace',
-    'markanglespacefactor',
-    'mediumblue',
-    'mediumcyan',
-    'mediumgray',
-    'mediumgreen',
-    'mediumgrey',
-    'mediummagenta',
-    'mediumred',
-    'mediumyellow',
-    'middle',
-    'minDistDefault',
-    'minblockheight',
-    'minblockwidth',
-    'mincirclediameter',
-    'minipagemargin',
-    'minipagewidth',
-    'minvertexangle',
-    'miterjoin',
-    'mm',
-    'momarrowfactor',
-    'momarrowlength',
-    'momarrowmargin',
-    'momarrowoffset',
-    'momarrowpen',
-    'monoPen',
-    'morepoints',
-    'nCircle',
-    'newbulletcolor',
-    'ngraph',
-    'nil',
-    'nmesh',
-    'nobasealign',
-    'nodeMarginDefault',
-    'nodesystem',
-    'nomarker',
-    'nopoint',
-    'noprimary',
-    'nullpath',
-    'nullpen',
-    'numarray',
-    'ocgindex',
-    'oldbulletcolor',
-    'olive',
-    'orange',
-    'origin',
-    'overpaint',
-    'page',
-    'pageheight',
-    'pagemargin',
-    'pagenumberalign',
-    'pagenumberpen',
-    'pagenumberposition',
-    'pagewidth',
-    'paleblue',
-    'palecyan',
-    'palegray',
-    'palegreen',
-    'palegrey',
-    'palemagenta',
-    'palered',
-    'paleyellow',
-    'parabolanodesnumberfactor',
-    'perpfactor',
-    'phi',
-    'photonamplitude',
-    'photonpen',
-    'photonratio',
-    'pi',
-    'pink',
-    'plain',
-    'plus',
-    'preamblenodes',
-    'pt',
-    'purple',
-    'r3',
-    'r4a',
-    'r4b',
-    'randMax',
-    'realDigits',
-    'realEpsilon',
-    'realMax',
-    'realMin',
-    'red',
-    'relativesystem',
-    'reverse',
-    'right',
-    'roundcap',
-    'roundjoin',
-    'royalblue',
-    'salmon',
-    'saveFunctions',
-    'scalarpen',
-    'sequencereal',
-    'settings',
-    'shipped',
-    'signedtrailingzero',
-    'solid',
-    'springgreen',
-    'sqrtEpsilon',
-    'squarecap',
-    'squarepen',
-    'startposition',
-    'stdin',
-    'stdout',
-    'stepfactor',
-    'stepfraction',
-    'steppagenumberpen',
-    'stepping',
-    'stickframe',
-    'stickmarksizefactor',
-    'stickmarkspacefactor',
-    'textpen',
-    'ticksize',
-    'tildeframe',
-    'tildemarksizefactor',
-    'tinv',
-    'titlealign',
-    'titlepagepen',
-    'titlepageposition',
-    'titlepen',
-    'titleskip',
-    'top',
-    'trailingzero',
-    'treeLevelStep',
-    'treeMinNodeWidth',
-    'treeNodeStep',
-    'trembleAngle',
-    'trembleFrequency',
-    'trembleRandom',
-    'tremblingMode',
-    'undefined',
-    'unitcircle',
-    'unitsquare',
-    'up',
-    'urlpen',
-    'urlskip',
-    'version',
-    'vertexpen',
-    'vertexsize',
-    'viewportmargin',
-    'viewportsize',
-    'vline',
-    'white',
-    'wye',
-    'xformStack',
-    'yellow',
-    'ylabelwidth',
-    'zerotickfuzz',
-    'zerowinding'
-])
diff --git a/python/ext-libs/pygments/lexers/_clbuiltins.py b/python/ext-libs/pygments/lexers/_clbuiltins.py
deleted file mode 100644
index 59f948b..0000000
--- a/python/ext-libs/pygments/lexers/_clbuiltins.py
+++ /dev/null
@@ -1,232 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._clbuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    ANSI Common Lisp builtins.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-BUILTIN_FUNCTIONS = [ # 638 functions
-    '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
-    'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
-    'adjustable-array-p', 'adjust-array', 'allocate-instance',
-    'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
-    'apropos-list', 'aref', 'arithmetic-error-operands',
-    'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
-    'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
-    'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
-    'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
-    'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
-    'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
-    'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
-    'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
-    'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
-    'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
-    'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
-    'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
-    'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
-    'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
-    'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
-    'characterp', 'char-code', 'char-downcase', 'char-equal',
-    'char-greaterp', 'char-int', 'char-lessp', 'char-name',
-    'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
-    'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
-    'close', 'clrhash', 'code-char', 'coerce', 'compile',
-    'compiled-function-p', 'compile-file', 'compile-file-pathname',
-    'compiler-macro-function', 'complement', 'complex', 'complexp',
-    'compute-applicable-methods', 'compute-restarts', 'concatenate',
-    'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
-    'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
-    'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
-    'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
-    'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
-    'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
-    'delete-package', 'denominator', 'deposit-field', 'describe',
-    'describe-object', 'digit-char', 'digit-char-p', 'directory',
-    'directory-namestring', 'disassemble', 'documentation', 'dpb',
-    'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
-    'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
-    'enough-namestring', 'ensure-directories-exist',
-    'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
-    'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
-    'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
-    'file-error-pathname', 'file-length', 'file-namestring',
-    'file-position', 'file-string-length', 'file-write-date',
-    'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
-    'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
-    'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
-    'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
-    'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
-    'fround', 'ftruncate', 'funcall', 'function-keywords',
-    'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
-    'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
-    'gethash', 'get-internal-real-time', 'get-internal-run-time',
-    'get-macro-character', 'get-output-stream-string', 'get-properties',
-    'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
-    'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
-    'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
-    'host-namestring', 'identity', 'imagpart', 'import',
-    'initialize-instance', 'input-stream-p', 'inspect',
-    'integer-decode-float', 'integer-length', 'integerp',
-    'interactive-stream-p', 'intern', 'intersection',
-    'invalid-method-error', 'invoke-debugger', 'invoke-restart',
-    'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
-    'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
-    'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
-    'listen', 'list-length', 'listp', 'load',
-    'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
-    'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
-    'logical-pathname-translations', 'logior', 'lognand', 'lognor',
-    'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
-    'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
-    'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
-    'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
-    'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
-    'make-instance', 'make-instances-obsolete', 'make-list',
-    'make-load-form', 'make-load-form-saving-slots', 'make-package',
-    'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
-    'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
-    'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
-    'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
-    'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
-    'merge', 'merge-pathnames', 'method-combination-error',
-    'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
-    'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
-    'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
-    'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
-    'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
-    'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
-    'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
-    'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
-    'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
-    'package-name', 'package-nicknames', 'packagep',
-    'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
-    'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
-    'pathname-device', 'pathname-directory', 'pathname-host',
-    'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
-    'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
-    'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
-    'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
-    'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
-    'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
-    'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
-    'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
-    'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
-    'read-from-string', 'read-line', 'read-preserving-whitespace',
-    'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
-    'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
-    'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
-    'remprop', 'rename-file', 'rename-package', 'replace', 'require',
-    'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
-    'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
-    'search', 'second', 'set', 'set-difference',
-    'set-dispatch-macro-character', 'set-exclusive-or',
-    'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
-    'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
-    'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
-    'simple-condition-format-arguments', 'simple-condition-format-control',
-    'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
-    'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
-    'slot-unbound', 'slot-value', 'software-type', 'software-version',
-    'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
-    'standard-char-p', 'store-value', 'stream-element-type',
-    'stream-error-stream', 'stream-external-format', 'streamp', 'string',
-    'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
-    'string-capitalize', 'string-downcase', 'string-equal',
-    'string-greaterp', 'string-left-trim', 'string-lessp',
-    'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
-    'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
-    'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
-    'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
-    'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
-    'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
-    'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
-    'translate-logical-pathname', 'translate-pathname', 'tree-equal',
-    'truename', 'truncate', 'two-way-stream-input-stream',
-    'two-way-stream-output-stream', 'type-error-datum',
-    'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
-    'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
-    'update-instance-for-different-class',
-    'update-instance-for-redefined-class', 'upgraded-array-element-type',
-    'upgraded-complex-part-type', 'upper-case-p', 'use-package',
-    'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
-    'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
-    'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
-    'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
-    'y-or-n-p', 'zerop',
-]
-
-SPECIAL_FORMS = [
-    'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
-    'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
-    'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
-    'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
-    'unwind-protect',
-]
-
-MACROS = [
-    'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
-    'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
-    'define-compiler-macro', 'define-condition', 'define-method-combination',
-    'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
-    'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
-    'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
-    'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
-    'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
-    'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
-    'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
-    'multiple-value-setq', 'nth-value', 'or', 'pop',
-    'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
-    'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
-    'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
-    'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
-    'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
-    'with-condition-restarts', 'with-hash-table-iterator',
-    'with-input-from-string', 'with-open-file', 'with-open-stream',
-    'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
-    'with-slots', 'with-standard-io-syntax',
-]
-
-LAMBDA_LIST_KEYWORDS = [
-    '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
-    '&rest', '&whole',
-]
-
-DECLARATIONS = [
-    'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
-    'ignorable', 'notinline', 'type',
-]
-
-BUILTIN_TYPES = [
-    'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
-    'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
-    'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
-    'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
-    'simple-vector', 'standard-char', 'unsigned-byte',
-
-    # Condition Types
-    'arithmetic-error', 'cell-error', 'condition', 'control-error',
-    'division-by-zero', 'end-of-file', 'error', 'file-error',
-    'floating-point-inexact', 'floating-point-overflow',
-    'floating-point-underflow', 'floating-point-invalid-operation',
-    'parse-error', 'package-error', 'print-not-readable', 'program-error',
-    'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
-    'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
-    'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
-    'undefined-function', 'warning',
-]
-
-BUILTIN_CLASSES = [
-    'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
-    'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
-    'file-stream', 'float', 'function', 'generic-function', 'hash-table',
-    'integer', 'list', 'logical-pathname', 'method-combination', 'method',
-    'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
-    'real', 'random-state', 'restart', 'sequence', 'standard-class',
-    'standard-generic-function', 'standard-method', 'standard-object',
-    'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
-    'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-]
diff --git a/python/ext-libs/pygments/lexers/_lassobuiltins.py b/python/ext-libs/pygments/lexers/_lassobuiltins.py
deleted file mode 100644
index 08b65f3..0000000
--- a/python/ext-libs/pygments/lexers/_lassobuiltins.py
+++ /dev/null
@@ -1,5416 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._lassobuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Built-in Lasso types, traits, and methods.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-BUILTINS = {
-    'Types': [
-        'null',
-        'void',
-        'tag',
-        'trait',
-        'integer',
-        'decimal',
-        'boolean',
-        'capture',
-        'string',
-        'bytes',
-        'keyword',
-        'custom',
-        'staticarray',
-        'signature',
-        'memberstream',
-        'dsinfo',
-        'sourcefile',
-        'array',
-        'pair',
-        'opaque',
-        'filedesc',
-        'dirdesc',
-        'locale',
-        'ucal',
-        'xml_domimplementation',
-        'xml_node',
-        'xml_characterdata',
-        'xml_document',
-        'xml_element',
-        'xml_attr',
-        'xml_text',
-        'xml_cdatasection',
-        'xml_entityreference',
-        'xml_entity',
-        'xml_processinginstruction',
-        'xml_comment',
-        'xml_documenttype',
-        'xml_documentfragment',
-        'xml_notation',
-        'xml_nodelist',
-        'xml_namednodemap',
-        'xml_namednodemap_ht',
-        'xml_namednodemap_attr',
-        'xmlstream',
-        'sqlite3',
-        'sqlite3_stmt',
-        'mime_reader',
-        'curltoken',
-        'regexp',
-        'zip_impl',
-        'zip_file_impl',
-        'library_thread_loader_thread$',
-        'library_thread_loader',
-        'generateforeachunkeyed',
-        'generateforeachkeyed',
-        'eacher',
-        'queriable_where',
-        'queriable_select',
-        'queriable_selectmany',
-        'queriable_groupby',
-        'queriable_join',
-        'queriable_groupjoin',
-        'queriable_orderby',
-        'queriable_orderbydescending',
-        'queriable_thenby',
-        'queriable_thenbydescending',
-        'queriable_skip',
-        'queriable_take',
-        'queriable_grouping',
-        'generateseries',
-        'tie',
-        'pairup',
-        'delve',
-        'repeat',
-        'pair_compare',
-        'serialization_object_identity_compare',
-        'serialization_element',
-        'serialization_writer_standin',
-        'serialization_writer_ref',
-        'serialization_writer',
-        'serialization_reader',
-        'tree_nullnode',
-        'tree_node',
-        'tree_base',
-        'map_node',
-        'map',
-        'file',
-        'dir',
-        'magick_image',
-        'ldap',
-        'os_process',
-        'java_jnienv',
-        'jobject',
-        'jmethodid',
-        'jfieldid',
-        'database_registry',
-        'sqlite_db',
-        'sqlite_results',
-        'sqlite_currentrow',
-        'sqlite_table',
-        'sqlite_column',
-        'curl',
-        'date',
-        'debugging_stack',
-        'dbgp_server',
-        'dbgp_packet',
-        'duration',
-        'inline_type',
-        'json_literal',
-        'json_object',
-        'list_node',
-        'list',
-        'jchar',
-        'jchararray',
-        'jbyte',
-        'jbytearray',
-        'jfloat',
-        'jint',
-        'jshort',
-        'currency',
-        'scientific',
-        'percent',
-        'dateandtime',
-        'timeonly',
-        'net_tcp',
-        'net_tcpssl',
-        'net_named_pipe',
-        'net_udppacket',
-        'net_udp',
-        'pdf_typebase',
-        'pdf_doc',
-        'pdf_color',
-        'pdf_barcode',
-        'pdf_font',
-        'pdf_image',
-        'pdf_list',
-        'pdf_read',
-        'pdf_table',
-        'pdf_text',
-        'pdf_hyphenator',
-        'pdf_chunk',
-        'pdf_phrase',
-        'pdf_paragraph',
-        'queue',
-        'set',
-        'sys_process',
-        'worker_pool',
-        'zip_file',
-        'zip',
-        'cache_server_element',
-        'cache_server',
-        'dns_response',
-        'component_render_state',
-        'component',
-        'component_container',
-        'document_base',
-        'document_body',
-        'document_header',
-        'text_document',
-        'data_document',
-        'email_compose',
-        'email_pop',
-        'email_parse',
-        'email_queue_impl_base',
-        'email_stage_impl_base',
-        'fcgi_record',
-        'web_request_impl',
-        'fcgi_request',
-        'include_cache_thread$',
-        'include_cache',
-        'atbegin_thread$',
-        'atbegin',
-        'fastcgi_each_fcgi_param',
-        'fastcgi_server',
-        'filemaker_datasource',
-        'http_document',
-        'http_document_header',
-        'http_header_field',
-        'html_document_head',
-        'html_document_body',
-        'raw_document_body',
-        'bytes_document_body',
-        'html_attr',
-        'html_atomic_element',
-        'html_container_element',
-        'http_error',
-        'html_script',
-        'html_text',
-        'html_raw',
-        'html_binary',
-        'html_json',
-        'html_cdata',
-        'html_eol',
-        'html_div',
-        'html_span',
-        'html_br',
-        'html_hr',
-        'html_h1',
-        'html_h2',
-        'html_h3',
-        'html_h4',
-        'html_h5',
-        'html_h6',
-        'html_meta',
-        'html_link',
-        'html_object',
-        'html_style',
-        'html_base',
-        'html_table',
-        'html_tr',
-        'html_td',
-        'html_th',
-        'html_img',
-        'html_form',
-        'html_fieldset',
-        'html_legend',
-        'html_input',
-        'html_label',
-        'html_option',
-        'html_select',
-        'http_server_web_connection',
-        'http_server',
-        'http_server_connection_handler',
-        'image',
-        'lassoapp_installer',
-        'lassoapp_content_rep_halt',
-        'lassoapp_dirsrc_fileresource',
-        'lassoapp_dirsrc_appsource',
-        'lassoapp_livesrc_fileresource',
-        'lassoapp_livesrc_appsource',
-        'lassoapp_long_expiring_bytes',
-        'lassoapp_zip_file_server_thread$',
-        'lassoapp_zip_file_server',
-        'lassoapp_zipsrc_fileresource',
-        'lassoapp_zipsrc_appsource',
-        'lassoapp_compiledsrc_fileresource',
-        'lassoapp_compiledsrc_appsource',
-        'lassoapp_manualsrc_appsource',
-        'log_impl_base',
-        'portal_impl',
-        'security_registry',
-        'memory_session_driver_impl_entry',
-        'memory_session_driver_impl',
-        'sqlite_session_driver_impl_entry',
-        'sqlite_session_driver_impl',
-        'mysql_session_driver_impl',
-        'odbc_session_driver_impl',
-        'session_delete_expired_thread_thread$',
-        'session_delete_expired_thread',
-        'email_smtp',
-        'client_address',
-        'client_ip',
-        'web_node_base',
-        'web_node_root',
-        'web_node_content_representation_xhr_container',
-        'web_node_content_representation_html_specialized',
-        'web_node_content_representation_css_specialized',
-        'web_node_content_representation_js_specialized',
-        'web_node_echo',
-        'web_error_atend',
-        'web_response_impl',
-        'web_router'
-    ],
-    'Traits': [
-        'trait_asstring',
-        'any',
-        'trait_generator',
-        'trait_decompose_assignment',
-        'trait_foreach',
-        'trait_generatorcentric',
-        'trait_foreachtextelement',
-        'trait_finite',
-        'trait_finiteforeach',
-        'trait_keyed',
-        'trait_keyedfinite',
-        'trait_keyedforeach',
-        'trait_frontended',
-        'trait_backended',
-        'trait_doubleended',
-        'trait_positionallykeyed',
-        'trait_expandable',
-        'trait_frontexpandable',
-        'trait_backexpandable',
-        'trait_contractible',
-        'trait_frontcontractible',
-        'trait_backcontractible',
-        'trait_fullymutable',
-        'trait_keyedmutable',
-        'trait_endedfullymutable',
-        'trait_setoperations',
-        'trait_searchable',
-        'trait_positionallysearchable',
-        'trait_pathcomponents',
-        'trait_readbytes',
-        'trait_writebytes',
-        'trait_setencoding',
-        'trait_readstring',
-        'trait_writestring',
-        'trait_hashable',
-        'trait_each_sub',
-        'trait_stack',
-        'trait_list',
-        'trait_array',
-        'trait_map',
-        'trait_close',
-        'trait_file',
-        'trait_scalar',
-        'trait_queriablelambda',
-        'trait_queriable',
-        'queriable_asstring',
-        'trait_serializable',
-        'trait_treenode',
-        'trait_json_serialize',
-        'formattingbase',
-        'trait_net',
-        'trait_xml_elementcompat',
-        'trait_xml_nodecompat',
-        'web_connection',
-        'html_element_coreattrs',
-        'html_element_i18nattrs',
-        'html_element_eventsattrs',
-        'html_attributed',
-        'lassoapp_resource',
-        'lassoapp_source',
-        'lassoapp_capabilities',
-        'session_driver',
-        'web_node_content_json_specialized',
-        'web_node',
-        'web_node_container',
-        'web_node_content_representation',
-        'web_node_content',
-        'web_node_content_document',
-        'web_node_postable',
-        'web_node_content_html_specialized',
-        'web_node_content_css_specialized',
-        'web_node_content_js_specialized'
-    ],
-    'Methods': [
-        'fail_now',
-        'staticarray',
-        'integer',
-        'decimal',
-        'string',
-        'bytes',
-        'keyword',
-        'signature',
-        'register',
-        'register_thread',
-        'escape_tag',
-        'handle',
-        'handle_failure',
-        'protect_now',
-        'threadvar_get',
-        'threadvar_set',
-        'threadvar_set_asrt',
-        'threadvar_find',
-        'abort_now',
-        'abort_clear',
-        'failure_clear',
-        'var_keys',
-        'var_values',
-        'null',
-        'trait',
-        'staticarray_join',
-        'suspend',
-        'main_thread_only',
-        'split_thread',
-        'capture_nearestloopcount',
-        'capture_nearestloopcontinue',
-        'capture_nearestloopabort',
-        'pair',
-        'io_file_o_rdonly',
-        'io_file_o_wronly',
-        'io_file_o_rdwr',
-        'io_file_o_nonblock',
-        'io_file_o_sync',
-        'io_file_o_shlock',
-        'io_file_o_exlock',
-        'io_file_o_async',
-        'io_file_o_fsync',
-        'io_file_o_nofollow',
-        'io_file_s_irwxu',
-        'io_file_s_irusr',
-        'io_file_s_iwusr',
-        'io_file_s_ixusr',
-        'io_file_s_irwxg',
-        'io_file_s_irgrp',
-        'io_file_s_iwgrp',
-        'io_file_s_ixgrp',
-        'io_file_s_irwxo',
-        'io_file_s_iroth',
-        'io_file_s_iwoth',
-        'io_file_s_ixoth',
-        'io_file_s_isuid',
-        'io_file_s_isgid',
-        'io_file_s_isvtx',
-        'io_file_s_ifmt',
-        'io_file_s_ifchr',
-        'io_file_s_ifdir',
-        'io_file_s_ifreg',
-        'io_file_o_append',
-        'io_file_o_creat',
-        'io_file_o_trunc',
-        'io_file_o_excl',
-        'io_file_seek_set',
-        'io_file_seek_cur',
-        'io_file_seek_end',
-        'io_file_s_ififo',
-        'io_file_s_ifblk',
-        'io_file_s_iflnk',
-        'io_file_s_ifsock',
-        'io_net_shut_rd',
-        'io_net_shut_wr',
-        'io_net_shut_rdwr',
-        'io_net_sock_stream',
-        'io_net_sock_dgram',
-        'io_net_sock_raw',
-        'io_net_sock_rdm',
-        'io_net_sock_seqpacket',
-        'io_net_so_debug',
-        'io_net_so_acceptconn',
-        'io_net_so_reuseaddr',
-        'io_net_so_keepalive',
-        'io_net_so_dontroute',
-        'io_net_so_broadcast',
-        'io_net_so_useloopback',
-        'io_net_so_linger',
-        'io_net_so_oobinline',
-        'io_net_so_timestamp',
-        'io_net_so_sndbuf',
-        'io_net_so_rcvbuf',
-        'io_net_so_sndlowat',
-        'io_net_so_rcvlowat',
-        'io_net_so_sndtimeo',
-        'io_net_so_rcvtimeo',
-        'io_net_so_error',
-        'io_net_so_type',
-        'io_net_sol_socket',
-        'io_net_af_unix',
-        'io_net_af_inet',
-        'io_net_af_inet6',
-        'io_net_ipproto_ip',
-        'io_net_ipproto_udp',
-        'io_net_msg_peek',
-        'io_net_msg_oob',
-        'io_net_msg_waitall',
-        'io_file_fioclex',
-        'io_file_fionclex',
-        'io_file_fionread',
-        'io_file_fionbio',
-        'io_file_fioasync',
-        'io_file_fiosetown',
-        'io_file_fiogetown',
-        'io_file_fiodtype',
-        'io_file_f_dupfd',
-        'io_file_f_getfd',
-        'io_file_f_setfd',
-        'io_file_f_getfl',
-        'io_file_f_setfl',
-        'io_file_f_getlk',
-        'io_file_f_setlk',
-        'io_file_f_setlkw',
-        'io_file_fd_cloexec',
-        'io_file_f_rdlck',
-        'io_file_f_unlck',
-        'io_file_f_wrlck',
-        'io_dir_dt_unknown',
-        'io_dir_dt_fifo',
-        'io_dir_dt_chr',
-        'io_dir_dt_blk',
-        'io_dir_dt_reg',
-        'io_dir_dt_sock',
-        'io_dir_dt_wht',
-        'io_dir_dt_lnk',
-        'io_dir_dt_dir',
-        'io_file_access',
-        'io_file_chdir',
-        'io_file_getcwd',
-        'io_file_chown',
-        'io_file_lchown',
-        'io_file_truncate',
-        'io_file_link',
-        'io_file_pipe',
-        'io_file_rmdir',
-        'io_file_symlink',
-        'io_file_unlink',
-        'io_file_remove',
-        'io_file_rename',
-        'io_file_tempnam',
-        'io_file_mkstemp',
-        'io_file_dirname',
-        'io_file_realpath',
-        'io_file_chmod',
-        'io_file_mkdir',
-        'io_file_mkfifo',
-        'io_file_umask',
-        'io_net_socket',
-        'io_net_bind',
-        'io_net_connect',
-        'io_net_listen',
-        'io_net_recv',
-        'io_net_recvfrom',
-        'io_net_accept',
-        'io_net_send',
-        'io_net_sendto',
-        'io_net_shutdown',
-        'io_net_getpeername',
-        'io_net_getsockname',
-        'io_net_ssl_begin',
-        'io_net_ssl_end',
-        'io_net_ssl_shutdown',
-        'io_net_ssl_setverifylocations',
-        'io_net_ssl_usecertificatechainfile',
-        'io_net_ssl_useprivatekeyfile',
-        'io_net_ssl_connect',
-        'io_net_ssl_accept',
-        'io_net_ssl_error',
-        'io_net_ssl_errorstring',
-        'io_net_ssl_liberrorstring',
-        'io_net_ssl_funcerrorstring',
-        'io_net_ssl_reasonerrorstring',
-        'io_net_ssl_setconnectstate',
-        'io_net_ssl_setacceptstate',
-        'io_net_ssl_read',
-        'io_net_ssl_write',
-        'io_file_stat_size',
-        'io_file_stat_mode',
-        'io_file_stat_mtime',
-        'io_file_stat_atime',
-        'io_file_lstat_size',
-        'io_file_lstat_mode',
-        'io_file_lstat_mtime',
-        'io_file_lstat_atime',
-        'io_file_readlink',
-        'io_file_lockf',
-        'io_file_f_ulock',
-        'io_file_f_tlock',
-        'io_file_f_test',
-        'dirdesc',
-        'io_file_stdin',
-        'io_file_stdout',
-        'io_file_stderr',
-        'filedesc',
-        'uchar_alphabetic',
-        'uchar_ascii_hex_digit',
-        'uchar_bidi_control',
-        'uchar_bidi_mirrored',
-        'uchar_dash',
-        'uchar_default_ignorable_code_point',
-        'uchar_deprecated',
-        'uchar_diacritic',
-        'uchar_extender',
-        'uchar_full_composition_exclusion',
-        'uchar_grapheme_base',
-        'uchar_grapheme_extend',
-        'uchar_grapheme_link',
-        'uchar_hex_digit',
-        'uchar_hyphen',
-        'uchar_id_continue',
-        'uchar_ideographic',
-        'uchar_ids_binary_operator',
-        'uchar_ids_trinary_operator',
-        'uchar_join_control',
-        'uchar_logical_order_exception',
-        'uchar_lowercase',
-        'uchar_math',
-        'uchar_noncharacter_code_point',
-        'uchar_quotation_mark',
-        'uchar_radical',
-        'uchar_soft_dotted',
-        'uchar_terminal_punctuation',
-        'uchar_unified_ideograph',
-        'uchar_uppercase',
-        'uchar_white_space',
-        'uchar_xid_continue',
-        'uchar_case_sensitive',
-        'uchar_s_term',
-        'uchar_variation_selector',
-        'uchar_nfd_inert',
-        'uchar_nfkd_inert',
-        'uchar_nfc_inert',
-        'uchar_nfkc_inert',
-        'uchar_segment_starter',
-        'uchar_pattern_syntax',
-        'uchar_pattern_white_space',
-        'uchar_posix_alnum',
-        'uchar_posix_blank',
-        'uchar_posix_graph',
-        'uchar_posix_print',
-        'uchar_posix_xdigit',
-        'uchar_bidi_class',
-        'uchar_block',
-        'uchar_canonical_combining_class',
-        'uchar_decomposition_type',
-        'uchar_east_asian_width',
-        'uchar_general_category',
-        'uchar_joining_group',
-        'uchar_joining_type',
-        'uchar_line_break',
-        'uchar_numeric_type',
-        'uchar_script',
-        'uchar_hangul_syllable_type',
-        'uchar_nfd_quick_check',
-        'uchar_nfkd_quick_check',
-        'uchar_nfc_quick_check',
-        'uchar_nfkc_quick_check',
-        'uchar_lead_canonical_combining_class',
-        'uchar_trail_canonical_combining_class',
-        'uchar_grapheme_cluster_break',
-        'uchar_sentence_break',
-        'uchar_word_break',
-        'uchar_general_category_mask',
-        'uchar_numeric_value',
-        'uchar_age',
-        'uchar_bidi_mirroring_glyph',
-        'uchar_case_folding',
-        'uchar_iso_comment',
-        'uchar_lowercase_mapping',
-        'uchar_name',
-        'uchar_simple_case_folding',
-        'uchar_simple_lowercase_mapping',
-        'uchar_simple_titlecase_mapping',
-        'uchar_simple_uppercase_mapping',
-        'uchar_titlecase_mapping',
-        'uchar_unicode_1_name',
-        'uchar_uppercase_mapping',
-        'u_wb_other',
-        'u_wb_aletter',
-        'u_wb_format',
-        'u_wb_katakana',
-        'u_wb_midletter',
-        'u_wb_midnum',
-        'u_wb_numeric',
-        'u_wb_extendnumlet',
-        'u_sb_other',
-        'u_sb_aterm',
-        'u_sb_close',
-        'u_sb_format',
-        'u_sb_lower',
-        'u_sb_numeric',
-        'u_sb_oletter',
-        'u_sb_sep',
-        'u_sb_sp',
-        'u_sb_sterm',
-        'u_sb_upper',
-        'u_lb_unknown',
-        'u_lb_ambiguous',
-        'u_lb_alphabetic',
-        'u_lb_break_both',
-        'u_lb_break_after',
-        'u_lb_break_before',
-        'u_lb_mandatory_break',
-        'u_lb_contingent_break',
-        'u_lb_close_punctuation',
-        'u_lb_combining_mark',
-        'u_lb_carriage_return',
-        'u_lb_exclamation',
-        'u_lb_glue',
-        'u_lb_hyphen',
-        'u_lb_ideographic',
-        'u_lb_inseparable',
-        'u_lb_infix_numeric',
-        'u_lb_line_feed',
-        'u_lb_nonstarter',
-        'u_lb_numeric',
-        'u_lb_open_punctuation',
-        'u_lb_postfix_numeric',
-        'u_lb_prefix_numeric',
-        'u_lb_quotation',
-        'u_lb_complex_context',
-        'u_lb_surrogate',
-        'u_lb_space',
-        'u_lb_break_symbols',
-        'u_lb_zwspace',
-        'u_lb_next_line',
-        'u_lb_word_joiner',
-        'u_lb_h2',
-        'u_lb_h3',
-        'u_lb_jl',
-        'u_lb_jt',
-        'u_lb_jv',
-        'u_nt_none',
-        'u_nt_decimal',
-        'u_nt_digit',
-        'u_nt_numeric',
-        'locale',
-        'locale_english',
-        'locale_french',
-        'locale_german',
-        'locale_italian',
-        'locale_japanese',
-        'locale_korean',
-        'locale_chinese',
-        'locale_simplifiedchinese',
-        'locale_traditionalchinese',
-        'locale_france',
-        'locale_germany',
-        'locale_italy',
-        'locale_japan',
-        'locale_korea',
-        'locale_china',
-        'locale_prc',
-        'locale_taiwan',
-        'locale_uk',
-        'locale_us',
-        'locale_canada',
-        'locale_canadafrench',
-        'locale_default',
-        'locale_setdefault',
-        'locale_isocountries',
-        'locale_isolanguages',
-        'locale_availablelocales',
-        'ucal_listtimezones',
-        'ucal',
-        'ucal_era',
-        'ucal_year',
-        'ucal_month',
-        'ucal_weekofyear',
-        'ucal_weekofmonth',
-        'ucal_dayofmonth',
-        'ucal_dayofyear',
-        'ucal_dayofweek',
-        'ucal_dayofweekinmonth',
-        'ucal_ampm',
-        'ucal_hour',
-        'ucal_hourofday',
-        'ucal_minute',
-        'ucal_second',
-        'ucal_millisecond',
-        'ucal_zoneoffset',
-        'ucal_dstoffset',
-        'ucal_yearwoy',
-        'ucal_dowlocal',
-        'ucal_extendedyear',
-        'ucal_julianday',
-        'ucal_millisecondsinday',
-        'ucal_lenient',
-        'ucal_firstdayofweek',
-        'ucal_daysinfirstweek',
-        'xml_domimplementation',
-        'sys_sigalrm',
-        'sys_sighup',
-        'sys_sigkill',
-        'sys_sigpipe',
-        'sys_sigquit',
-        'sys_sigusr1',
-        'sys_sigusr2',
-        'sys_sigchld',
-        'sys_sigcont',
-        'sys_sigstop',
-        'sys_sigtstp',
-        'sys_sigttin',
-        'sys_sigttou',
-        'sys_sigbus',
-        'sys_sigprof',
-        'sys_sigsys',
-        'sys_sigtrap',
-        'sys_sigurg',
-        'sys_sigvtalrm',
-        'sys_sigxcpu',
-        'sys_sigxfsz',
-        'sys_wcontinued',
-        'sys_wnohang',
-        'sys_wuntraced',
-        'sys_sigabrt',
-        'sys_sigfpe',
-        'sys_sigill',
-        'sys_sigint',
-        'sys_sigsegv',
-        'sys_sigterm',
-        'sys_exit',
-        'sys_fork',
-        'sys_kill',
-        'sys_waitpid',
-        'sys_getegid',
-        'sys_geteuid',
-        'sys_getgid',
-        'sys_getlogin',
-        'sys_getpid',
-        'sys_getppid',
-        'sys_getuid',
-        'sys_setuid',
-        'sys_setgid',
-        'sys_setsid',
-        'sys_errno',
-        'sys_strerror',
-        'sys_time',
-        'sys_difftime',
-        'sys_getpwuid',
-        'sys_getpwnam',
-        'sys_getgrnam',
-        'sys_drand48',
-        'sys_erand48',
-        'sys_jrand48',
-        'sys_lcong48',
-        'sys_lrand48',
-        'sys_mrand48',
-        'sys_nrand48',
-        'sys_srand48',
-        'sys_random',
-        'sys_srandom',
-        'sys_seed48',
-        'sys_rand',
-        'sys_srand',
-        'sys_environ',
-        'sys_getenv',
-        'sys_setenv',
-        'sys_unsetenv',
-        'sys_uname',
-        'uuid_compare',
-        'uuid_copy',
-        'uuid_generate',
-        'uuid_generate_random',
-        'uuid_generate_time',
-        'uuid_is_null',
-        'uuid_parse',
-        'uuid_unparse',
-        'uuid_unparse_lower',
-        'uuid_unparse_upper',
-        'sys_credits',
-        'sleep',
-        'sys_dll_ext',
-        'sys_listtypes',
-        'sys_listtraits',
-        'sys_listunboundmethods',
-        'sys_getthreadcount',
-        'sys_growheapby',
-        'sys_getheapsize',
-        'sys_getheapfreebytes',
-        'sys_getbytessincegc',
-        'sys_garbagecollect',
-        'sys_clock',
-        'sys_getstartclock',
-        'sys_clockspersec',
-        'sys_pointersize',
-        'sys_loadlibrary',
-        'sys_getchar',
-        'sys_chroot',
-        'sys_exec',
-        'sys_kill_exec',
-        'sys_wait_exec',
-        'sys_test_exec',
-        'sys_detach_exec',
-        'sys_pid_exec',
-        'wifexited',
-        'wexitstatus',
-        'wifsignaled',
-        'wtermsig',
-        'wifstopped',
-        'wstopsig',
-        'wifcontinued',
-        'sys_eol',
-        'sys_iswindows',
-        'sys_is_windows',
-        'sys_isfullpath',
-        'sys_is_full_path',
-        'lcapi_loadmodule',
-        'lcapi_listdatasources',
-        'dsinfo',
-        'encrypt_blowfish',
-        'decrypt_blowfish',
-        'cipher_digest',
-        'cipher_encrypt',
-        'cipher_decrypt',
-        'cipher_list',
-        'cipher_keylength',
-        'cipher_hmac',
-        'cipher_seal',
-        'cipher_open',
-        'cipher_sign',
-        'cipher_verify',
-        'cipher_decrypt_private',
-        'cipher_decrypt_public',
-        'cipher_encrypt_private',
-        'cipher_encrypt_public',
-        'cipher_generate_key',
-        'xmlstream',
-        'sourcefile',
-        'tag',
-        'tag_exists',
-        'mime_reader',
-        'curl_easy_init',
-        'curl_easy_duphandle',
-        'curl_easy_cleanup',
-        'curl_easy_getinfo',
-        'curl_multi_perform',
-        'curl_multi_result',
-        'curl_easy_reset',
-        'curl_easy_setopt',
-        'curl_easy_strerror',
-        'curl_getdate',
-        'curl_version',
-        'curl_version_info',
-        'curlinfo_effective_url',
-        'curlinfo_content_type',
-        'curlinfo_response_code',
-        'curlinfo_header_size',
-        'curlinfo_request_size',
-        'curlinfo_ssl_verifyresult',
-        'curlinfo_filetime',
-        'curlinfo_redirect_count',
-        'curlinfo_http_connectcode',
-        'curlinfo_httpauth_avail',
-        'curlinfo_proxyauth_avail',
-        'curlinfo_os_errno',
-        'curlinfo_num_connects',
-        'curlinfo_total_time',
-        'curlinfo_namelookup_time',
-        'curlinfo_connect_time',
-        'curlinfo_pretransfer_time',
-        'curlinfo_size_upload',
-        'curlinfo_size_download',
-        'curlinfo_speed_download',
-        'curlinfo_speed_upload',
-        'curlinfo_content_length_download',
-        'curlinfo_content_length_upload',
-        'curlinfo_starttransfer_time',
-        'curlinfo_redirect_time',
-        'curlinfo_ssl_engines',
-        'curlopt_url',
-        'curlopt_postfields',
-        'curlopt_cainfo',
-        'curlopt_capath',
-        'curlopt_cookie',
-        'curlopt_cookiefile',
-        'curlopt_cookiejar',
-        'curlopt_customrequest',
-        'curlopt_egdsocket',
-        'curlopt_encoding',
-        'curlopt_ftp_account',
-        'curlopt_ftpport',
-        'curlopt_interface',
-        'curlopt_krb4level',
-        'curlopt_netrc_file',
-        'curlopt_proxy',
-        'curlopt_proxyuserpwd',
-        'curlopt_random_file',
-        'curlopt_range',
-        'curlopt_readdata',
-        'curlopt_referer',
-        'curlopt_ssl_cipher_list',
-        'curlopt_sslcert',
-        'curlopt_sslcerttype',
-        'curlopt_sslengine',
-        'curlopt_sslkey',
-        'curlopt_sslkeypasswd',
-        'curlopt_sslkeytype',
-        'curlopt_useragent',
-        'curlopt_userpwd',
-        'curlopt_postfieldsize',
-        'curlopt_autoreferer',
-        'curlopt_buffersize',
-        'curlopt_connecttimeout',
-        'curlopt_cookiesession',
-        'curlopt_crlf',
-        'curlopt_dns_use_global_cache',
-        'curlopt_failonerror',
-        'curlopt_filetime',
-        'curlopt_followlocation',
-        'curlopt_forbid_reuse',
-        'curlopt_fresh_connect',
-        'curlopt_ftp_create_missing_dirs',
-        'curlopt_ftp_response_timeout',
-        'curlopt_ftp_ssl',
-        'curlopt_use_ssl',
-        'curlopt_ftp_use_eprt',
-        'curlopt_ftp_use_epsv',
-        'curlopt_ftpappend',
-        'curlopt_ftplistonly',
-        'curlopt_ftpsslauth',
-        'curlopt_header',
-        'curlopt_http_version',
-        'curlopt_httpauth',
-        'curlopt_httpget',
-        'curlopt_httpproxytunnel',
-        'curlopt_infilesize',
-        'curlopt_ipresolve',
-        'curlopt_low_speed_limit',
-        'curlopt_low_speed_time',
-        'curlopt_maxconnects',
-        'curlopt_maxfilesize',
-        'curlopt_maxredirs',
-        'curlopt_netrc',
-        'curlopt_nobody',
-        'curlopt_noprogress',
-        'curlopt_port',
-        'curlopt_post',
-        'curlopt_proxyauth',
-        'curlopt_proxyport',
-        'curlopt_proxytype',
-        'curlopt_put',
-        'curlopt_resume_from',
-        'curlopt_ssl_verifyhost',
-        'curlopt_ssl_verifypeer',
-        'curlopt_sslengine_default',
-        'curlopt_sslversion',
-        'curlopt_tcp_nodelay',
-        'curlopt_timecondition',
-        'curlopt_timeout',
-        'curlopt_timevalue',
-        'curlopt_transfertext',
-        'curlopt_unrestricted_auth',
-        'curlopt_upload',
-        'curlopt_verbose',
-        'curlopt_infilesize_large',
-        'curlopt_maxfilesize_large',
-        'curlopt_postfieldsize_large',
-        'curlopt_resume_from_large',
-        'curlopt_http200aliases',
-        'curlopt_httpheader',
-        'curlopt_postquote',
-        'curlopt_prequote',
-        'curlopt_quote',
-        'curlopt_httppost',
-        'curlopt_writedata',
-        'curl_version_ipv6',
-        'curl_version_kerberos4',
-        'curl_version_ssl',
-        'curl_version_libz',
-        'curl_version_ntlm',
-        'curl_version_gssnegotiate',
-        'curl_version_debug',
-        'curl_version_asynchdns',
-        'curl_version_spnego',
-        'curl_version_largefile',
-        'curl_version_idn',
-        'curl_netrc_ignored',
-        'curl_netrc_optional',
-        'curl_netrc_required',
-        'curl_http_version_none',
-        'curl_http_version_1_0',
-        'curl_http_version_1_1',
-        'curl_ipresolve_whatever',
-        'curl_ipresolve_v4',
-        'curl_ipresolve_v6',
-        'curlftpssl_none',
-        'curlftpssl_try',
-        'curlftpssl_control',
-        'curlftpssl_all',
-        'curlftpssl_last',
-        'curlftpauth_default',
-        'curlftpauth_ssl',
-        'curlftpauth_tls',
-        'curlauth_none',
-        'curlauth_basic',
-        'curlauth_digest',
-        'curlauth_gssnegotiate',
-        'curlauth_ntlm',
-        'curlauth_any',
-        'curlauth_anysafe',
-        'curlproxy_http',
-        'curlproxy_socks4',
-        'curlproxy_socks5',
-        'curle_ok',
-        'curle_unsupported_protocol',
-        'curle_failed_init',
-        'curle_url_malformat',
-        'curle_url_malformat_user',
-        'curle_couldnt_resolve_proxy',
-        'curle_couldnt_resolve_host',
-        'curle_couldnt_connect',
-        'curle_ftp_weird_server_reply',
-        'curle_ftp_access_denied',
-        'curle_ftp_user_password_incorrect',
-        'curle_ftp_weird_pass_reply',
-        'curle_ftp_weird_user_reply',
-        'curle_ftp_weird_pasv_reply',
-        'curle_ftp_weird_227_format',
-        'curle_ftp_cant_get_host',
-        'curle_ftp_cant_reconnect',
-        'curle_ftp_couldnt_set_binary',
-        'curle_partial_file',
-        'curle_ftp_couldnt_retr_file',
-        'curle_ftp_write_error',
-        'curle_ftp_quote_error',
-        'curle_http_returned_error',
-        'curle_write_error',
-        'curle_malformat_user',
-        'curle_read_error',
-        'curle_out_of_memory',
-        'curle_operation_timeouted',
-        'curle_ftp_couldnt_set_ascii',
-        'curle_ftp_port_failed',
-        'curle_ftp_couldnt_use_rest',
-        'curle_ftp_couldnt_get_size',
-        'curle_http_range_error',
-        'curle_http_post_error',
-        'curle_ssl_connect_error',
-        'curle_bad_download_resume',
-        'curle_file_couldnt_read_file',
-        'curle_ldap_cannot_bind',
-        'curle_ldap_search_failed',
-        'curle_library_not_found',
-        'curle_function_not_found',
-        'curle_aborted_by_callback',
-        'curle_bad_function_argument',
-        'curle_bad_calling_order',
-        'curle_interface_failed',
-        'curle_bad_password_entered',
-        'curle_too_many_redirects',
-        'curle_unknown_telnet_option',
-        'curle_telnet_option_syntax',
-        'curle_obsolete',
-        'curle_ssl_peer_certificate',
-        'curle_got_nothing',
-        'curle_ssl_engine_notfound',
-        'curle_ssl_engine_setfailed',
-        'curle_send_error',
-        'curle_recv_error',
-        'curle_share_in_use',
-        'curle_ssl_certproblem',
-        'curle_ssl_cipher',
-        'curle_ssl_cacert',
-        'curle_bad_content_encoding',
-        'curle_ldap_invalid_url',
-        'curle_filesize_exceeded',
-        'curle_ftp_ssl_failed',
-        'curle_send_fail_rewind',
-        'curle_ssl_engine_initfailed',
-        'curle_login_denied',
-        'curlmsg_done',
-        'regexp',
-        'array',
-        'boolean',
-        'zip_open',
-        'zip_name_locate',
-        'zip_fopen',
-        'zip_fopen_index',
-        'zip_fread',
-        'zip_fclose',
-        'zip_close',
-        'zip_stat',
-        'zip_stat_index',
-        'zip_get_archive_comment',
-        'zip_get_file_comment',
-        'zip_get_name',
-        'zip_get_num_files',
-        'zip_add',
-        'zip_replace',
-        'zip_add_dir',
-        'zip_set_file_comment',
-        'zip_rename',
-        'zip_delete',
-        'zip_unchange',
-        'zip_unchange_all',
-        'zip_unchange_archive',
-        'zip_set_archive_comment',
-        'zip_error_to_str',
-        'zip_file_strerror',
-        'zip_strerror',
-        'zip_error_get',
-        'zip_file_error_get',
-        'zip_error_get_sys_type',
-        'zlib_version',
-        'fastcgi_initiate_request',
-        'debugging_enabled',
-        'debugging_stop',
-        'evdns_resolve_ipv4',
-        'evdns_resolve_ipv6',
-        'evdns_resolve_reverse',
-        'evdns_resolve_reverse_ipv6',
-        'library_thread_loader',
-        'stdout',
-        'stdoutnl',
-        'fail',
-        'fail_if',
-        'fail_ifnot',
-        'error_code',
-        'error_msg',
-        'error_obj',
-        'error_stack',
-        'error_push',
-        'error_pop',
-        'error_reset',
-        'error_msg_invalidparameter',
-        'error_code_invalidparameter',
-        'error_msg_networkerror',
-        'error_code_networkerror',
-        'error_msg_runtimeassertion',
-        'error_code_runtimeassertion',
-        'error_msg_methodnotfound',
-        'error_code_methodnotfound',
-        'error_msg_resnotfound',
-        'error_code_resnotfound',
-        'error_msg_filenotfound',
-        'error_code_filenotfound',
-        'error_msg_aborted',
-        'error_code_aborted',
-        'error_msg_dividebyzero',
-        'error_code_dividebyzero',
-        'error_msg_noerror',
-        'error_code_noerror',
-        'abort',
-        'protect',
-        'trait_asstring',
-        'any',
-        'trait_generator',
-        'trait_decompose_assignment',
-        'trait_foreach',
-        'trait_generatorcentric',
-        'generateforeach',
-        'generateforeachunkeyed',
-        'generateforeachkeyed',
-        'trait_foreachtextelement',
-        'trait_finite',
-        'trait_finiteforeach',
-        'trait_keyed',
-        'trait_keyedfinite',
-        'trait_keyedforeach',
-        'trait_frontended',
-        'trait_backended',
-        'trait_doubleended',
-        'trait_positionallykeyed',
-        'trait_expandable',
-        'trait_frontexpandable',
-        'trait_backexpandable',
-        'trait_contractible',
-        'trait_frontcontractible',
-        'trait_backcontractible',
-        'trait_fullymutable',
-        'trait_keyedmutable',
-        'trait_endedfullymutable',
-        'trait_setoperations',
-        'trait_searchable',
-        'trait_positionallysearchable',
-        'trait_pathcomponents',
-        'trait_readbytes',
-        'trait_writebytes',
-        'trait_setencoding',
-        'trait_readstring',
-        'trait_writestring',
-        'trait_hashable',
-        'eacher',
-        'trait_each_sub',
-        'trait_stack',
-        'trait_list',
-        'trait_array',
-        'trait_map',
-        'trait_close',
-        'trait_file',
-        'trait_scalar',
-        'method_name',
-        'trait_queriablelambda',
-        'trait_queriable',
-        'queriable_asstring',
-        'queriable_where',
-        'queriable_do',
-        'queriable_sum',
-        'queriable_average',
-        'queriable_min',
-        'queriable_max',
-        'queriable_select',
-        'queriable_selectmany',
-        'queriable_groupby',
-        'queriable_join',
-        'queriable_groupjoin',
-        'queriable_orderby',
-        'queriable_orderbydescending',
-        'queriable_thenby',
-        'queriable_thenbydescending',
-        'queriable_skip',
-        'queriable_take',
-        'queriable_grouping',
-        'queriable_internal_combinebindings',
-        'queriable_defaultcompare',
-        'queriable_reversecompare',
-        'queriable_qsort',
-        'generateseries',
-        'timer',
-        'tie',
-        'pairup',
-        'delve',
-        'repeat',
-        'thread_var_push',
-        'thread_var_pop',
-        'thread_var_get',
-        'loop_value',
-        'loop_value_push',
-        'loop_value_pop',
-        'loop_key',
-        'loop_key_push',
-        'loop_key_pop',
-        'loop_push',
-        'loop_pop',
-        'loop_count',
-        'loop_continue',
-        'loop_abort',
-        'loop',
-        'sys_while',
-        'sys_iterate',
-        'pair_compare',
-        'serialization_object_identity_compare',
-        'serialization_element',
-        'trait_serializable',
-        'serialization_writer_standin',
-        'serialization_writer_ref',
-        'serialization_writer',
-        'serialization_reader',
-        'string_validcharset',
-        'eol',
-        'encoding_utf8',
-        'encoding_iso88591',
-        'trait_treenode',
-        'tree_nullnode',
-        'tree_node',
-        'tree_base',
-        'map_node',
-        'map',
-        'integer_random',
-        'integer_bitor',
-        'millis',
-        'micros',
-        'max',
-        'min',
-        'range',
-        'median',
-        'decimal_random',
-        'pi',
-        'lcapi_datasourceinit',
-        'lcapi_datasourceterm',
-        'lcapi_datasourcenames',
-        'lcapi_datasourcetablenames',
-        'lcapi_datasourcesearch',
-        'lcapi_datasourceadd',
-        'lcapi_datasourceupdate',
-        'lcapi_datasourcedelete',
-        'lcapi_datasourceinfo',
-        'lcapi_datasourceexecsql',
-        'lcapi_datasourcerandom',
-        'lcapi_datasourceschemanames',
-        'lcapi_datasourcecloseconnection',
-        'lcapi_datasourcetickle',
-        'lcapi_datasourceduplicate',
-        'lcapi_datasourcescripts',
-        'lcapi_datasourceimage',
-        'lcapi_datasourcefindall',
-        'lcapi_datasourcematchesname',
-        'lcapi_datasourcepreparesql',
-        'lcapi_datasourceunpreparesql',
-        'lcapi_datasourcenothing',
-        'lcapi_fourchartointeger',
-        'lcapi_datasourcetypestring',
-        'lcapi_datasourcetypeinteger',
-        'lcapi_datasourcetypeboolean',
-        'lcapi_datasourcetypeblob',
-        'lcapi_datasourcetypedecimal',
-        'lcapi_datasourcetypedate',
-        'lcapi_datasourceprotectionnone',
-        'lcapi_datasourceprotectionreadonly',
-        'lcapi_datasourceopgt',
-        'lcapi_datasourceopgteq',
-        'lcapi_datasourceopeq',
-        'lcapi_datasourceopneq',
-        'lcapi_datasourceoplt',
-        'lcapi_datasourceoplteq',
-        'lcapi_datasourceopbw',
-        'lcapi_datasourceopew',
-        'lcapi_datasourceopct',
-        'lcapi_datasourceopnct',
-        'lcapi_datasourceopnbw',
-        'lcapi_datasourceopnew',
-        'lcapi_datasourceopand',
-        'lcapi_datasourceopor',
-        'lcapi_datasourceopnot',
-        'lcapi_datasourceopno',
-        'lcapi_datasourceopany',
-        'lcapi_datasourceopin',
-        'lcapi_datasourceopnin',
-        'lcapi_datasourceopft',
-        'lcapi_datasourceoprx',
-        'lcapi_datasourceopnrx',
-        'lcapi_datasourcesortascending',
-        'lcapi_datasourcesortdescending',
-        'lcapi_datasourcesortcustom',
-        'lcapi_loadmodules',
-        'lasso_version',
-        'lasso_uniqueid',
-        'usage',
-        'file_defaultencoding',
-        'file_copybuffersize',
-        'file_modeline',
-        'file_modechar',
-        'file_forceroot',
-        'file_tempfile',
-        'file',
-        'file_stdin',
-        'file_stdout',
-        'file_stderr',
-        'lasso_tagexists',
-        'lasso_methodexists',
-        'output',
-        'if_empty',
-        'if_null',
-        'if_true',
-        'if_false',
-        'process',
-        'treemap',
-        'locale_format',
-        'compress',
-        'uncompress',
-        'decompress',
-        'tag_name',
-        'series',
-        'nslookup',
-        'all',
-        'bw',
-        'cn',
-        'eq',
-        'ew',
-        'ft',
-        'gt',
-        'gte',
-        'lt',
-        'lte',
-        'neq',
-        'nrx',
-        'rx',
-        'none',
-        'minimal',
-        'full',
-        'output_none',
-        'lasso_executiontimelimit',
-        'namespace_global',
-        'namespace_using',
-        'namespace_import',
-        'site_id',
-        'site_name',
-        'sys_homepath',
-        'sys_masterhomepath',
-        'sys_supportpath',
-        'sys_librariespath',
-        'sys_databasespath',
-        'sys_usercapimodulepath',
-        'sys_appspath',
-        'sys_userstartuppath',
-        'dir',
-        'magick_image',
-        'ldap',
-        'ldap_scope_base',
-        'ldap_scope_onelevel',
-        'ldap_scope_subtree',
-        'mysqlds',
-        'os_process',
-        'odbc',
-        'sqliteconnector',
-        'sqlite_createdb',
-        'sqlite_setsleepmillis',
-        'sqlite_setsleeptries',
-        'java_jvm_getenv',
-        'java_jvm_create',
-        'java_jdbc_load',
-        'database_database',
-        'database_table_datasources',
-        'database_table_datasource_hosts',
-        'database_table_datasource_databases',
-        'database_table_database_tables',
-        'database_table_table_fields',
-        'database_qs',
-        'database_initialize',
-        'database_util_cleanpath',
-        'database_adddefaultsqlitehost',
-        'database_registry',
-        'sqlite_ok',
-        'sqlite_error',
-        'sqlite_internal',
-        'sqlite_perm',
-        'sqlite_abort',
-        'sqlite_busy',
-        'sqlite_locked',
-        'sqlite_nomem',
-        'sqlite_readonly',
-        'sqlite_interrupt',
-        'sqlite_ioerr',
-        'sqlite_corrupt',
-        'sqlite_notfound',
-        'sqlite_full',
-        'sqlite_cantopen',
-        'sqlite_protocol',
-        'sqlite_empty',
-        'sqlite_schema',
-        'sqlite_toobig',
-        'sqlite_constraint',
-        'sqlite_mismatch',
-        'sqlite_misuse',
-        'sqlite_nolfs',
-        'sqlite_auth',
-        'sqlite_format',
-        'sqlite_range',
-        'sqlite_notadb',
-        'sqlite_row',
-        'sqlite_done',
-        'sqlite_integer',
-        'sqlite_float',
-        'sqlite_blob',
-        'sqlite_null',
-        'sqlite_text',
-        'sqlite3',
-        'sqlite_db',
-        'sqlite_results',
-        'sqlite_currentrow',
-        'sqlite_table',
-        'sqlite_column',
-        'bom_utf16be',
-        'bom_utf16le',
-        'bom_utf32be',
-        'bom_utf32le',
-        'bom_utf8',
-        'curl',
-        'include_url',
-        'ftp_getdata',
-        'ftp_getfile',
-        'ftp_getlisting',
-        'ftp_putdata',
-        'ftp_putfile',
-        'ftp_deletefile',
-        'date',
-        'debugging_step_in',
-        'debugging_get_stack',
-        'debugging_get_context',
-        'debugging_detach',
-        'debugging_step_over',
-        'debugging_step_out',
-        'debugging_run',
-        'debugging_break',
-        'debugging_breakpoint_set',
-        'debugging_breakpoint_get',
-        'debugging_breakpoint_remove',
-        'debugging_breakpoint_list',
-        'debugging_breakpoint_update',
-        'debugging_terminate',
-        'debugging_context_locals',
-        'debugging_context_vars',
-        'debugging_context_self',
-        'debugging_stack',
-        'dbgp_stop_stack_name',
-        'dbgp_server',
-        'dbgp_packet',
-        'duration',
-        'encrypt_md5',
-        'inline_columninfo_pos',
-        'inline_resultrows_pos',
-        'inline_foundcount_pos',
-        'inline_colinfo_name_pos',
-        'inline_colinfo_valuelist_pos',
-        'inline_scopeget',
-        'inline_scopepush',
-        'inline_scopepop',
-        'inline_namedget',
-        'inline_namedput',
-        'inline',
-        'inline_type',
-        'resultset_count',
-        'resultset',
-        'resultsets',
-        'rows',
-        'rows_impl',
-        'records',
-        'column',
-        'field',
-        'column_names',
-        'field_names',
-        'column_name',
-        'field_name',
-        'found_count',
-        'shown_count',
-        'shown_first',
-        'shown_last',
-        'action_statement',
-        'lasso_currentaction',
-        'maxrecords_value',
-        'skiprecords_value',
-        'action_param',
-        'action_params',
-        'admin_authorization',
-        'admin_currentgroups',
-        'admin_currentuserid',
-        'admin_currentusername',
-        'database_name',
-        'table_name',
-        'layout_name',
-        'schema_name',
-        'keycolumn_name',
-        'keyfield_name',
-        'keycolumn_value',
-        'keyfield_value',
-        'inline_colinfo_type_pos',
-        'column_type',
-        'rows_array',
-        'records_array',
-        'records_map',
-        'trait_json_serialize',
-        'json_serialize',
-        'json_consume_string',
-        'json_consume_token',
-        'json_consume_array',
-        'json_consume_object',
-        'json_deserialize',
-        'json_literal',
-        'json_object',
-        'json_rpccall',
-        'list_node',
-        'list',
-        'jchar',
-        'jchararray',
-        'jbyte',
-        'jbytearray',
-        'jfloat',
-        'jint',
-        'jshort',
-        'ljapi_initialize',
-        'formattingbase',
-        'currency',
-        'scientific',
-        'percent',
-        'dateandtime',
-        'timeonly',
-        'locale_format_style_full',
-        'locale_format_style_long',
-        'locale_format_style_medium',
-        'locale_format_style_short',
-        'locale_format_style_default',
-        'locale_format_style_none',
-        'locale_format_style_date_time',
-        'net_connectinprogress',
-        'net_connectok',
-        'net_typessl',
-        'net_typessltcp',
-        'net_typessludp',
-        'net_typetcp',
-        'net_typeudp',
-        'net_waitread',
-        'net_waittimeout',
-        'net_waitwrite',
-        'trait_net',
-        'net_tcp',
-        'net_tcpssl',
-        'net_named_pipe',
-        'net_udppacket',
-        'net_udp',
-        'admin_initialize',
-        'admin_getpref',
-        'admin_setpref',
-        'admin_removepref',
-        'admin_userexists',
-        'admin_lassoservicepath',
-        'pdf_package',
-        'pdf_rectangle',
-        'pdf_typebase',
-        'pdf_doc',
-        'pdf_color',
-        'pdf_barcode',
-        'pdf_font',
-        'pdf_image',
-        'pdf_list',
-        'pdf_read',
-        'pdf_table',
-        'pdf_text',
-        'pdf_hyphenator',
-        'pdf_chunk',
-        'pdf_phrase',
-        'pdf_paragraph',
-        'pdf_serve',
-        'queue',
-        'random_seed',
-        'set',
-        'sys_process',
-        'worker_pool',
-        'xml',
-        'trait_xml_elementcompat',
-        'trait_xml_nodecompat',
-        'xml_transform',
-        'zip_create',
-        'zip_excl',
-        'zip_checkcons',
-        'zip_fl_nocase',
-        'zip_fl_nodir',
-        'zip_fl_compressed',
-        'zip_fl_unchanged',
-        'zip_er_ok',
-        'zip_er_multidisk',
-        'zip_er_rename',
-        'zip_er_close',
-        'zip_er_seek',
-        'zip_er_read',
-        'zip_er_write',
-        'zip_er_crc',
-        'zip_er_zipclosed',
-        'zip_er_noent',
-        'zip_er_exists',
-        'zip_er_open',
-        'zip_er_tmpopen',
-        'zip_er_zlib',
-        'zip_er_memory',
-        'zip_er_changed',
-        'zip_er_compnotsupp',
-        'zip_er_eof',
-        'zip_er_inval',
-        'zip_er_nozip',
-        'zip_er_internal',
-        'zip_er_incons',
-        'zip_er_remove',
-        'zip_er_deleted',
-        'zip_et_none',
-        'zip_et_sys',
-        'zip_et_zlib',
-        'zip_cm_default',
-        'zip_cm_store',
-        'zip_cm_shrink',
-        'zip_cm_reduce_1',
-        'zip_cm_reduce_2',
-        'zip_cm_reduce_3',
-        'zip_cm_reduce_4',
-        'zip_cm_implode',
-        'zip_cm_deflate',
-        'zip_cm_deflate64',
-        'zip_cm_pkware_implode',
-        'zip_cm_bzip2',
-        'zip_em_none',
-        'zip_em_trad_pkware',
-        'zip_em_des',
-        'zip_em_rc2_old',
-        'zip_em_3des_168',
-        'zip_em_3des_112',
-        'zip_em_aes_128',
-        'zip_em_aes_192',
-        'zip_em_aes_256',
-        'zip_em_rc2',
-        'zip_em_rc4',
-        'zip_em_unknown',
-        'zip_file',
-        'zip',
-        'cache_server_element',
-        'cache_server',
-        'dns_response',
-        'dns_lookup',
-        'dns_default',
-        'string_charfromname',
-        'string_concatenate',
-        'string_endswith',
-        'string_extract',
-        'string_findposition',
-        'string_findregexp',
-        'string_getunicodeversion',
-        'string_insert',
-        'string_isalpha',
-        'string_isalphanumeric',
-        'string_isdigit',
-        'string_ishexdigit',
-        'string_islower',
-        'string_isnumeric',
-        'string_ispunctuation',
-        'string_isspace',
-        'string_isupper',
-        'string_length',
-        'string_remove',
-        'string_removeleading',
-        'string_removetrailing',
-        'string_replace',
-        'string_replaceregexp',
-        'string_todecimal',
-        'string_tointeger',
-        'string_uppercase',
-        'string_lowercase',
-        'document',
-        'component_render_state',
-        'component',
-        'component_container',
-        'document_base',
-        'document_body',
-        'document_header',
-        'text_document',
-        'data_document',
-        'email_attachment_mime_type',
-        'email_translatebreakstocrlf',
-        'email_findemails',
-        'email_fix_address',
-        'email_fix_address_list',
-        'email_compose',
-        'email_send',
-        'email_queue',
-        'email_immediate',
-        'email_result',
-        'email_status',
-        'email_token',
-        'email_merge',
-        'email_batch',
-        'encode_qheader',
-        'email_pop',
-        'email_parse',
-        'email_safeemail',
-        'email_extract',
-        'email_pop_priv_substring',
-        'email_pop_priv_extract',
-        'email_digestchallenge',
-        'email_pop_priv_quote',
-        'email_digestresponse',
-        'encrypt_hmac',
-        'encrypt_crammd5',
-        'email_queue_impl_base',
-        'email_fs_error_clean',
-        'email_stage_impl_base',
-        'email_initialize',
-        'email_mxlookup',
-        'lasso_errorreporting',
-        'fcgi_version_1',
-        'fcgi_null_request_id',
-        'fcgi_begin_request',
-        'fcgi_abort_request',
-        'fcgi_end_request',
-        'fcgi_params',
-        'fcgi_stdin',
-        'fcgi_stdout',
-        'fcgi_stderr',
-        'fcgi_data',
-        'fcgi_get_values',
-        'fcgi_get_values_result',
-        'fcgi_unknown_type',
-        'fcgi_keep_conn',
-        'fcgi_responder',
-        'fcgi_authorize',
-        'fcgi_filter',
-        'fcgi_request_complete',
-        'fcgi_cant_mpx_conn',
-        'fcgi_overloaded',
-        'fcgi_unknown_role',
-        'fcgi_max_conns',
-        'fcgi_max_reqs',
-        'fcgi_mpxs_conns',
-        'fcgi_read_timeout_seconds',
-        'fcgi_record',
-        'fcgi_makeendrequestbody',
-        'fcgi_bodychunksize',
-        'fcgi_makestdoutbody',
-        'fcgi_readparam',
-        'web_connection',
-        'web_request_impl',
-        'web_request',
-        'fcgi_request',
-        'include_cache_compare',
-        'include_cache',
-        'atbegin',
-        'fastcgi_initialize',
-        'fastcgi_handlecon',
-        'fastcgi_handlereq',
-        'fastcgi_each_fcgi_param',
-        'fastcgi_createfcgirequest',
-        'fastcgi_server',
-        'web_handlefcgirequest',
-        'filemaker_datasource',
-        'filemakerds_initialize',
-        'filemakerds',
-        'value_listitem',
-        'valuelistitem',
-        'selected',
-        'checked',
-        'value_list',
-        'http_document',
-        'http_document_header',
-        'http_header_field',
-        'html_document_head',
-        'html_document_body',
-        'raw_document_body',
-        'bytes_document_body',
-        'html_element_coreattrs',
-        'html_element_i18nattrs',
-        'html_element_eventsattrs',
-        'html_attributed',
-        'html_attr',
-        'html_atomic_element',
-        'html_container_element',
-        'http_error',
-        'html_script',
-        'html_text',
-        'html_raw',
-        'html_binary',
-        'html_json',
-        'html_cdata',
-        'html_eol',
-        'html_div',
-        'html_span',
-        'html_br',
-        'html_hr',
-        'html_h1',
-        'html_h2',
-        'html_h3',
-        'html_h4',
-        'html_h5',
-        'html_h6',
-        'html_meta',
-        'html_link',
-        'html_object',
-        'html_style',
-        'html_base',
-        'html_table',
-        'html_tr',
-        'html_td',
-        'html_th',
-        'html_img',
-        'html_form',
-        'html_fieldset',
-        'html_legend',
-        'html_input',
-        'html_label',
-        'html_option',
-        'html_select',
-        'http_char_space',
-        'http_char_htab',
-        'http_char_cr',
-        'http_char_lf',
-        'http_char_question',
-        'http_char_colon',
-        'http_read_timeout_secs',
-        'http_server_web_connection',
-        'http_server',
-        'http_server_connection_handler',
-        'image',
-        'jdbc_initialize',
-        'lassoapp_settingsdb',
-        'lassoapp_resource',
-        'lassoapp_format_mod_date',
-        'lassoapp_include_current',
-        'lassoapp_include',
-        'lassoapp_find_missing_file',
-        'lassoapp_source',
-        'lassoapp_capabilities',
-        'lassoapp_get_capabilities_name',
-        'lassoapp_exists',
-        'lassoapp_path_to_method_name',
-        'lassoapp_invoke_resource',
-        'lassoapp_installer',
-        'lassoapp_initialize_db',
-        'lassoapp_initialize',
-        'lassoapp_content_rep_halt',
-        'lassoapp_issourcefileextension',
-        'lassoapp_dirsrc_fileresource',
-        'lassoapp_dirsrc_appsource',
-        'lassoapp_livesrc_fileresource',
-        'lassoapp_livesrc_appsource',
-        'lassoapp_long_expiring_bytes',
-        'lassoapp_zip_file_server',
-        'lassoapp_zipsrc_fileresource',
-        'lassoapp_zipsrc_appsource',
-        'lassoapp_compiledsrc_fileresource',
-        'lassoapp_compiledsrc_appsource',
-        'lassoapp_manualsrc_appsource',
-        'lassoapp_current_include',
-        'lassoapp_current_app',
-        'lassoapp_do_with_include',
-        'lassoapp_link',
-        'lassoapp_load_module',
-        'lassoapp_mime_type_html',
-        'lassoapp_mime_type_lasso',
-        'lassoapp_mime_type_xml',
-        'lassoapp_mime_type_ppt',
-        'lassoapp_mime_type_js',
-        'lassoapp_mime_type_txt',
-        'lassoapp_mime_type_jpg',
-        'lassoapp_mime_type_png',
-        'lassoapp_mime_type_gif',
-        'lassoapp_mime_type_css',
-        'lassoapp_mime_type_csv',
-        'lassoapp_mime_type_tif',
-        'lassoapp_mime_type_ico',
-        'lassoapp_mime_type_rss',
-        'lassoapp_mime_type_xhr',
-        'lassoapp_mime_type_pdf',
-        'lassoapp_mime_type_docx',
-        'lassoapp_mime_type_doc',
-        'lassoapp_mime_type_zip',
-        'lassoapp_mime_type_svg',
-        'lassoapp_mime_type_ttf',
-        'lassoapp_mime_type_woff',
-        'lassoapp_mime_get',
-        'log_level_critical',
-        'log_level_warning',
-        'log_level_detail',
-        'log_level_sql',
-        'log_level_deprecated',
-        'log_destination_console',
-        'log_destination_file',
-        'log_destination_database',
-        'log',
-        'log_setdestination',
-        'log_always',
-        'log_critical',
-        'log_warning',
-        'log_detail',
-        'log_sql',
-        'log_deprecated',
-        'log_max_file_size',
-        'log_trim_file_size',
-        'log_impl_base',
-        'log_initialize',
-        'portal_impl',
-        'portal',
-        'security_database',
-        'security_table_groups',
-        'security_table_users',
-        'security_table_ug_map',
-        'security_default_realm',
-        'security_initialize',
-        'security_registry',
-        'session_driver',
-        'session_initialize',
-        'session_getdefaultdriver',
-        'session_setdefaultdriver',
-        'session_start',
-        'session_addvar',
-        'session_removevar',
-        'session_end',
-        'session_id',
-        'session_abort',
-        'session_result',
-        'session_deleteexpired',
-        'memory_session_driver_impl_entry',
-        'memory_session_driver_impl',
-        'sqlite_session_driver_impl_entry',
-        'sqlite_session_driver_impl',
-        'mysql_session_driver_impl',
-        'odbc_session_driver_mssql',
-        'odbc_session_driver_impl',
-        'session_decorate',
-        'session_delete_expired_thread',
-        'email_smtp',
-        'auth_admin',
-        'auth_check',
-        'auth_custom',
-        'auth_group',
-        'auth_prompt',
-        'auth_user',
-        'client_address',
-        'client_addr',
-        'client_authorization',
-        'client_browser',
-        'client_contentlength',
-        'client_contenttype',
-        'client_cookielist',
-        'client_cookies',
-        'client_encoding',
-        'client_formmethod',
-        'client_getargs',
-        'client_getparams',
-        'client_getparam',
-        'client_headers',
-        'client_integertoip',
-        'client_ip',
-        'client_iptointeger',
-        'client_password',
-        'client_postargs',
-        'client_postparams',
-        'client_postparam',
-        'client_type',
-        'client_username',
-        'client_url',
-        'referer_url',
-        'referrer_url',
-        'content_type',
-        'content_encoding',
-        'cookie',
-        'cookie_set',
-        'include',
-        'include_currentpath',
-        'include_filepath',
-        'include_localpath',
-        'include_once',
-        'include_path',
-        'include_raw',
-        'includes',
-        'library',
-        'library_once',
-        'response_filepath',
-        'response_localpath',
-        'response_path',
-        'response_realm',
-        'response_root',
-        'redirect_url',
-        'server_admin',
-        'server_name',
-        'server_ip',
-        'server_port',
-        'server_protocol',
-        'server_signature',
-        'server_software',
-        'server_push',
-        'token_value',
-        'wap_isenabled',
-        'wap_maxbuttons',
-        'wap_maxhorzpixels',
-        'wap_maxvertpixels',
-        'wap_maxcolumns',
-        'wap_maxrows',
-        'define_atbegin',
-        'define_atend',
-        'content_header',
-        'content_addheader',
-        'content_replaceheader',
-        'content_body',
-        'html_comment',
-        'web_node_content_json_specialized',
-        'web_node',
-        'web_node_container',
-        'web_node_content_representation',
-        'web_node_content',
-        'web_node_content_document',
-        'web_node_postable',
-        'web_node_base',
-        'web_node_forpath',
-        'web_nodes_requesthandler',
-        'web_nodes_normalizeextension',
-        'web_nodes_processcontentnode',
-        'web_node_root',
-        'web_nodes_initialize',
-        'web_node_content_representation_xhr_container',
-        'web_node_content_representation_xhr',
-        'web_node_content_html_specialized',
-        'web_node_content_representation_html_specialized',
-        'web_node_content_representation_html',
-        'web_node_content_css_specialized',
-        'web_node_content_representation_css_specialized',
-        'web_node_content_representation_css',
-        'web_node_content_js_specialized',
-        'web_node_content_representation_js_specialized',
-        'web_node_content_representation_js',
-        'web_node_echo',
-        'web_response_nodesentry',
-        'web_error_atend',
-        'web_response_impl',
-        'web_response',
-        'web_router_database',
-        'web_router_initialize',
-        'web_router',
-        'asstring',
-        'isnota',
-        'isallof',
-        'isanyof',
-        'oncompare',
-        'isa',
-        'ascopy',
-        'ascopydeep',
-        'type',
-        'invoke',
-        'atend',
-        'decomposeassignment',
-        'asgenerator',
-        'foreach',
-        'eachword',
-        'eachline',
-        'eachcharacter',
-        'foreachwordbreak',
-        'foreachlinebreak',
-        'foreachcharacter',
-        'isempty',
-        'isnotempty',
-        'ifempty',
-        'ifnotempty',
-        'size',
-        'values',
-        'asarray',
-        'aslist',
-        'asstaticarray',
-        'join',
-        'get',
-        'keys',
-        'askeyedgenerator',
-        'eachpair',
-        'eachkey',
-        'foreachpair',
-        'foreachkey',
-        'front',
-        'first',
-        'back',
-        'last',
-        'second',
-        'insert',
-        'insertfront',
-        'insertfirst',
-        'insertback',
-        'insertfrom',
-        'insertlast',
-        'remove',
-        'removeall',
-        'removefront',
-        'removefirst',
-        'removeback',
-        'removelast',
-        'difference',
-        'intersection',
-        'union',
-        'contains',
-        'find',
-        'findposition',
-        'componentdelimiter',
-        'extensiondelimiter',
-        'lastcomponent',
-        'foreachpathcomponent',
-        'eachcomponent',
-        'striplastcomponent',
-        'firstcomponent',
-        'stripfirstcomponent',
-        'splitextension',
-        'hastrailingcomponent',
-        'isfullpath',
-        'findlast',
-        'sub',
-        'readsomebytes',
-        'readbytesfully',
-        'readbytes',
-        'writebytes',
-        'encoding',
-        'readstring',
-        'writestring',
-        'hash',
-        'foreachsub',
-        'eachsub',
-        'push',
-        'pop',
-        'top',
-        'dowithclose',
-        'close',
-        'fd',
-        'do',
-        'sum',
-        'average',
-        'where',
-        'select',
-        'selectmany',
-        'groupby',
-        'groupjoin',
-        'orderby',
-        'orderbydescending',
-        'thenby',
-        'thenbydescending',
-        'skip',
-        'take',
-        'serialize',
-        'serializationelements',
-        'acceptdeserializedelement',
-        'left',
-        'right',
-        'up',
-        'value',
-        'bind',
-        'listen',
-        'localaddress',
-        'remoteaddress',
-        'shutdownrd',
-        'shutdownwr',
-        'shutdownrdwr',
-        'setname',
-        'contents',
-        'tagname',
-        'foreachchild',
-        'eachchild',
-        'foreachmatch',
-        'eachmatch',
-        'haschildnodes',
-        'childnodes',
-        'extract',
-        'connection',
-        'requestparams',
-        'stdin',
-        'mimes',
-        'setstatus',
-        'getstatus',
-        'writeheaderline',
-        'writeheaderbytes',
-        'writebodybytes',
-        'id',
-        'class',
-        'style',
-        'title',
-        'gethtmlattr',
-        'lang',
-        'onclick',
-        'ondblclick',
-        'onmousedown',
-        'onmouseup',
-        'onmouseover',
-        'onmousemove',
-        'onmouseout',
-        'onkeypress',
-        'onkeydown',
-        'onkeyup',
-        'sethtmlattr',
-        'gethtmlattrstring',
-        'hashtmlattr',
-        'addcomponent',
-        'attributes',
-        'issourcefile',
-        'resourceinvokable',
-        'resourcename',
-        'fullpath',
-        'appname',
-        'srcpath',
-        'resources',
-        'foo',
-        'startup',
-        'validatesessionstable',
-        'createtable',
-        'fetchdata',
-        'savedata',
-        'init',
-        'kill',
-        'expire',
-        'jsonlabel',
-        'jsonhtml',
-        'jsonisleaf',
-        'delim',
-        'name',
-        'path',
-        'nodelist',
-        'subnode',
-        'subnodes',
-        'representnoderesult',
-        'mime',
-        'extensions',
-        'representnode',
-        'defaultcontentrepresentation',
-        'supportscontentrepresentation',
-        'acceptpost',
-        'htmlcontent',
-        'csscontent',
-        'jscontent',
-        'escape_member',
-        'sameas',
-        'parent',
-        'settrait',
-        'oncreate',
-        'listmethods',
-        'hasmethod',
-        'addtrait',
-        'gettype',
-        'istype',
-        'doccomment',
-        'requires',
-        'provides',
-        'subtraits',
-        'description',
-        'hosttonet16',
-        'hosttonet32',
-        'nettohost16',
-        'nettohost32',
-        'nettohost64',
-        'hosttonet64',
-        'bitset',
-        'bittest',
-        'bitflip',
-        'bitclear',
-        'bitor',
-        'bitand',
-        'bitxor',
-        'bitnot',
-        'bitshiftleft',
-        'bitshiftright',
-        'abs',
-        'div',
-        'dereferencepointer',
-        'asdecimal',
-        'deg2rad',
-        'asstringhex',
-        'asstringoct',
-        'acos',
-        'asin',
-        'atan',
-        'atan2',
-        'ceil',
-        'cos',
-        'cosh',
-        'exp',
-        'fabs',
-        'floor',
-        'frexp',
-        'ldexp',
-        'log10',
-        'modf',
-        'pow',
-        'sin',
-        'sinh',
-        'sqrt',
-        'tan',
-        'tanh',
-        'erf',
-        'erfc',
-        'gamma',
-        'hypot',
-        'j0',
-        'j1',
-        'jn',
-        'lgamma',
-        'y0',
-        'y1',
-        'yn',
-        'isnan',
-        'acosh',
-        'asinh',
-        'atanh',
-        'cbrt',
-        'expm1',
-        'nextafter',
-        'scalb',
-        'ilogb',
-        'log1p',
-        'logb',
-        'remainder',
-        'rint',
-        'asinteger',
-        'self',
-        'detach',
-        'restart',
-        'resume',
-        'continuation',
-        'home',
-        'callsite_file',
-        'callsite_line',
-        'callsite_col',
-        'callstack',
-        'splitthread',
-        'threadreaddesc',
-        'givenblock',
-        'autocollectbuffer',
-        'calledname',
-        'methodname',
-        'invokeuntil',
-        'invokewhile',
-        'invokeautocollect',
-        'asasync',
-        'append',
-        'appendchar',
-        'private_find',
-        'private_findlast',
-        'length',
-        'chardigitvalue',
-        'private_compare',
-        'charname',
-        'chartype',
-        'decompose',
-        'normalize',
-        'digit',
-        'foldcase',
-        'private_merge',
-        'unescape',
-        'trim',
-        'titlecase',
-        'reverse',
-        'getisocomment',
-        'getnumericvalue',
-        'totitle',
-        'toupper',
-        'tolower',
-        'lowercase',
-        'uppercase',
-        'isalnum',
-        'isalpha',
-        'isbase',
-        'iscntrl',
-        'isdigit',
-        'isxdigit',
-        'islower',
-        'isprint',
-        'isspace',
-        'istitle',
-        'ispunct',
-        'isgraph',
-        'isblank',
-        'isualphabetic',
-        'isulowercase',
-        'isupper',
-        'isuuppercase',
-        'isuwhitespace',
-        'iswhitespace',
-        'encodehtml',
-        'decodehtml',
-        'encodexml',
-        'decodexml',
-        'encodehtmltoxml',
-        'getpropertyvalue',
-        'hasbinaryproperty',
-        'asbytes',
-        'equals',
-        'compare',
-        'comparecodepointorder',
-        'padleading',
-        'padtrailing',
-        'merge',
-        'split',
-        'removeleading',
-        'removetrailing',
-        'beginswith',
-        'endswith',
-        'replace',
-        'eachwordbreak',
-        'encodesql92',
-        'encodesql',
-        'substring',
-        'setsize',
-        'reserve',
-        'getrange',
-        'private_setrange',
-        'importas',
-        'import8bits',
-        'import32bits',
-        'import64bits',
-        'import16bits',
-        'importbytes',
-        'importpointer',
-        'export8bits',
-        'export16bits',
-        'export32bits',
-        'export64bits',
-        'exportbytes',
-        'exportsigned8bits',
-        'exportsigned16bits',
-        'exportsigned32bits',
-        'exportsigned64bits',
-        'marker',
-        'swapbytes',
-        'encodeurl',
-        'decodeurl',
-        'encodebase64',
-        'decodebase64',
-        'encodeqp',
-        'decodeqp',
-        'encodemd5',
-        'encodehex',
-        'decodehex',
-        'detectcharset',
-        'bestcharset',
-        'crc',
-        'importstring',
-        'setrange',
-        'exportas',
-        'exportstring',
-        'exportpointerbits',
-        'foreachbyte',
-        'eachbyte',
-        'typename',
-        'returntype',
-        'restname',
-        'paramdescs',
-        'action',
-        'statement',
-        'inputcolumns',
-        'keycolumns',
-        'returncolumns',
-        'sortcolumns',
-        'skiprows',
-        'maxrows',
-        'rowsfound',
-        'statementonly',
-        'lop',
-        'databasename',
-        'tablename',
-        'schemaname',
-        'hostid',
-        'hostdatasource',
-        'hostname',
-        'hostport',
-        'hostusername',
-        'hostpassword',
-        'hostschema',
-        'hosttableencoding',
-        'hostextra',
-        'hostisdynamic',
-        'refobj',
-        'prepared',
-        'getset',
-        'addset',
-        'numsets',
-        'addrow',
-        'addcolumninfo',
-        'forcedrowid',
-        'makeinheritedcopy',
-        'filename',
-        'expose',
-        'recover',
-        'count',
-        'exchange',
-        'findindex',
-        'sort',
-        'family',
-        'isvalid',
-        'isssl',
-        'open',
-        'read',
-        'write',
-        'ioctl',
-        'seek',
-        'mode',
-        'mtime',
-        'atime',
-        'dup',
-        'dup2',
-        'fchdir',
-        'fchown',
-        'fsync',
-        'ftruncate',
-        'fchmod',
-        'sendfd',
-        'receivefd',
-        'readobject',
-        'tryreadobject',
-        'writeobject',
-        'leaveopen',
-        'rewind',
-        'tell',
-        'language',
-        'script',
-        'country',
-        'variant',
-        'displaylanguage',
-        'displayscript',
-        'displaycountry',
-        'displayvariant',
-        'displayname',
-        'basename',
-        'keywords',
-        'iso3language',
-        'iso3country',
-        'formatas',
-        'formatnumber',
-        'parsenumber',
-        'parseas',
-        'format',
-        'parse',
-        'add',
-        'roll',
-        'getattr',
-        'setattr',
-        'clear',
-        'isset',
-        'settimezone',
-        'timezone',
-        'time',
-        'indaylighttime',
-        'createdocument',
-        'parsedocument',
-        'hasfeature',
-        'createdocumenttype',
-        'nodename',
-        'nodevalue',
-        'nodetype',
-        'parentnode',
-        'firstchild',
-        'lastchild',
-        'previoussibling',
-        'nextsibling',
-        'ownerdocument',
-        'namespaceuri',
-        'prefix',
-        'localname',
-        'insertbefore',
-        'replacechild',
-        'removechild',
-        'appendchild',
-        'clonenode',
-        'issupported',
-        'hasattributes',
-        'extractone',
-        'transform',
-        'data',
-        'substringdata',
-        'appenddata',
-        'insertdata',
-        'deletedata',
-        'replacedata',
-        'doctype',
-        'implementation',
-        'documentelement',
-        'createelement',
-        'createdocumentfragment',
-        'createtextnode',
-        'createcomment',
-        'createcdatasection',
-        'createprocessinginstruction',
-        'createattribute',
-        'createentityreference',
-        'getelementsbytagname',
-        'importnode',
-        'createelementns',
-        'createattributens',
-        'getelementsbytagnamens',
-        'getelementbyid',
-        'getattribute',
-        'setattribute',
-        'removeattribute',
-        'getattributenode',
-        'setattributenode',
-        'removeattributenode',
-        'getattributens',
-        'setattributens',
-        'removeattributens',
-        'getattributenodens',
-        'setattributenodens',
-        'hasattribute',
-        'hasattributens',
-        'specified',
-        'ownerelement',
-        'splittext',
-        'notationname',
-        'publicid',
-        'systemid',
-        'target',
-        'entities',
-        'notations',
-        'internalsubset',
-        'item',
-        'getnameditem',
-        'getnameditemns',
-        'setnameditem',
-        'setnameditemns',
-        'removenameditem',
-        'removenameditemns',
-        'next',
-        'readattributevalue',
-        'attributecount',
-        'baseuri',
-        'depth',
-        'hasvalue',
-        'isemptyelement',
-        'xmllang',
-        'getattributenamespace',
-        'lookupnamespace',
-        'movetoattribute',
-        'movetoattributenamespace',
-        'movetofirstattribute',
-        'movetonextattribute',
-        'movetoelement',
-        'prepare',
-        'last_insert_rowid',
-        'total_changes',
-        'interrupt',
-        'errcode',
-        'errmsg',
-        'addmathfunctions',
-        'finalize',
-        'step',
-        'bind_blob',
-        'bind_double',
-        'bind_int',
-        'bind_null',
-        'bind_text',
-        'bind_parameter_index',
-        'reset',
-        'column_count',
-        'column_decltype',
-        'column_blob',
-        'column_double',
-        'column_int64',
-        'column_text',
-        'ismultipart',
-        'gotfileupload',
-        'setmaxfilesize',
-        'getparts',
-        'trackingid',
-        'currentfile',
-        'addtobuffer',
-        'input',
-        'replacepattern',
-        'findpattern',
-        'ignorecase',
-        'setinput',
-        'setreplacepattern',
-        'setfindpattern',
-        'setignorecase',
-        'appendreplacement',
-        'matches',
-        'private_replaceall',
-        'appendtail',
-        'groupcount',
-        'matchposition',
-        'matchesstart',
-        'private_replacefirst',
-        'private_split',
-        'matchstring',
-        'replaceall',
-        'replacefirst',
-        'findall',
-        'findcount',
-        'findfirst',
-        'findsymbols',
-        'loadlibrary',
-        'getlibrary',
-        'f',
-        'r',
-        'form',
-        'gen',
-        'callfirst',
-        'key',
-        'by',
-        'from',
-        'to',
-        'd',
-        't',
-        'object',
-        'inneroncompare',
-        'members',
-        'writeid',
-        'addmember',
-        'refid',
-        'index',
-        'objects',
-        'tabs',
-        'trunk',
-        'trace',
-        'asxml',
-        'tabstr',
-        'toxmlstring',
-        'idmap',
-        'readidobjects',
-        'red',
-        'root',
-        'getnode',
-        'firstnode',
-        'lastnode',
-        'nextnode',
-        'private_rebalanceforremove',
-        'private_rotateleft',
-        'private_rotateright',
-        'private_rebalanceforinsert',
-        'eachnode',
-        'foreachnode',
-        'resolvelinks',
-        'parentdir',
-        'aslazystring',
-        'openread',
-        'openwrite',
-        'openwriteonly',
-        'openappend',
-        'opentruncate',
-        'exists',
-        'modificationtime',
-        'lastaccesstime',
-        'modificationdate',
-        'lastaccessdate',
-        'delete',
-        'moveto',
-        'copyto',
-        'linkto',
-        'flush',
-        'chmod',
-        'chown',
-        'isopen',
-        'position',
-        'setmarker',
-        'setposition',
-        'setmode',
-        'foreachline',
-        'lock',
-        'unlock',
-        'trylock',
-        'testlock',
-        'perms',
-        'islink',
-        'isdir',
-        'realpath',
-        'openwith',
-        'create',
-        'setcwd',
-        'foreachentry',
-        'eachpath',
-        'eachfilepath',
-        'eachdirpath',
-        'each',
-        'eachfile',
-        'eachdir',
-        'eachpathrecursive',
-        'eachfilepathrecursive',
-        'eachdirpathrecursive',
-        'eachentry',
-        'makefullpath',
-        'annotate',
-        'blur',
-        'command',
-        'composite',
-        'contrast',
-        'convert',
-        'crop',
-        'execute',
-        'enhance',
-        'flipv',
-        'fliph',
-        'modulate',
-        'rotate',
-        'save',
-        'scale',
-        'sharpen',
-        'addcomment',
-        'comments',
-        'describe',
-        'height',
-        'pixel',
-        'resolutionv',
-        'resolutionh',
-        'width',
-        'setcolorspace',
-        'colorspace',
-        'debug',
-        'histogram',
-        'imgptr',
-        'appendimagetolist',
-        'fx',
-        'applyheatcolors',
-        'authenticate',
-        'search',
-        'searchurl',
-        'readerror',
-        'readline',
-        'setencoding',
-        'closewrite',
-        'exitcode',
-        'getversion',
-        'findclass',
-        'throw',
-        'thrownew',
-        'exceptionoccurred',
-        'exceptiondescribe',
-        'exceptionclear',
-        'fatalerror',
-        'newglobalref',
-        'deleteglobalref',
-        'deletelocalref',
-        'issameobject',
-        'allocobject',
-        'newobject',
-        'getobjectclass',
-        'isinstanceof',
-        'getmethodid',
-        'callobjectmethod',
-        'callbooleanmethod',
-        'callbytemethod',
-        'callcharmethod',
-        'callshortmethod',
-        'callintmethod',
-        'calllongmethod',
-        'callfloatmethod',
-        'calldoublemethod',
-        'callvoidmethod',
-        'callnonvirtualobjectmethod',
-        'callnonvirtualbooleanmethod',
-        'callnonvirtualbytemethod',
-        'callnonvirtualcharmethod',
-        'callnonvirtualshortmethod',
-        'callnonvirtualintmethod',
-        'callnonvirtuallongmethod',
-        'callnonvirtualfloatmethod',
-        'callnonvirtualdoublemethod',
-        'callnonvirtualvoidmethod',
-        'getfieldid',
-        'getobjectfield',
-        'getbooleanfield',
-        'getbytefield',
-        'getcharfield',
-        'getshortfield',
-        'getintfield',
-        'getlongfield',
-        'getfloatfield',
-        'getdoublefield',
-        'setobjectfield',
-        'setbooleanfield',
-        'setbytefield',
-        'setcharfield',
-        'setshortfield',
-        'setintfield',
-        'setlongfield',
-        'setfloatfield',
-        'setdoublefield',
-        'getstaticmethodid',
-        'callstaticobjectmethod',
-        'callstaticbooleanmethod',
-        'callstaticbytemethod',
-        'callstaticcharmethod',
-        'callstaticshortmethod',
-        'callstaticintmethod',
-        'callstaticlongmethod',
-        'callstaticfloatmethod',
-        'callstaticdoublemethod',
-        'callstaticvoidmethod',
-        'getstaticfieldid',
-        'getstaticobjectfield',
-        'getstaticbooleanfield',
-        'getstaticbytefield',
-        'getstaticcharfield',
-        'getstaticshortfield',
-        'getstaticintfield',
-        'getstaticlongfield',
-        'getstaticfloatfield',
-        'getstaticdoublefield',
-        'setstaticobjectfield',
-        'setstaticbooleanfield',
-        'setstaticbytefield',
-        'setstaticcharfield',
-        'setstaticshortfield',
-        'setstaticintfield',
-        'setstaticlongfield',
-        'setstaticfloatfield',
-        'setstaticdoublefield',
-        'newstring',
-        'getstringlength',
-        'getstringchars',
-        'getarraylength',
-        'newobjectarray',
-        'getobjectarrayelement',
-        'setobjectarrayelement',
-        'newbooleanarray',
-        'newbytearray',
-        'newchararray',
-        'newshortarray',
-        'newintarray',
-        'newlongarray',
-        'newfloatarray',
-        'newdoublearray',
-        'getbooleanarrayelements',
-        'getbytearrayelements',
-        'getchararrayelements',
-        'getshortarrayelements',
-        'getintarrayelements',
-        'getlongarrayelements',
-        'getfloatarrayelements',
-        'getdoublearrayelements',
-        'getbooleanarrayregion',
-        'getbytearrayregion',
-        'getchararrayregion',
-        'getshortarrayregion',
-        'getintarrayregion',
-        'getlongarrayregion',
-        'getfloatarrayregion',
-        'getdoublearrayregion',
-        'setbooleanarrayregion',
-        'setbytearrayregion',
-        'setchararrayregion',
-        'setshortarrayregion',
-        'setintarrayregion',
-        'setlongarrayregion',
-        'setfloatarrayregion',
-        'setdoublearrayregion',
-        'monitorenter',
-        'monitorexit',
-        'fromreflectedmethod',
-        'fromreflectedfield',
-        'toreflectedmethod',
-        'toreflectedfield',
-        'exceptioncheck',
-        'dbtablestable',
-        'dstable',
-        'dsdbtable',
-        'dshoststable',
-        'fieldstable',
-        'sql',
-        'adddatasource',
-        'loaddatasourceinfo',
-        'loaddatasourcehostinfo',
-        'getdatasource',
-        'getdatasourceid',
-        'getdatasourcename',
-        'listdatasources',
-        'listactivedatasources',
-        'removedatasource',
-        'listdatasourcehosts',
-        'listhosts',
-        'adddatasourcehost',
-        'getdatasourcehost',
-        'removedatasourcehost',
-        'getdatabasehost',
-        'gethostdatabase',
-        'listalldatabases',
-        'listdatasourcedatabases',
-        'listhostdatabases',
-        'getdatasourcedatabase',
-        'getdatasourcedatabasebyid',
-        'getdatabasebyname',
-        'getdatabasebyid',
-        'getdatabasebyalias',
-        'adddatasourcedatabase',
-        'removedatasourcedatabase',
-        'listalltables',
-        'listdatabasetables',
-        'getdatabasetable',
-        'getdatabasetablebyalias',
-        'getdatabasetablebyid',
-        'gettablebyid',
-        'adddatabasetable',
-        'removedatabasetable',
-        'removefield',
-        'maybevalue',
-        'getuniquealiasname',
-        'makecolumnlist',
-        'makecolumnmap',
-        'datasourcecolumns',
-        'datasourcemap',
-        'hostcolumns',
-        'hostmap',
-        'hostcolumns2',
-        'hostmap2',
-        'databasecolumns',
-        'databasemap',
-        'tablecolumns',
-        'tablemap',
-        'databasecolumnnames',
-        'hostcolumnnames',
-        'hostcolumnnames2',
-        'datasourcecolumnnames',
-        'tablecolumnnames',
-        'bindcount',
-        'db',
-        'tables',
-        'hastable',
-        'tablehascolumn',
-        'eachrow',
-        'bindparam',
-        'foreachrow',
-        'executelazy',
-        'executenow',
-        'lastinsertid',
-        'table',
-        'bindone',
-        'src',
-        'stat',
-        'colmap',
-        'getcolumn',
-        'locals',
-        'getcolumns',
-        'bodybytes',
-        'headerbytes',
-        'ready',
-        'token',
-        'url',
-        'done',
-        'header',
-        'result',
-        'statuscode',
-        'raw',
-        'version',
-        'perform',
-        'performonce',
-        'asraw',
-        'rawdiff',
-        'getformat',
-        'setformat',
-        'subtract',
-        'gmt',
-        'dst',
-        'era',
-        'year',
-        'month',
-        'week',
-        'weekofyear',
-        'weekofmonth',
-        'day',
-        'dayofmonth',
-        'dayofyear',
-        'dayofweek',
-        'dayofweekinmonth',
-        'ampm',
-        'am',
-        'pm',
-        'hour',
-        'hourofday',
-        'hourofampm',
-        'minute',
-        'millisecond',
-        'zoneoffset',
-        'dstoffset',
-        'yearwoy',
-        'dowlocal',
-        'extendedyear',
-        'julianday',
-        'millisecondsinday',
-        'firstdayofweek',
-        'fixformat',
-        'minutesbetween',
-        'hoursbetween',
-        'secondsbetween',
-        'daysbetween',
-        'businessdaysbetween',
-        'pdifference',
-        'getfield',
-        's',
-        'linediffers',
-        'sourceline',
-        'sourcecolumn',
-        'continuationpacket',
-        'continuationpoint',
-        'continuationstack',
-        'features',
-        'lastpoint',
-        'net',
-        'running',
-        'source',
-        'run',
-        'pathtouri',
-        'sendpacket',
-        'readpacket',
-        'handlefeatureset',
-        'handlefeatureget',
-        'handlestdin',
-        'handlestdout',
-        'handlestderr',
-        'isfirststep',
-        'handlecontinuation',
-        'ensurestopped',
-        'handlestackget',
-        'handlecontextnames',
-        'formatcontextelements',
-        'formatcontextelement',
-        'bptypetostr',
-        'bptoxml',
-        'handlebreakpointlist',
-        'handlebreakpointget',
-        'handlebreakpointremove',
-        'condtoint',
-        'inttocond',
-        'handlebreakpointupdate',
-        'handlebreakpointset',
-        'handlecontextget',
-        'handlesource',
-        'error',
-        'stoprunning',
-        'pollide',
-        'polldbg',
-        'runonce',
-        'arguments',
-        'argumentvalue',
-        'end',
-        'start',
-        'days',
-        'foreachday',
-        'padzero',
-        'actionparams',
-        'capi',
-        'doclose',
-        'isnothing',
-        'named',
-        'workinginputcolumns',
-        'workingkeycolumns',
-        'workingreturncolumns',
-        'workingsortcolumns',
-        'workingkeyfield_name',
-        'scanfordatasource',
-        'configureds',
-        'configuredskeys',
-        'scrubkeywords',
-        'closeprepared',
-        'filterinputcolumn',
-        'prev',
-        'head',
-        'removenode',
-        'listnode',
-        'accept',
-        'connect',
-        'foreachaccept',
-        'writeobjecttcp',
-        'readobjecttcp',
-        'begintls',
-        'endtls',
-        'loadcerts',
-        'sslerrfail',
-        'fromname',
-        'fromport',
-        'env',
-        'getclass',
-        'jobjectisa',
-        'new',
-        'callvoid',
-        'callint',
-        'callfloat',
-        'callboolean',
-        'callobject',
-        'callstring',
-        'callstaticobject',
-        'callstaticstring',
-        'callstaticint',
-        'callstaticboolean',
-        'chk',
-        'makecolor',
-        'realdoc',
-        'addbarcode',
-        'addchapter',
-        'addcheckbox',
-        'addcombobox',
-        'addhiddenfield',
-        'addimage',
-        'addlist',
-        'addpage',
-        'addparagraph',
-        'addpasswordfield',
-        'addphrase',
-        'addradiobutton',
-        'addradiogroup',
-        'addresetbutton',
-        'addsection',
-        'addselectlist',
-        'addsubmitbutton',
-        'addtable',
-        'addtextarea',
-        'addtextfield',
-        'addtext',
-        'arc',
-        'circle',
-        'closepath',
-        'curveto',
-        'drawtext',
-        'getcolor',
-        'getheader',
-        'getheaders',
-        'getmargins',
-        'getpagenumber',
-        'getsize',
-        'insertpage',
-        'line',
-        'rect',
-        'setcolor',
-        'setfont',
-        'setlinewidth',
-        'setpagenumber',
-        'conventionaltop',
-        'lowagiefont',
-        'jcolor',
-        'jbarcode',
-        'generatechecksum',
-        'getbarheight',
-        'getbarmultiplier',
-        'getbarwidth',
-        'getbaseline',
-        'getcode',
-        'getfont',
-        'gettextalignment',
-        'gettextsize',
-        'setbarheight',
-        'setbarmultiplier',
-        'setbarwidth',
-        'setbaseline',
-        'setcode',
-        'setgeneratechecksum',
-        'setshowchecksum',
-        'settextalignment',
-        'settextsize',
-        'showchecksum',
-        'showcode39startstop',
-        'showeanguardbars',
-        'jfont',
-        'getencoding',
-        'getface',
-        'getfullfontname',
-        'getpsfontname',
-        'getsupportedencodings',
-        'istruetype',
-        'getstyle',
-        'getbold',
-        'getitalic',
-        'getunderline',
-        'setface',
-        'setunderline',
-        'setbold',
-        'setitalic',
-        'textwidth',
-        'jimage',
-        'ontop',
-        'jlist',
-        'jread',
-        'addjavascript',
-        'exportfdf',
-        'extractimage',
-        'fieldnames',
-        'fieldposition',
-        'fieldtype',
-        'fieldvalue',
-        'gettext',
-        'importfdf',
-        'javascript',
-        'pagecount',
-        'pagerotation',
-        'pagesize',
-        'setfieldvalue',
-        'setpagerange',
-        'jtable',
-        'getabswidth',
-        'getalignment',
-        'getbordercolor',
-        'getborderwidth',
-        'getcolumncount',
-        'getpadding',
-        'getrowcount',
-        'getspacing',
-        'setalignment',
-        'setbordercolor',
-        'setborderwidth',
-        'setpadding',
-        'setspacing',
-        'jtext',
-        'element',
-        'foreachspool',
-        'unspool',
-        'err',
-        'in',
-        'out',
-        'pid',
-        'wait',
-        'testexitcode',
-        'maxworkers',
-        'tasks',
-        'workers',
-        'startone',
-        'addtask',
-        'waitforcompletion',
-        'scanworkers',
-        'scantasks',
-        'z',
-        'addfile',
-        'adddir',
-        'adddirpath',
-        'foreachfile',
-        'foreachfilename',
-        'eachfilename',
-        'filenames',
-        'getfile',
-        'meta',
-        'criteria',
-        'valid',
-        'lazyvalue',
-        'qdcount',
-        'qdarray',
-        'answer',
-        'bitformat',
-        'consume_rdata',
-        'consume_string',
-        'consume_label',
-        'consume_domain',
-        'consume_message',
-        'errors',
-        'warnings',
-        'addwarning',
-        'adderror',
-        'renderbytes',
-        'renderstring',
-        'components',
-        'addcomponents',
-        'body',
-        'renderdocumentbytes',
-        'contenttype',
-        'mime_boundary',
-        'mime_contenttype',
-        'mime_hdrs',
-        'addtextpart',
-        'addhtmlpart',
-        'addattachment',
-        'addpart',
-        'recipients',
-        'pop_capa',
-        'pop_debug',
-        'pop_err',
-        'pop_get',
-        'pop_ids',
-        'pop_index',
-        'pop_log',
-        'pop_mode',
-        'pop_net',
-        'pop_res',
-        'pop_server',
-        'pop_timeout',
-        'pop_token',
-        'pop_cmd',
-        'user',
-        'pass',
-        'apop',
-        'auth',
-        'quit',
-        'rset',
-        'uidl',
-        'retr',
-        'dele',
-        'noop',
-        'capa',
-        'stls',
-        'authorize',
-        'retrieve',
-        'headers',
-        'uniqueid',
-        'capabilities',
-        'cancel',
-        'results',
-        'lasterror',
-        'parse_body',
-        'parse_boundary',
-        'parse_charset',
-        'parse_content_disposition',
-        'parse_content_transfer_encoding',
-        'parse_content_type',
-        'parse_hdrs',
-        'parse_mode',
-        'parse_msg',
-        'parse_parts',
-        'parse_rawhdrs',
-        'rawheaders',
-        'content_transfer_encoding',
-        'content_disposition',
-        'boundary',
-        'charset',
-        'cc',
-        'subject',
-        'bcc',
-        'pause',
-        'continue',
-        'touch',
-        'refresh',
-        'status',
-        'queue_status',
-        'active_tick',
-        'getprefs',
-        'initialize',
-        'queue_maintenance',
-        'queue_messages',
-        'content',
-        'rectype',
-        'requestid',
-        'cachedappprefix',
-        'cachedroot',
-        'cookiesary',
-        'fcgireq',
-        'fileuploadsary',
-        'headersmap',
-        'httpauthorization',
-        'postparamsary',
-        'queryparamsary',
-        'documentroot',
-        'appprefix',
-        'httpconnection',
-        'httpcookie',
-        'httphost',
-        'httpuseragent',
-        'httpcachecontrol',
-        'httpreferer',
-        'httpreferrer',
-        'contentlength',
-        'pathtranslated',
-        'remoteaddr',
-        'remoteport',
-        'requestmethod',
-        'requesturi',
-        'scriptfilename',
-        'scriptname',
-        'scripturi',
-        'scripturl',
-        'serveraddr',
-        'serveradmin',
-        'servername',
-        'serverport',
-        'serverprotocol',
-        'serversignature',
-        'serversoftware',
-        'pathinfo',
-        'gatewayinterface',
-        'httpaccept',
-        'httpacceptencoding',
-        'httpacceptlanguage',
-        'ishttps',
-        'cookies',
-        'rawheader',
-        'queryparam',
-        'postparam',
-        'param',
-        'queryparams',
-        'querystring',
-        'postparams',
-        'poststring',
-        'params',
-        'fileuploads',
-        'isxhr',
-        'reqid',
-        'statusmsg',
-        'cap',
-        'n',
-        'proxying',
-        'stop',
-        'printsimplemsg',
-        'handleevalexpired',
-        'handlenormalconnection',
-        'handledevconnection',
-        'splittoprivatedev',
-        'getmode',
-        'novaluelists',
-        'makeurl',
-        'choosecolumntype',
-        'getdatabasetablepart',
-        'getlcapitype',
-        'buildquery',
-        'getsortfieldspart',
-        'endjs',
-        'addjs',
-        'addjstext',
-        'addendjs',
-        'addendjstext',
-        'addcss',
-        'addfavicon',
-        'attrs',
-        'dtdid',
-        'xhtml',
-        'code',
-        'msg',
-        'scripttype',
-        'defer',
-        'httpequiv',
-        'scheme',
-        'href',
-        'hreflang',
-        'linktype',
-        'rel',
-        'rev',
-        'media',
-        'declare',
-        'classid',
-        'codebase',
-        'objecttype',
-        'codetype',
-        'archive',
-        'standby',
-        'usemap',
-        'tabindex',
-        'styletype',
-        'method',
-        'enctype',
-        'accept_charset',
-        'onsubmit',
-        'onreset',
-        'accesskey',
-        'inputtype',
-        'maxlength',
-        'for',
-        'label',
-        'multiple',
-        'buff',
-        'wroteheaders',
-        'pullrequest',
-        'pullrawpost',
-        'shouldclose',
-        'pullurlpost',
-        'pullmimepost',
-        'pullhttpheader',
-        'pulloneheaderline',
-        'parseoneheaderline',
-        'addoneheaderline',
-        'safeexport8bits',
-        'writeheader',
-        'connhandler',
-        'port',
-        'connectionhandler',
-        'acceptconnections',
-        'gotconnection',
-        'failnoconnectionhandler',
-        'splitconnection',
-        'scriptextensions',
-        'sendfile',
-        'probemimetype',
-        'inits',
-        'installs',
-        'rootmap',
-        'install',
-        'getappsource',
-        'preflight',
-        'splituppath',
-        'handleresource',
-        'handledefinitionhead',
-        'handledefinitionbody',
-        'handledefinitionresource',
-        'execinstalls',
-        'execinits',
-        'payload',
-        'eligiblepath',
-        'eligiblepaths',
-        'expiresminutes',
-        'moddatestr',
-        'zips',
-        'addzip',
-        'getzipfilebytes',
-        'resourcedata',
-        'zipfile',
-        'zipname',
-        'zipfilename',
-        'rawinvokable',
-        'route',
-        'setdestination',
-        'encodepassword',
-        'checkuser',
-        'needinitialization',
-        'adduser',
-        'getuserid',
-        'getuser',
-        'getuserbykey',
-        'removeuser',
-        'listusers',
-        'listusersbygroup',
-        'countusersbygroup',
-        'addgroup',
-        'updategroup',
-        'getgroupid',
-        'getgroup',
-        'removegroup',
-        'listgroups',
-        'listgroupsbyuser',
-        'addusertogroup',
-        'removeuserfromgroup',
-        'removeuserfromallgroups',
-        'md5hex',
-        'usercolumns',
-        'groupcolumns',
-        'expireminutes',
-        'lasttouched',
-        'hasexpired',
-        'idealinmemory',
-        'maxinmemory',
-        'nextprune',
-        'nextprunedelta',
-        'sessionsdump',
-        'prune',
-        'entry',
-        'host',
-        'tb',
-        'setdefaultstorage',
-        'getdefaultstorage',
-        'onconvert',
-        'send',
-        'addsubnode',
-        'removesubnode',
-        'nodeforpath',
-        'jsonfornode',
-        'appmessage',
-        'appstatus',
-        'atends',
-        'chunked',
-        'cookiesarray',
-        'didinclude',
-        'errstack',
-        'headersarray',
-        'includestack',
-        'outputencoding',
-        'sessionsmap',
-        'htmlizestacktrace',
-        'respond',
-        'sendresponse',
-        'sendchunk',
-        'makecookieyumyum',
-        'includeonce',
-        'includelibrary',
-        'includelibraryonce',
-        'includebytes',
-        'addatend',
-        'setcookie',
-        'addheader',
-        'replaceheader',
-        'setheaders',
-        'rawcontent',
-        'redirectto',
-        'htmlizestacktracelink',
-        'doatbegins',
-        'handlelassoappcontent',
-        'handlelassoappresponse',
-        'domainbody',
-        'establisherrorstate',
-        'tryfinderrorfile',
-        'doatends',
-        'dosessions',
-        'makenonrelative',
-        'pushinclude',
-        'popinclude',
-        'findinclude',
-        'checkdebugging',
-        'splitdebuggingthread',
-        'matchtriggers',
-        'rules',
-        'shouldabort',
-        'gettrigger',
-        'trigger',
-        'rule'
-    ],
-    'Lasso 8 Tags': [
-        '__char',
-        '__sync_timestamp__',
-        '_admin_addgroup',
-        '_admin_adduser',
-        '_admin_defaultconnector',
-        '_admin_defaultconnectornames',
-        '_admin_defaultdatabase',
-        '_admin_defaultfield',
-        '_admin_defaultgroup',
-        '_admin_defaulthost',
-        '_admin_defaulttable',
-        '_admin_defaultuser',
-        '_admin_deleteconnector',
-        '_admin_deletedatabase',
-        '_admin_deletefield',
-        '_admin_deletegroup',
-        '_admin_deletehost',
-        '_admin_deletetable',
-        '_admin_deleteuser',
-        '_admin_duplicategroup',
-        '_admin_internaldatabase',
-        '_admin_listconnectors',
-        '_admin_listdatabases',
-        '_admin_listfields',
-        '_admin_listgroups',
-        '_admin_listhosts',
-        '_admin_listtables',
-        '_admin_listusers',
-        '_admin_refreshconnector',
-        '_admin_refreshsecurity',
-        '_admin_servicepath',
-        '_admin_updateconnector',
-        '_admin_updatedatabase',
-        '_admin_updatefield',
-        '_admin_updategroup',
-        '_admin_updatehost',
-        '_admin_updatetable',
-        '_admin_updateuser',
-        '_chartfx_activation_string',
-        '_chartfx_getchallengestring',
-        '_chop_args',
-        '_chop_mimes',
-        '_client_addr_old',
-        '_client_address_old',
-        '_client_ip_old',
-        '_database_names',
-        '_datasource_reload',
-        '_date_current',
-        '_date_format',
-        '_date_msec',
-        '_date_parse',
-        '_execution_timelimit',
-        '_file_chmod',
-        '_initialize',
-        '_jdbc_acceptsurl',
-        '_jdbc_debug',
-        '_jdbc_deletehost',
-        '_jdbc_driverclasses',
-        '_jdbc_driverinfo',
-        '_jdbc_metainfo',
-        '_jdbc_propertyinfo',
-        '_jdbc_setdriver',
-        '_lasso_param',
-        '_log_helper',
-        '_proc_noparam',
-        '_proc_withparam',
-        '_recursion_limit',
-        '_request_param',
-        '_security_binaryexpiration',
-        '_security_flushcaches',
-        '_security_isserialized',
-        '_security_serialexpiration',
-        '_srand',
-        '_strict_literals',
-        '_substring',
-        '_xmlrpc_exconverter',
-        '_xmlrpc_inconverter',
-        '_xmlrpc_xmlinconverter',
-        'abort',
-        'accept',
-        'action_addinfo',
-        'action_addrecord',
-        'action_param',
-        'action_params',
-        'action_setfoundcount',
-        'action_setrecordid',
-        'action_settotalcount',
-        'action_statement',
-        'add',
-        'addattachment',
-        'addattribute',
-        'addbarcode',
-        'addchapter',
-        'addcheckbox',
-        'addchild',
-        'addcombobox',
-        'addcomment',
-        'addcontent',
-        'addhiddenfield',
-        'addhtmlpart',
-        'addimage',
-        'addjavascript',
-        'addlist',
-        'addnamespace',
-        'addnextsibling',
-        'addpage',
-        'addparagraph',
-        'addparenttype',
-        'addpart',
-        'addpasswordfield',
-        'addphrase',
-        'addprevsibling',
-        'addradiobutton',
-        'addradiogroup',
-        'addresetbutton',
-        'addsection',
-        'addselectlist',
-        'addsibling',
-        'addsubmitbutton',
-        'addtable',
-        'addtext',
-        'addtextarea',
-        'addtextfield',
-        'addtextpart',
-        'admin_allowedfileroots',
-        'admin_changeuser',
-        'admin_createuser',
-        'admin_currentgroups',
-        'admin_currentuserid',
-        'admin_currentusername',
-        'admin_getpref',
-        'admin_groupassignuser',
-        'admin_grouplistusers',
-        'admin_groupremoveuser',
-        'admin_lassoservicepath',
-        'admin_listgroups',
-        'admin_refreshlicensing',
-        'admin_refreshsecurity',
-        'admin_reloaddatasource',
-        'admin_removepref',
-        'admin_setpref',
-        'admin_userexists',
-        'admin_userlistgroups',
-        'alarms',
-        'all',
-        'and',
-        'annotate',
-        'answer',
-        'append',
-        'appendreplacement',
-        'appendtail',
-        'arc',
-        'array',
-        'array_iterator',
-        'asasync',
-        'astype',
-        'atbegin',
-        'atbottom',
-        'atend',
-        'atfarleft',
-        'atfarright',
-        'attop',
-        'attributecount',
-        'attributes',
-        'auth',
-        'auth_admin',
-        'auth_auth',
-        'auth_custom',
-        'auth_group',
-        'auth_prompt',
-        'auth_user',
-        'authenticate',
-        'authorize',
-        'backward',
-        'base64',
-        'baseuri',
-        'bcc',
-        'bean',
-        'beanproperties',
-        'beginswith',
-        'bigint',
-        'bind',
-        'bitand',
-        'bitclear',
-        'bitflip',
-        'bitformat',
-        'bitnot',
-        'bitor',
-        'bitset',
-        'bitshiftleft',
-        'bitshiftright',
-        'bittest',
-        'bitxor',
-        'blur',
-        'body',
-        'bom_utf16be',
-        'bom_utf16le',
-        'bom_utf32be',
-        'bom_utf32le',
-        'bom_utf8',
-        'boolean',
-        'boundary',
-        'bw',
-        'bytes',
-        'cache',
-        'cache_delete',
-        'cache_empty',
-        'cache_exists',
-        'cache_fetch',
-        'cache_internal',
-        'cache_maintenance',
-        'cache_object',
-        'cache_preferences',
-        'cache_store',
-        'call',
-        'cancel',
-        'capabilities',
-        'case',
-        'cc',
-        'chardigitvalue',
-        'charname',
-        'charset',
-        'chartfx',
-        'chartfx_records',
-        'chartfx_serve',
-        'chartype',
-        'checked',
-        'children',
-        'choice_list',
-        'choice_listitem',
-        'choicelistitem',
-        'cipher_decrypt',
-        'cipher_digest',
-        'cipher_encrypt',
-        'cipher_hmac',
-        'cipher_keylength',
-        'cipher_list',
-        'circle',
-        'click_text',
-        'client_addr',
-        'client_address',
-        'client_authorization',
-        'client_browser',
-        'client_contentlength',
-        'client_contenttype',
-        'client_cookielist',
-        'client_cookies',
-        'client_encoding',
-        'client_formmethod',
-        'client_getargs',
-        'client_getparams',
-        'client_headers',
-        'client_ip',
-        'client_ipfrominteger',
-        'client_iptointeger',
-        'client_password',
-        'client_postargs',
-        'client_postparams',
-        'client_type',
-        'client_url',
-        'client_username',
-        'close',
-        'closepath',
-        'closewrite',
-        'cn',
-        'code',
-        'colorspace',
-        'column',
-        'column_name',
-        'column_names',
-        'command',
-        'comments',
-        'compare',
-        'compare_beginswith',
-        'compare_contains',
-        'compare_endswith',
-        'compare_equalto',
-        'compare_greaterthan',
-        'compare_greaterthanorequals',
-        'compare_greaterthanorequls',
-        'compare_lessthan',
-        'compare_lessthanorequals',
-        'compare_notbeginswith',
-        'compare_notcontains',
-        'compare_notendswith',
-        'compare_notequalto',
-        'compare_notregexp',
-        'compare_regexp',
-        'compare_strictequalto',
-        'compare_strictnotequalto',
-        'comparecodepointorder',
-        'compile',
-        'compiler_removecacheddoc',
-        'compiler_setdefaultparserflags',
-        'composite',
-        'compress',
-        'connect',
-        'contains',
-        'content_body',
-        'content_disposition',
-        'content_encoding',
-        'content_header',
-        'content_transfer_encoding',
-        'content_type',
-        'contents',
-        'contrast',
-        'convert',
-        'cookie',
-        'cookie_set',
-        'crop',
-        'curl_ftp_getfile',
-        'curl_ftp_getlisting',
-        'curl_ftp_putfile',
-        'curl_include_url',
-        'currency',
-        'curveto',
-        'data',
-        'database_changecolumn',
-        'database_changefield',
-        'database_createcolumn',
-        'database_createfield',
-        'database_createtable',
-        'database_fmcontainer',
-        'database_hostinfo',
-        'database_inline',
-        'database_name',
-        'database_nameitem',
-        'database_names',
-        'database_realname',
-        'database_removecolumn',
-        'database_removefield',
-        'database_removetable',
-        'database_repeating',
-        'database_repeating_valueitem',
-        'database_repeatingvalueitem',
-        'database_schemanameitem',
-        'database_schemanames',
-        'database_tablecolumn',
-        'database_tablenameitem',
-        'database_tablenames',
-        'datasource_name',
-        'datasource_register',
-        'date',
-        'date__date_current',
-        'date__date_format',
-        'date__date_msec',
-        'date__date_parse',
-        'date_add',
-        'date_date',
-        'date_difference',
-        'date_duration',
-        'date_format',
-        'date_getcurrentdate',
-        'date_getday',
-        'date_getdayofweek',
-        'date_gethour',
-        'date_getlocaltimezone',
-        'date_getminute',
-        'date_getmonth',
-        'date_getsecond',
-        'date_gettime',
-        'date_getyear',
-        'date_gmttolocal',
-        'date_localtogmt',
-        'date_maximum',
-        'date_minimum',
-        'date_msec',
-        'date_setformat',
-        'date_subtract',
-        'day',
-        'daylights',
-        'dayofweek',
-        'dayofyear',
-        'db_layoutnameitem',
-        'db_layoutnames',
-        'db_nameitem',
-        'db_names',
-        'db_tablenameitem',
-        'db_tablenames',
-        'dbi_column_names',
-        'dbi_field_names',
-        'decimal',
-        'decimal_setglobaldefaultprecision',
-        'decode_base64',
-        'decode_bheader',
-        'decode_hex',
-        'decode_html',
-        'decode_json',
-        'decode_qheader',
-        'decode_quotedprintable',
-        'decode_quotedprintablebytes',
-        'decode_url',
-        'decode_xml',
-        'decompress',
-        'decrement',
-        'decrypt_blowfish',
-        'decrypt_blowfish2',
-        'default',
-        'define_atbegin',
-        'define_atend',
-        'define_constant',
-        'define_prototype',
-        'define_tag',
-        'define_tagp',
-        'define_type',
-        'define_typep',
-        'delete',
-        'depth',
-        'describe',
-        'description',
-        'deserialize',
-        'detach',
-        'detachreference',
-        'difference',
-        'digit',
-        'directory_directorynameitem',
-        'directory_lister',
-        'directory_nameitem',
-        'directorynameitem',
-        'dns_default',
-        'dns_lookup',
-        'dns_response',
-        'document',
-        'down',
-        'drawtext',
-        'dst',
-        'dump',
-        'duration',
-        'else',
-        'email_batch',
-        'email_compose',
-        'email_digestchallenge',
-        'email_digestresponse',
-        'email_extract',
-        'email_findemails',
-        'email_immediate',
-        'email_merge',
-        'email_mxerror',
-        'email_mxlookup',
-        'email_parse',
-        'email_pop',
-        'email_queue',
-        'email_result',
-        'email_safeemail',
-        'email_send',
-        'email_smtp',
-        'email_status',
-        'email_token',
-        'email_translatebreakstocrlf',
-        'encode_base64',
-        'encode_bheader',
-        'encode_break',
-        'encode_breaks',
-        'encode_crc32',
-        'encode_hex',
-        'encode_html',
-        'encode_htmltoxml',
-        'encode_json',
-        'encode_qheader',
-        'encode_quotedprintable',
-        'encode_quotedprintablebytes',
-        'encode_set',
-        'encode_smart',
-        'encode_sql',
-        'encode_sql92',
-        'encode_stricturl',
-        'encode_url',
-        'encode_xml',
-        'encrypt_blowfish',
-        'encrypt_blowfish2',
-        'encrypt_crammd5',
-        'encrypt_hmac',
-        'encrypt_md5',
-        'endswith',
-        'enhance',
-        'eq',
-        'equals',
-        'error_adderror',
-        'error_code',
-        'error_code_aborted',
-        'error_code_assert',
-        'error_code_bof',
-        'error_code_connectioninvalid',
-        'error_code_couldnotclosefile',
-        'error_code_couldnotcreateoropenfile',
-        'error_code_couldnotdeletefile',
-        'error_code_couldnotdisposememory',
-        'error_code_couldnotlockmemory',
-        'error_code_couldnotreadfromfile',
-        'error_code_couldnotunlockmemory',
-        'error_code_couldnotwritetofile',
-        'error_code_criterianotmet',
-        'error_code_datasourceerror',
-        'error_code_directoryfull',
-        'error_code_diskfull',
-        'error_code_dividebyzero',
-        'error_code_eof',
-        'error_code_failure',
-        'error_code_fieldrestriction',
-        'error_code_file',
-        'error_code_filealreadyexists',
-        'error_code_filecorrupt',
-        'error_code_fileinvalid',
-        'error_code_fileinvalidaccessmode',
-        'error_code_fileisclosed',
-        'error_code_fileisopen',
-        'error_code_filelocked',
-        'error_code_filenotfound',
-        'error_code_fileunlocked',
-        'error_code_httpfilenotfound',
-        'error_code_illegalinstruction',
-        'error_code_illegaluseoffrozeninstance',
-        'error_code_invaliddatabase',
-        'error_code_invalidfilename',
-        'error_code_invalidmemoryobject',
-        'error_code_invalidparameter',
-        'error_code_invalidpassword',
-        'error_code_invalidpathname',
-        'error_code_invalidusername',
-        'error_code_ioerror',
-        'error_code_loopaborted',
-        'error_code_memory',
-        'error_code_network',
-        'error_code_nilpointer',
-        'error_code_noerr',
-        'error_code_nopermission',
-        'error_code_outofmemory',
-        'error_code_outofstackspace',
-        'error_code_overflow',
-        'error_code_postconditionfailed',
-        'error_code_preconditionfailed',
-        'error_code_resnotfound',
-        'error_code_resource',
-        'error_code_streamreaderror',
-        'error_code_streamwriteerror',
-        'error_code_syntaxerror',
-        'error_code_tagnotfound',
-        'error_code_unknownerror',
-        'error_code_varnotfound',
-        'error_code_volumedoesnotexist',
-        'error_code_webactionnotsupported',
-        'error_code_webadderror',
-        'error_code_webdeleteerror',
-        'error_code_webmodulenotfound',
-        'error_code_webnosuchobject',
-        'error_code_webrepeatingrelatedfield',
-        'error_code_webrequiredfieldmissing',
-        'error_code_webtimeout',
-        'error_code_webupdateerror',
-        'error_columnrestriction',
-        'error_currenterror',
-        'error_databaseconnectionunavailable',
-        'error_databasetimeout',
-        'error_deleteerror',
-        'error_fieldrestriction',
-        'error_filenotfound',
-        'error_invaliddatabase',
-        'error_invalidpassword',
-        'error_invalidusername',
-        'error_modulenotfound',
-        'error_msg',
-        'error_msg_aborted',
-        'error_msg_assert',
-        'error_msg_bof',
-        'error_msg_connectioninvalid',
-        'error_msg_couldnotclosefile',
-        'error_msg_couldnotcreateoropenfile',
-        'error_msg_couldnotdeletefile',
-        'error_msg_couldnotdisposememory',
-        'error_msg_couldnotlockmemory',
-        'error_msg_couldnotreadfromfile',
-        'error_msg_couldnotunlockmemory',
-        'error_msg_couldnotwritetofile',
-        'error_msg_criterianotmet',
-        'error_msg_datasourceerror',
-        'error_msg_directoryfull',
-        'error_msg_diskfull',
-        'error_msg_dividebyzero',
-        'error_msg_eof',
-        'error_msg_failure',
-        'error_msg_fieldrestriction',
-        'error_msg_file',
-        'error_msg_filealreadyexists',
-        'error_msg_filecorrupt',
-        'error_msg_fileinvalid',
-        'error_msg_fileinvalidaccessmode',
-        'error_msg_fileisclosed',
-        'error_msg_fileisopen',
-        'error_msg_filelocked',
-        'error_msg_filenotfound',
-        'error_msg_fileunlocked',
-        'error_msg_httpfilenotfound',
-        'error_msg_illegalinstruction',
-        'error_msg_illegaluseoffrozeninstance',
-        'error_msg_invaliddatabase',
-        'error_msg_invalidfilename',
-        'error_msg_invalidmemoryobject',
-        'error_msg_invalidparameter',
-        'error_msg_invalidpassword',
-        'error_msg_invalidpathname',
-        'error_msg_invalidusername',
-        'error_msg_ioerror',
-        'error_msg_loopaborted',
-        'error_msg_memory',
-        'error_msg_network',
-        'error_msg_nilpointer',
-        'error_msg_noerr',
-        'error_msg_nopermission',
-        'error_msg_outofmemory',
-        'error_msg_outofstackspace',
-        'error_msg_overflow',
-        'error_msg_postconditionfailed',
-        'error_msg_preconditionfailed',
-        'error_msg_resnotfound',
-        'error_msg_resource',
-        'error_msg_streamreaderror',
-        'error_msg_streamwriteerror',
-        'error_msg_syntaxerror',
-        'error_msg_tagnotfound',
-        'error_msg_unknownerror',
-        'error_msg_varnotfound',
-        'error_msg_volumedoesnotexist',
-        'error_msg_webactionnotsupported',
-        'error_msg_webadderror',
-        'error_msg_webdeleteerror',
-        'error_msg_webmodulenotfound',
-        'error_msg_webnosuchobject',
-        'error_msg_webrepeatingrelatedfield',
-        'error_msg_webrequiredfieldmissing',
-        'error_msg_webtimeout',
-        'error_msg_webupdateerror',
-        'error_noerror',
-        'error_nopermission',
-        'error_norecordsfound',
-        'error_outofmemory',
-        'error_pop',
-        'error_push',
-        'error_reqcolumnmissing',
-        'error_reqfieldmissing',
-        'error_requiredcolumnmissing',
-        'error_requiredfieldmissing',
-        'error_reset',
-        'error_seterrorcode',
-        'error_seterrormessage',
-        'error_updateerror',
-        'errors',
-        'euro',
-        'eval',
-        'event_schedule',
-        'events',
-        'ew',
-        'execute',
-        'export16bits',
-        'export32bits',
-        'export64bits',
-        'export8bits',
-        'exportfdf',
-        'exportstring',
-        'extract',
-        'extractone',
-        'fail',
-        'fail_if',
-        'false',
-        'field',
-        'field_name',
-        'field_names',
-        'fieldnames',
-        'fieldtype',
-        'fieldvalue',
-        'file',
-        'file_autoresolvefullpaths',
-        'file_chmod',
-        'file_control',
-        'file_copy',
-        'file_create',
-        'file_creationdate',
-        'file_currenterror',
-        'file_delete',
-        'file_exists',
-        'file_getlinecount',
-        'file_getsize',
-        'file_isdirectory',
-        'file_listdirectory',
-        'file_moddate',
-        'file_modechar',
-        'file_modeline',
-        'file_move',
-        'file_openread',
-        'file_openreadwrite',
-        'file_openwrite',
-        'file_openwriteappend',
-        'file_openwritetruncate',
-        'file_probeeol',
-        'file_processuploads',
-        'file_read',
-        'file_readline',
-        'file_rename',
-        'file_serve',
-        'file_setsize',
-        'file_stream',
-        'file_streamcopy',
-        'file_uploads',
-        'file_waitread',
-        'file_waittimeout',
-        'file_waitwrite',
-        'file_write',
-        'find',
-        'find_soap_ops',
-        'findindex',
-        'findnamespace',
-        'findnamespacebyhref',
-        'findpattern',
-        'findposition',
-        'first',
-        'firstchild',
-        'fliph',
-        'flipv',
-        'flush',
-        'foldcase',
-        'foreach',
-        'form_param',
-        'format',
-        'forward',
-        'found_count',
-        'freebusies',
-        'freezetype',
-        'freezevalue',
-        'from',
-        'ft',
-        'ftp_getfile',
-        'ftp_getlisting',
-        'ftp_putfile',
-        'full',
-        'fulltype',
-        'generatechecksum',
-        'get',
-        'getabswidth',
-        'getalignment',
-        'getattribute',
-        'getattributenamespace',
-        'getbarheight',
-        'getbarmultiplier',
-        'getbarwidth',
-        'getbaseline',
-        'getbordercolor',
-        'getborderwidth',
-        'getcode',
-        'getcolor',
-        'getcolumncount',
-        'getencoding',
-        'getface',
-        'getfont',
-        'getformat',
-        'getfullfontname',
-        'getheaders',
-        'getmargins',
-        'getmethod',
-        'getnumericvalue',
-        'getpadding',
-        'getpagenumber',
-        'getparams',
-        'getproperty',
-        'getpsfontname',
-        'getrange',
-        'getrowcount',
-        'getsize',
-        'getspacing',
-        'getsupportedencodings',
-        'gettextalignment',
-        'gettextsize',
-        'gettype',
-        'global',
-        'global_defined',
-        'global_remove',
-        'global_reset',
-        'globals',
-        'gmt',
-        'groupcount',
-        'gt',
-        'gte',
-        'handle',
-        'handle_error',
-        'hasattribute',
-        'haschildren',
-        'hasvalue',
-        'header',
-        'headers',
-        'height',
-        'histogram',
-        'hosttonet16',
-        'hosttonet32',
-        'hour',
-        'html_comment',
-        'http_getfile',
-        'ical_alarm',
-        'ical_attribute',
-        'ical_calendar',
-        'ical_daylight',
-        'ical_event',
-        'ical_freebusy',
-        'ical_item',
-        'ical_journal',
-        'ical_parse',
-        'ical_standard',
-        'ical_timezone',
-        'ical_todo',
-        'id',
-        'if',
-        'if_empty',
-        'if_false',
-        'if_null',
-        'if_true',
-        'ignorecase',
-        'image',
-        'image_url',
-        'img',
-        'import16bits',
-        'import32bits',
-        'import64bits',
-        'import8bits',
-        'importfdf',
-        'importstring',
-        'include',
-        'include_cgi',
-        'include_currentpath',
-        'include_once',
-        'include_raw',
-        'include_url',
-        'increment',
-        'inline',
-        'input',
-        'insert',
-        'insertatcurrent',
-        'insertfirst',
-        'insertfrom',
-        'insertlast',
-        'insertpage',
-        'integer',
-        'intersection',
-        'invoke',
-        'isa',
-        'isalnum',
-        'isalpha',
-        'isbase',
-        'iscntrl',
-        'isdigit',
-        'isemptyelement',
-        'islower',
-        'isopen',
-        'isprint',
-        'isspace',
-        'istitle',
-        'istruetype',
-        'isualphabetic',
-        'isulowercase',
-        'isupper',
-        'isuuppercase',
-        'isuwhitespace',
-        'iswhitespace',
-        'iterate',
-        'iterator',
-        'java',
-        'java_bean',
-        'javascript',
-        'join',
-        'journals',
-        'json_records',
-        'json_rpccall',
-        'key',
-        'keycolumn_name',
-        'keycolumn_value',
-        'keyfield_name',
-        'keyfield_value',
-        'keys',
-        'lasso_comment',
-        'lasso_currentaction',
-        'lasso_datasourceis',
-        'lasso_datasourceis4d',
-        'lasso_datasourceisfilemaker',
-        'lasso_datasourceisfilemaker7',
-        'lasso_datasourceisfilemaker9',
-        'lasso_datasourceisfilemakersa',
-        'lasso_datasourceisjdbc',
-        'lasso_datasourceislassomysql',
-        'lasso_datasourceismysql',
-        'lasso_datasourceisodbc',
-        'lasso_datasourceisopenbase',
-        'lasso_datasourceisoracle',
-        'lasso_datasourceispostgresql',
-        'lasso_datasourceisspotlight',
-        'lasso_datasourceissqlite',
-        'lasso_datasourceissqlserver',
-        'lasso_datasourcemodulename',
-        'lasso_datatype',
-        'lasso_disableondemand',
-        'lasso_errorreporting',
-        'lasso_executiontimelimit',
-        'lasso_parser',
-        'lasso_process',
-        'lasso_sessionid',
-        'lasso_siteid',
-        'lasso_siteisrunning',
-        'lasso_sitename',
-        'lasso_siterestart',
-        'lasso_sitestart',
-        'lasso_sitestop',
-        'lasso_tagexists',
-        'lasso_tagmodulename',
-        'lasso_uniqueid',
-        'lasso_updatecheck',
-        'lasso_uptime',
-        'lasso_version',
-        'lassoapp_create',
-        'lassoapp_dump',
-        'lassoapp_flattendir',
-        'lassoapp_getappdata',
-        'lassoapp_link',
-        'lassoapp_list',
-        'lassoapp_process',
-        'lassoapp_unitize',
-        'last',
-        'lastchild',
-        'lasterror',
-        'layout_name',
-        'ldap',
-        'ldap_scope_base',
-        'ldap_scope_onelevel',
-        'ldap_scope_subtree',
-        'ldml',
-        'ldml_ldml',
-        'left',
-        'length',
-        'library',
-        'library_once',
-        'line',
-        'link',
-        'link_currentaction',
-        'link_currentactionparams',
-        'link_currentactionurl',
-        'link_currentgroup',
-        'link_currentgroupparams',
-        'link_currentgroupurl',
-        'link_currentrecord',
-        'link_currentrecordparams',
-        'link_currentrecordurl',
-        'link_currentsearch',
-        'link_currentsearchparams',
-        'link_currentsearchurl',
-        'link_detail',
-        'link_detailparams',
-        'link_detailurl',
-        'link_firstgroup',
-        'link_firstgroupparams',
-        'link_firstgroupurl',
-        'link_firstrecord',
-        'link_firstrecordparams',
-        'link_firstrecordurl',
-        'link_lastgroup',
-        'link_lastgroupparams',
-        'link_lastgroupurl',
-        'link_lastrecord',
-        'link_lastrecordparams',
-        'link_lastrecordurl',
-        'link_nextgroup',
-        'link_nextgroupparams',
-        'link_nextgroupurl',
-        'link_nextrecord',
-        'link_nextrecordparams',
-        'link_nextrecordurl',
-        'link_params',
-        'link_prevgroup',
-        'link_prevgroupparams',
-        'link_prevgroupurl',
-        'link_prevrecord',
-        'link_prevrecordparams',
-        'link_prevrecordurl',
-        'link_setformat',
-        'link_url',
-        'list',
-        'list_additem',
-        'list_fromlist',
-        'list_fromstring',
-        'list_getitem',
-        'list_itemcount',
-        'list_iterator',
-        'list_removeitem',
-        'list_replaceitem',
-        'list_reverseiterator',
-        'list_tostring',
-        'listen',
-        'literal',
-        'ljax_end',
-        'ljax_hastarget',
-        'ljax_include',
-        'ljax_start',
-        'ljax_target',
-        'local',
-        'local_defined',
-        'local_remove',
-        'local_reset',
-        'localaddress',
-        'locale_format',
-        'localname',
-        'locals',
-        'lock',
-        'log',
-        'log_always',
-        'log_critical',
-        'log_deprecated',
-        'log_destination_console',
-        'log_destination_database',
-        'log_destination_file',
-        'log_detail',
-        'log_level_critical',
-        'log_level_deprecated',
-        'log_level_detail',
-        'log_level_sql',
-        'log_level_warning',
-        'log_setdestination',
-        'log_sql',
-        'log_warning',
-        'logicalop_value',
-        'logicaloperator_value',
-        'lookupnamespace',
-        'loop',
-        'loop_abort',
-        'loop_continue',
-        'loop_count',
-        'lowercase',
-        'lt',
-        'lte',
-        'magick_image',
-        'map',
-        'map_iterator',
-        'marker',
-        'match_comparator',
-        'match_notrange',
-        'match_notregexp',
-        'match_range',
-        'match_regexp',
-        'matches',
-        'matchesstart',
-        'matchposition',
-        'matchstring',
-        'math_abs',
-        'math_acos',
-        'math_add',
-        'math_asin',
-        'math_atan',
-        'math_atan2',
-        'math_ceil',
-        'math_converteuro',
-        'math_cos',
-        'math_div',
-        'math_exp',
-        'math_floor',
-        'math_internal_rand',
-        'math_internal_randmax',
-        'math_internal_srand',
-        'math_ln',
-        'math_log',
-        'math_log10',
-        'math_max',
-        'math_min',
-        'math_mod',
-        'math_mult',
-        'math_pow',
-        'math_random',
-        'math_range',
-        'math_rint',
-        'math_roman',
-        'math_round',
-        'math_sin',
-        'math_sqrt',
-        'math_sub',
-        'math_tan',
-        'maxrecords_value',
-        'memory_session_driver',
-        'merge',
-        'millisecond',
-        'mime_type',
-        'minimal',
-        'minute',
-        'misc__srand',
-        'misc_randomnumber',
-        'misc_roman',
-        'misc_valid_creditcard',
-        'mode',
-        'modulate',
-        'month',
-        'moveto',
-        'movetoattributenamespace',
-        'movetoelement',
-        'movetofirstattribute',
-        'movetonextattribute',
-        'mysql_session_driver',
-        'name',
-        'named_param',
-        'namespace_current',
-        'namespace_delimiter',
-        'namespace_exists',
-        'namespace_file_fullpathexists',
-        'namespace_global',
-        'namespace_import',
-        'namespace_load',
-        'namespace_page',
-        'namespace_unload',
-        'namespace_using',
-        'namespaces',
-        'namespaceuri',
-        'neq',
-        'net',
-        'net_connectinprogress',
-        'net_connectok',
-        'net_typessl',
-        'net_typessltcp',
-        'net_typessludp',
-        'net_typetcp',
-        'net_typeudp',
-        'net_waitread',
-        'net_waittimeout',
-        'net_waitwrite',
-        'nettohost16',
-        'nettohost32',
-        'newchild',
-        'next',
-        'nextsibling',
-        'no_default_output',
-        'nodetype',
-        'none',
-        'noprocess',
-        'not',
-        'nrx',
-        'nslookup',
-        'null',
-        'object',
-        'once',
-        'oneoff',
-        'op_logicalvalue',
-        'open',
-        'operator_logicalvalue',
-        'option',
-        'or',
-        'os_process',
-        'output',
-        'output_none',
-        'padleading',
-        'padtrailing',
-        'pagecount',
-        'pagesize',
-        'pair',
-        'paraminfo',
-        'params',
-        'params_up',
-        'parent',
-        'path',
-        'pdf_barcode',
-        'pdf_color',
-        'pdf_doc',
-        'pdf_font',
-        'pdf_image',
-        'pdf_list',
-        'pdf_read',
-        'pdf_serve',
-        'pdf_table',
-        'pdf_text',
-        'percent',
-        'pixel',
-        'portal',
-        'position',
-        'postcondition',
-        'precondition',
-        'prefix',
-        'prettyprintingnsmap',
-        'prettyprintingtypemap',
-        'previoussibling',
-        'priorityqueue',
-        'private',
-        'proc_convert',
-        'proc_convertbody',
-        'proc_convertone',
-        'proc_extract',
-        'proc_extractone',
-        'proc_find',
-        'proc_first',
-        'proc_foreach',
-        'proc_get',
-        'proc_join',
-        'proc_lasso',
-        'proc_last',
-        'proc_map_entry',
-        'proc_null',
-        'proc_regexp',
-        'proc_xml',
-        'proc_xslt',
-        'process',
-        'properties',
-        'protect',
-        'queue',
-        'rand',
-        'randomnumber',
-        'raw',
-        'rawheaders',
-        'read',
-        'readattributevalue',
-        'readerror',
-        'readfrom',
-        'readline',
-        'readlock',
-        'readstring',
-        'readunlock',
-        'recid_value',
-        'recipients',
-        'record_count',
-        'recordcount',
-        'recordid_value',
-        'records',
-        'records_array',
-        'records_map',
-        'rect',
-        'redirect_url',
-        'refcount',
-        'reference',
-        'referer',
-        'referer_url',
-        'referrals',
-        'referrer',
-        'referrer_url',
-        'regexp',
-        'remoteaddress',
-        'remove',
-        'removeall',
-        'removeattribute',
-        'removechild',
-        'removecurrent',
-        'removefirst',
-        'removelast',
-        'removeleading',
-        'removenamespace',
-        'removetrailing',
-        'render',
-        'repeating',
-        'repeating_valueitem',
-        'repeatingvalueitem',
-        'repetition',
-        'replace',
-        'replaceall',
-        'replacefirst',
-        'replacepattern',
-        'replacewith',
-        'req_column',
-        'req_field',
-        'required_column',
-        'required_field',
-        'reserve',
-        'reset',
-        'resolutionh',
-        'resolutionv',
-        'response',
-        'response_fileexists',
-        'response_filepath',
-        'response_localpath',
-        'response_path',
-        'response_realm',
-        'results',
-        'resultset',
-        'resultset_count',
-        'retrieve',
-        'return',
-        'return_value',
-        'returntype',
-        'reverse',
-        'reverseiterator',
-        'right',
-        'roman',
-        'rotate',
-        'row_count',
-        'rows',
-        'rows_array',
-        'run',
-        'run_children',
-        'rx',
-        'save',
-        'scale',
-        'schema_name',
-        'scientific',
-        'search',
-        'search_args',
-        'search_arguments',
-        'search_columnitem',
-        'search_fielditem',
-        'search_operatoritem',
-        'search_opitem',
-        'search_valueitem',
-        'searchfielditem',
-        'searchoperatoritem',
-        'searchopitem',
-        'searchvalueitem',
-        'second',
-        'select',
-        'selected',
-        'self',
-        'send',
-        'serialize',
-        'series',
-        'server_date',
-        'server_day',
-        'server_ip',
-        'server_name',
-        'server_port',
-        'server_push',
-        'server_siteisrunning',
-        'server_sitestart',
-        'server_sitestop',
-        'server_time',
-        'session_abort',
-        'session_addoutputfilter',
-        'session_addvar',
-        'session_addvariable',
-        'session_deleteexpired',
-        'session_driver',
-        'session_end',
-        'session_id',
-        'session_removevar',
-        'session_removevariable',
-        'session_result',
-        'session_setdriver',
-        'session_start',
-        'set',
-        'set_iterator',
-        'set_reverseiterator',
-        'setalignment',
-        'setbarheight',
-        'setbarmultiplier',
-        'setbarwidth',
-        'setbaseline',
-        'setblocking',
-        'setbordercolor',
-        'setborderwidth',
-        'setbytes',
-        'setcode',
-        'setcolor',
-        'setcolorspace',
-        'setdatatype',
-        'setencoding',
-        'setface',
-        'setfieldvalue',
-        'setfont',
-        'setformat',
-        'setgeneratechecksum',
-        'setheight',
-        'setlassodata',
-        'setlinewidth',
-        'setmarker',
-        'setmode',
-        'setname',
-        'setpadding',
-        'setpagenumber',
-        'setpagerange',
-        'setposition',
-        'setproperty',
-        'setrange',
-        'setshowchecksum',
-        'setsize',
-        'setspacing',
-        'settemplate',
-        'settemplatestr',
-        'settextalignment',
-        'settextdata',
-        'settextsize',
-        'settype',
-        'setunderline',
-        'setwidth',
-        'setxmldata',
-        'sharpen',
-        'showchecksum',
-        'showcode39startstop',
-        'showeanguardbars',
-        'shown_count',
-        'shown_first',
-        'shown_last',
-        'signal',
-        'signalall',
-        'site_atbegin',
-        'site_id',
-        'site_name',
-        'site_restart',
-        'size',
-        'skiprecords_value',
-        'sleep',
-        'smooth',
-        'soap_convertpartstopairs',
-        'soap_definetag',
-        'soap_info',
-        'soap_lastrequest',
-        'soap_lastresponse',
-        'soap_stub',
-        'sort',
-        'sort_args',
-        'sort_arguments',
-        'sort_columnitem',
-        'sort_fielditem',
-        'sort_orderitem',
-        'sortcolumnitem',
-        'sortfielditem',
-        'sortorderitem',
-        'sortwith',
-        'split',
-        'sqlite_createdb',
-        'sqlite_session_driver',
-        'sqlite_setsleepmillis',
-        'sqlite_setsleeptries',
-        'srand',
-        'stack',
-        'standards',
-        'steal',
-        'stock_quote',
-        'string',
-        'string_charfromname',
-        'string_concatenate',
-        'string_countfields',
-        'string_endswith',
-        'string_extract',
-        'string_findposition',
-        'string_findregexp',
-        'string_fordigit',
-        'string_getfield',
-        'string_getunicodeversion',
-        'string_insert',
-        'string_isalpha',
-        'string_isalphanumeric',
-        'string_isdigit',
-        'string_ishexdigit',
-        'string_islower',
-        'string_isnumeric',
-        'string_ispunctuation',
-        'string_isspace',
-        'string_isupper',
-        'string_length',
-        'string_lowercase',
-        'string_remove',
-        'string_removeleading',
-        'string_removetrailing',
-        'string_replace',
-        'string_replaceregexp',
-        'string_todecimal',
-        'string_tointeger',
-        'string_uppercase',
-        'string_validcharset',
-        'subject',
-        'substring',
-        'subtract',
-        'swapbytes',
-        'table_name',
-        'table_realname',
-        'tag',
-        'tag_name',
-        'tags',
-        'tags_find',
-        'tags_list',
-        'tcp_close',
-        'tcp_open',
-        'tcp_send',
-        'tcp_tcp_close',
-        'tcp_tcp_open',
-        'tcp_tcp_send',
-        'textwidth',
-        'thread_abort',
-        'thread_atomic',
-        'thread_event',
-        'thread_exists',
-        'thread_getcurrentid',
-        'thread_getpriority',
-        'thread_info',
-        'thread_list',
-        'thread_lock',
-        'thread_pipe',
-        'thread_priority_default',
-        'thread_priority_high',
-        'thread_priority_low',
-        'thread_rwlock',
-        'thread_semaphore',
-        'thread_setpriority',
-        'time',
-        'timezones',
-        'titlecase',
-        'to',
-        'todos',
-        'token_value',
-        'tolower',
-        'total_records',
-        'totitle',
-        'toupper',
-        'transform',
-        'treemap',
-        'treemap_iterator',
-        'trim',
-        'true',
-        'type',
-        'unescape',
-        'union',
-        'uniqueid',
-        'unlock',
-        'unserialize',
-        'up',
-        'uppercase',
-        'url_rewrite',
-        'valid_creditcard',
-        'valid_date',
-        'valid_email',
-        'valid_url',
-        'value',
-        'value_list',
-        'value_listitem',
-        'valuelistitem',
-        'values',
-        'valuetype',
-        'var',
-        'var_defined',
-        'var_remove',
-        'var_reset',
-        'var_set',
-        'variable',
-        'variable_defined',
-        'variable_set',
-        'variables',
-        'variant_count',
-        'vars',
-        'wait',
-        'wap_isenabled',
-        'wap_maxbuttons',
-        'wap_maxcolumns',
-        'wap_maxhorzpixels',
-        'wap_maxrows',
-        'wap_maxvertpixels',
-        'waskeyword',
-        'week',
-        'while',
-        'width',
-        'write',
-        'writelock',
-        'writeto',
-        'writeunlock',
-        'wsdl_extract',
-        'wsdl_getbinding',
-        'wsdl_getbindingforoperation',
-        'wsdl_getbindingoperations',
-        'wsdl_getmessagenamed',
-        'wsdl_getmessageparts',
-        'wsdl_getmessagetriofromporttype',
-        'wsdl_getopbodystyle',
-        'wsdl_getopbodyuse',
-        'wsdl_getoperation',
-        'wsdl_getoplocation',
-        'wsdl_getopmessagetypes',
-        'wsdl_getopsoapaction',
-        'wsdl_getportaddress',
-        'wsdl_getportsforservice',
-        'wsdl_getporttype',
-        'wsdl_getporttypeoperation',
-        'wsdl_getservicedocumentation',
-        'wsdl_getservices',
-        'wsdl_gettargetnamespace',
-        'wsdl_issoapoperation',
-        'wsdl_listoperations',
-        'wsdl_maketest',
-        'xml',
-        'xml_extract',
-        'xml_rpc',
-        'xml_rpccall',
-        'xml_rw',
-        'xml_serve',
-        'xml_transform',
-        'xml_xml',
-        'xml_xmlstream',
-        'xmllang',
-        'xmlschematype',
-        'xmlstream',
-        'xsd_attribute',
-        'xsd_blankarraybase',
-        'xsd_blankbase',
-        'xsd_buildtype',
-        'xsd_cache',
-        'xsd_checkcardinality',
-        'xsd_continueall',
-        'xsd_continueannotation',
-        'xsd_continueany',
-        'xsd_continueanyattribute',
-        'xsd_continueattribute',
-        'xsd_continueattributegroup',
-        'xsd_continuechoice',
-        'xsd_continuecomplexcontent',
-        'xsd_continuecomplextype',
-        'xsd_continuedocumentation',
-        'xsd_continueextension',
-        'xsd_continuegroup',
-        'xsd_continuekey',
-        'xsd_continuelist',
-        'xsd_continuerestriction',
-        'xsd_continuesequence',
-        'xsd_continuesimplecontent',
-        'xsd_continuesimpletype',
-        'xsd_continueunion',
-        'xsd_deserialize',
-        'xsd_fullyqualifyname',
-        'xsd_generate',
-        'xsd_generateblankfromtype',
-        'xsd_generateblanksimpletype',
-        'xsd_generatetype',
-        'xsd_getschematype',
-        'xsd_issimpletype',
-        'xsd_loadschema',
-        'xsd_lookupnamespaceuri',
-        'xsd_lookuptype',
-        'xsd_processany',
-        'xsd_processattribute',
-        'xsd_processattributegroup',
-        'xsd_processcomplextype',
-        'xsd_processelement',
-        'xsd_processgroup',
-        'xsd_processimport',
-        'xsd_processinclude',
-        'xsd_processschema',
-        'xsd_processsimpletype',
-        'xsd_ref',
-        'xsd_type',
-        'year'
-    ]
-}
diff --git a/python/ext-libs/pygments/lexers/_luabuiltins.py b/python/ext-libs/pygments/lexers/_luabuiltins.py
deleted file mode 100644
index 069c44f..0000000
--- a/python/ext-libs/pygments/lexers/_luabuiltins.py
+++ /dev/null
@@ -1,249 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._luabuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file contains the names and modules of lua functions
-    It is able to re-generate itself, but for adding new functions you
-    probably have to add some callbacks (see function module_callbacks).
-
-    Do not edit the MODULES dict by hand.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-MODULES = {'basic': ['_G',
-           '_VERSION',
-           'assert',
-           'collectgarbage',
-           'dofile',
-           'error',
-           'getfenv',
-           'getmetatable',
-           'ipairs',
-           'load',
-           'loadfile',
-           'loadstring',
-           'next',
-           'pairs',
-           'pcall',
-           'print',
-           'rawequal',
-           'rawget',
-           'rawset',
-           'select',
-           'setfenv',
-           'setmetatable',
-           'tonumber',
-           'tostring',
-           'type',
-           'unpack',
-           'xpcall'],
- 'coroutine': ['coroutine.create',
-               'coroutine.resume',
-               'coroutine.running',
-               'coroutine.status',
-               'coroutine.wrap',
-               'coroutine.yield'],
- 'debug': ['debug.debug',
-           'debug.getfenv',
-           'debug.gethook',
-           'debug.getinfo',
-           'debug.getlocal',
-           'debug.getmetatable',
-           'debug.getregistry',
-           'debug.getupvalue',
-           'debug.setfenv',
-           'debug.sethook',
-           'debug.setlocal',
-           'debug.setmetatable',
-           'debug.setupvalue',
-           'debug.traceback'],
- 'io': ['io.close',
-        'io.flush',
-        'io.input',
-        'io.lines',
-        'io.open',
-        'io.output',
-        'io.popen',
-        'io.read',
-        'io.tmpfile',
-        'io.type',
-        'io.write'],
- 'math': ['math.abs',
-          'math.acos',
-          'math.asin',
-          'math.atan2',
-          'math.atan',
-          'math.ceil',
-          'math.cosh',
-          'math.cos',
-          'math.deg',
-          'math.exp',
-          'math.floor',
-          'math.fmod',
-          'math.frexp',
-          'math.huge',
-          'math.ldexp',
-          'math.log10',
-          'math.log',
-          'math.max',
-          'math.min',
-          'math.modf',
-          'math.pi',
-          'math.pow',
-          'math.rad',
-          'math.random',
-          'math.randomseed',
-          'math.sinh',
-          'math.sin',
-          'math.sqrt',
-          'math.tanh',
-          'math.tan'],
- 'modules': ['module',
-             'require',
-             'package.cpath',
-             'package.loaded',
-             'package.loadlib',
-             'package.path',
-             'package.preload',
-             'package.seeall'],
- 'os': ['os.clock',
-        'os.date',
-        'os.difftime',
-        'os.execute',
-        'os.exit',
-        'os.getenv',
-        'os.remove',
-        'os.rename',
-        'os.setlocale',
-        'os.time',
-        'os.tmpname'],
- 'string': ['string.byte',
-            'string.char',
-            'string.dump',
-            'string.find',
-            'string.format',
-            'string.gmatch',
-            'string.gsub',
-            'string.len',
-            'string.lower',
-            'string.match',
-            'string.rep',
-            'string.reverse',
-            'string.sub',
-            'string.upper'],
- 'table': ['table.concat',
-           'table.insert',
-           'table.maxn',
-           'table.remove',
-           'table.sort']}
-
-if __name__ == '__main__':
-    import re
-    import urllib
-    import pprint
-
-    # you can't generally find out what module a function belongs to if you
-    # have only its name. Because of this, here are some callback functions
-    # that recognize if a gioven function belongs to a specific module
-    def module_callbacks():
-        def is_in_coroutine_module(name):
-            return name.startswith('coroutine.')
-
-        def is_in_modules_module(name):
-            if name in ['require', 'module'] or name.startswith('package'):
-                return True
-            else:
-                return False
-
-        def is_in_string_module(name):
-            return name.startswith('string.')
-
-        def is_in_table_module(name):
-            return name.startswith('table.')
-
-        def is_in_math_module(name):
-            return name.startswith('math')
-
-        def is_in_io_module(name):
-            return name.startswith('io.')
-
-        def is_in_os_module(name):
-            return name.startswith('os.')
-
-        def is_in_debug_module(name):
-            return name.startswith('debug.')
-
-        return {'coroutine': is_in_coroutine_module,
-                'modules': is_in_modules_module,
-                'string': is_in_string_module,
-                'table': is_in_table_module,
-                'math': is_in_math_module,
-                'io': is_in_io_module,
-                'os': is_in_os_module,
-                'debug': is_in_debug_module}
-
-
-
-    def get_newest_version():
-        f = urllib.urlopen('http://www.lua.org/manual/')
-        r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
-        for line in f:
-            m = r.match(line)
-            if m is not None:
-                return m.groups()[0]
-
-    def get_lua_functions(version):
-        f = urllib.urlopen('http://www.lua.org/manual/%s/' % version)
-        r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
-        functions = []
-        for line in f:
-            m = r.match(line)
-            if m is not None:
-                functions.append(m.groups()[0])
-        return functions
-
-    def get_function_module(name):
-        for mod, cb in module_callbacks().iteritems():
-            if cb(name):
-                return mod
-        if '.' in name:
-            return name.split('.')[0]
-        else:
-            return 'basic'
-
-    def regenerate(filename, modules):
-        f = open(filename)
-        try:
-            content = f.read()
-        finally:
-            f.close()
-
-        header = content[:content.find('MODULES = {')]
-        footer = content[content.find("if __name__ == '__main__':"):]
-
-
-        f = open(filename, 'w')
-        f.write(header)
-        f.write('MODULES = %s\n\n' % pprint.pformat(modules))
-        f.write(footer)
-        f.close()
-
-    def run():
-        version = get_newest_version()
-        print '> Downloading function index for Lua %s' % version
-        functions = get_lua_functions(version)
-        print '> %d functions found:' % len(functions)
-
-        modules = {}
-        for full_function_name in functions:
-            print '>> %s' % full_function_name
-            m = get_function_module(full_function_name)
-            modules.setdefault(m, []).append(full_function_name)
-
-        regenerate(__file__, modules)
-
-
-    run()
diff --git a/python/ext-libs/pygments/lexers/_mapping.py b/python/ext-libs/pygments/lexers/_mapping.py
deleted file mode 100644
index 53e0917..0000000
--- a/python/ext-libs/pygments/lexers/_mapping.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._mapping
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer mapping defintions. This file is generated by itself. Everytime
-    you change something on a builtin lexer defintion, run this script from
-    the lexers folder to update it.
-
-    Do not alter the LEXERS dictionary by hand.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-LEXERS = {
-    'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
-    'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
-    'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
-    'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
-    'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
-    'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
-    'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
-    'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
-    'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
-    'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
-    'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
-    'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
-    'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
-    'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
-    'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
-    'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
-    'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
-    'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit', 'Autoit'), ('*.au3',), ('text/x-autoit',)),
-    'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
-    'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
-    'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
-    'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
-    'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*'), ('application/x-sh', 'application/x-shellscript')),
-    'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
-    'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
-    'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
-    'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
-    'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
-    'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
-    'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
-    'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
-    'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
-    'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
-    'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
-    'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
-    'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
-    'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()),
-    'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
-    'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
-    'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
-    'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire'), (), ('text/html+cheetah', 'text/html+spitfire')),
-    'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
-    'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
-    'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
-    'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
-    'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
-    'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
-    'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
-    'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
-    'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
-    'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
-    'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
-    'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
-    'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
-    'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
-    'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
-    'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
-    'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
-    'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)),
-    'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
-    'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
-    'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
-    'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
-    'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
-    'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
-    'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
-    'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
-    'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control',), ('control',), ()),
-    'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
-    'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
-    'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
-    'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
-    'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
-    'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
-    'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
-    'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
-    'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
-    'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
-    'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
-    'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
-    'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
-    'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
-    'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
-    'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
-    'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
-    'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
-    'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
-    'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
-    'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
-    'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
-    'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
-    'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
-    'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
-    'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()),
-    'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
-    'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas',), ('*.s', '*.S'), ('text/x-gas',)),
-    'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
-    'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
-    'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
-    'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
-    'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
-    'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
-    'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
-    'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
-    'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
-    'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
-    'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
-    'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
-    'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
-    'HaxeLexer': ('pygments.lexers.web', 'haXe', ('hx', 'haXe'), ('*.hx',), ('text/haxe',)),
-    'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja'), (), ('text/html+django', 'text/html+jinja')),
-    'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
-    'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
-    'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
-    'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
-    'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
-    'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
-    'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
-    'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
-    'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
-    'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
-    'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
-    'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
-    'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
-    'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
-    'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
-    'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
-    'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
-    'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
-    'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
-    'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
-    'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
-    'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
-    'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
-    'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()),
-    'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
-    'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
-    'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
-    'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
-    'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
-    'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
-    'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
-    'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
-    'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
-    'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
-    'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
-    'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
-    'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
-    'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
-    'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
-    'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
-    'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
-    'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
-    'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
-    'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
-    'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
-    'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
-    'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
-    'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
-    'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
-    'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
-    'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
-    'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
-    'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
-    'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
-    'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
-    'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
-    'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
-    'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
-    'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
-    'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
-    'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
-    'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
-    'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
-    'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
-    'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
-    'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
-    'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
-    'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
-    'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
-    'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
-    'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
-    'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
-    'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
-    'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()),
-    'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
-    'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
-    'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
-    'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
-    'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
-    'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
-    'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
-    'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
-    'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
-    'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
-    'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
-    'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
-    'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
-    'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
-    'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
-    'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
-    'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1'), ('*.ps1',), ('text/x-powershell',)),
-    'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
-    'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
-    'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
-    'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
-    'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
-    'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
-    'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
-    'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
-    'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
-    'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
-    'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)),
-    'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
-    'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
-    'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktl'), ('text/x-racket', 'application/x-racket')),
-    'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
-    'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
-    'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
-    'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
-    'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
-    'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
-    'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
-    'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
-    'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
-    'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
-    'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
-    'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
-    'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
-    'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
-    'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)),
-    'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
-    'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
-    'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
-    'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)),
-    'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
-    'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
-    'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
-    'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
-    'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
-    'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
-    'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
-    'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
-    'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
-    'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
-    'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
-    'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
-    'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
-    'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
-    'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
-    'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
-    'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
-    'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
-    'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
-    'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()),
-    'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
-    'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
-    'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
-    'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
-    'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
-    'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
-    'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
-    'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
-    'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
-    'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()),
-    'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
-    'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
-    'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
-    'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
-    'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
-    'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
-    'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
-    'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
-    'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
-    'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
-    'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
-    'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
-    'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
-    'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
-    'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
-    'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
-    'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
-    'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
-}
-
-if __name__ == '__main__':
-    import sys
-    import os
-
-    # lookup lexers
-    found_lexers = []
-    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
-    for filename in os.listdir('.'):
-        if filename.endswith('.py') and not filename.startswith('_'):
-            module_name = 'pygments.lexers.%s' % filename[:-3]
-            print module_name
-            module = __import__(module_name, None, None, [''])
-            for lexer_name in module.__all__:
-                lexer = getattr(module, lexer_name)
-                found_lexers.append(
-                    '%r: %r' % (lexer_name,
-                                (module_name,
-                                 lexer.name,
-                                 tuple(lexer.aliases),
-                                 tuple(lexer.filenames),
-                                 tuple(lexer.mimetypes))))
-    # sort them, that should make the diff files for svn smaller
-    found_lexers.sort()
-
-    # extract useful sourcecode from this file
-    f = open(__file__)
-    try:
-        content = f.read()
-    finally:
-        f.close()
-    header = content[:content.find('LEXERS = {')]
-    footer = content[content.find("if __name__ == '__main__':"):]
-
-    # write new file
-    f = open(__file__, 'wb')
-    f.write(header)
-    f.write('LEXERS = {\n    %s,\n}\n\n' % ',\n    '.join(found_lexers))
-    f.write(footer)
-    f.close()
diff --git a/python/ext-libs/pygments/lexers/_openedgebuiltins.py b/python/ext-libs/pygments/lexers/_openedgebuiltins.py
deleted file mode 100644
index 4561b07..0000000
--- a/python/ext-libs/pygments/lexers/_openedgebuiltins.py
+++ /dev/null
@@ -1,562 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._openedgebuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Builtin list for the OpenEdgeLexer.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = [
-    'ABSOLUTE', 'ABS', 'ABSO', 'ABSOL', 'ABSOLU', 'ABSOLUT', 'ACCELERATOR',
-    'ACCUM', 'ACCUMULATE', 'ACCUM', 'ACCUMU', 'ACCUMUL', 'ACCUMULA',
-    'ACCUMULAT', 'ACTIVE-FORM', 'ACTIVE-WINDOW', 'ADD', 'ADD-BUFFER',
-    'ADD-CALC-COLUMN', 'ADD-COLUMNS-FROM', 'ADD-EVENTS-PROCEDURE',
-    'ADD-FIELDS-FROM', 'ADD-FIRST', 'ADD-INDEX-FIELD', 'ADD-LAST',
-    'ADD-LIKE-COLUMN', 'ADD-LIKE-FIELD', 'ADD-LIKE-INDEX', 'ADD-NEW-FIELD',
-    'ADD-NEW-INDEX', 'ADD-SCHEMA-LOCATION', 'ADD-SUPER-PROCEDURE', 'ADM-DATA',
-    'ADVISE', 'ALERT-BOX', 'ALIAS', 'ALL', 'ALLOW-COLUMN-SEARCHING',
-    'ALLOW-REPLICATION', 'ALTER', 'ALWAYS-ON-TOP', 'AMBIGUOUS', 'AMBIG',
-    'AMBIGU', 'AMBIGUO', 'AMBIGUOU', 'ANALYZE', 'ANALYZ', 'AND', 'ANSI-ONLY',
-    'ANY', 'ANYWHERE', 'APPEND', 'APPL-ALERT-BOXES', 'APPL-ALERT',
-    'APPL-ALERT-', 'APPL-ALERT-B', 'APPL-ALERT-BO', 'APPL-ALERT-BOX',
-    'APPL-ALERT-BOXE', 'APPL-CONTEXT-ID', 'APPLICATION', 'APPLY',
-    'APPSERVER-INFO', 'APPSERVER-PASSWORD', 'APPSERVER-USERID', 'ARRAY-MESSAGE',
-    'AS', 'ASC', 'ASCENDING', 'ASCE', 'ASCEN', 'ASCEND', 'ASCENDI', 'ASCENDIN',
-    'ASK-OVERWRITE', 'ASSEMBLY', 'ASSIGN', 'ASYNCHRONOUS',
-    'ASYNC-REQUEST-COUNT', 'ASYNC-REQUEST-HANDLE', 'AT', 'ATTACHED-PAIRLIST',
-    'ATTR-SPACE', 'ATTR', 'ATTRI', 'ATTRIB', 'ATTRIBU', 'ATTRIBUT',
-    'AUDIT-CONTROL', 'AUDIT-ENABLED', 'AUDIT-EVENT-CONTEXT', 'AUDIT-POLICY',
-    'AUTHENTICATION-FAILED', 'AUTHORIZATION', 'AUTO-COMPLETION', 'AUTO-COMP',
-    'AUTO-COMPL', 'AUTO-COMPLE', 'AUTO-COMPLET', 'AUTO-COMPLETI',
-    'AUTO-COMPLETIO', 'AUTO-ENDKEY', 'AUTO-END-KEY', 'AUTO-GO', 'AUTO-INDENT',
-    'AUTO-IND', 'AUTO-INDE', 'AUTO-INDEN', 'AUTOMATIC', 'AUTO-RESIZE',
-    'AUTO-RETURN', 'AUTO-RET', 'AUTO-RETU', 'AUTO-RETUR', 'AUTO-SYNCHRONIZE',
-    'AUTO-ZAP', 'AUTO-Z', 'AUTO-ZA', 'AVAILABLE', 'AVAIL', 'AVAILA', 'AVAILAB',
-    'AVAILABL', 'AVAILABLE-FORMATS', 'AVERAGE', 'AVE', 'AVER', 'AVERA',
-    'AVERAG', 'AVG', 'BACKGROUND', 'BACK', 'BACKG', 'BACKGR', 'BACKGRO',
-    'BACKGROU', 'BACKGROUN', 'BACKWARDS', 'BACKWARD', 'BASE64-DECODE',
-    'BASE64-ENCODE', 'BASE-ADE', 'BASE-KEY', 'BATCH-MODE', 'BATCH', 'BATCH-',
-    'BATCH-M', 'BATCH-MO', 'BATCH-MOD', 'BATCH-SIZE', 'BEFORE-HIDE', 'BEFORE-H',
-    'BEFORE-HI', 'BEFORE-HID', 'BEGIN-EVENT-GROUP', 'BEGINS', 'BELL', 'BETWEEN',
-    'BGCOLOR', 'BGC', 'BGCO', 'BGCOL', 'BGCOLO', 'BIG-ENDIAN', 'BINARY', 'BIND',
-    'BIND-WHERE', 'BLANK', 'BLOCK-ITERATION-DISPLAY', 'BORDER-BOTTOM-CHARS',
-    'BORDER-B', 'BORDER-BO', 'BORDER-BOT', 'BORDER-BOTT', 'BORDER-BOTTO',
-    'BORDER-BOTTOM-PIXELS', 'BORDER-BOTTOM-P', 'BORDER-BOTTOM-PI',
-    'BORDER-BOTTOM-PIX', 'BORDER-BOTTOM-PIXE', 'BORDER-BOTTOM-PIXEL',
-    'BORDER-LEFT-CHARS', 'BORDER-L', 'BORDER-LE', 'BORDER-LEF', 'BORDER-LEFT',
-    'BORDER-LEFT-', 'BORDER-LEFT-C', 'BORDER-LEFT-CH', 'BORDER-LEFT-CHA',
-    'BORDER-LEFT-CHAR', 'BORDER-LEFT-PIXELS', 'BORDER-LEFT-P', 'BORDER-LEFT-PI',
-    'BORDER-LEFT-PIX', 'BORDER-LEFT-PIXE', 'BORDER-LEFT-PIXEL',
-    'BORDER-RIGHT-CHARS', 'BORDER-R', 'BORDER-RI', 'BORDER-RIG', 'BORDER-RIGH',
-    'BORDER-RIGHT', 'BORDER-RIGHT-', 'BORDER-RIGHT-C', 'BORDER-RIGHT-CH',
-    'BORDER-RIGHT-CHA', 'BORDER-RIGHT-CHAR', 'BORDER-RIGHT-PIXELS',
-    'BORDER-RIGHT-P', 'BORDER-RIGHT-PI', 'BORDER-RIGHT-PIX',
-    'BORDER-RIGHT-PIXE', 'BORDER-RIGHT-PIXEL', 'BORDER-TOP-CHARS', 'BORDER-T',
-    'BORDER-TO', 'BORDER-TOP', 'BORDER-TOP-', 'BORDER-TOP-C', 'BORDER-TOP-CH',
-    'BORDER-TOP-CHA', 'BORDER-TOP-CHAR', 'BORDER-TOP-PIXELS', 'BORDER-TOP-P',
-    'BORDER-TOP-PI', 'BORDER-TOP-PIX', 'BORDER-TOP-PIXE', 'BORDER-TOP-PIXEL',
-    'BOX', 'BOX-SELECTABLE', 'BOX-SELECT', 'BOX-SELECTA', 'BOX-SELECTAB',
-    'BOX-SELECTABL', 'BREAK', 'BROWSE', 'BUFFER', 'BUFFER-CHARS',
-    'BUFFER-COMPARE', 'BUFFER-COPY', 'BUFFER-CREATE', 'BUFFER-DELETE',
-    'BUFFER-FIELD', 'BUFFER-HANDLE', 'BUFFER-LINES', 'BUFFER-NAME',
-    'BUFFER-RELEASE', 'BUFFER-VALUE', 'BUTTON', 'BUTTONS', 'BUTTON', 'BY',
-    'BY-POINTER', 'BY-VARIANT-POINTER', 'CACHE', 'CACHE-SIZE', 'CALL',
-    'CALL-NAME', 'CALL-TYPE', 'CANCEL-BREAK', 'CANCEL-BUTTON', 'CAN-CREATE',
-    'CAN-DELETE', 'CAN-DO', 'CAN-FIND', 'CAN-QUERY', 'CAN-READ', 'CAN-SET',
-    'CAN-WRITE', 'CAPS', 'CAREFUL-PAINT', 'CASE', 'CASE-SENSITIVE', 'CASE-SEN',
-    'CASE-SENS', 'CASE-SENSI', 'CASE-SENSIT', 'CASE-SENSITI', 'CASE-SENSITIV',
-    'CAST', 'CATCH', 'CDECL', 'CENTERED', 'CENTER', 'CENTERE', 'CHAINED',
-    'CHARACTER_LENGTH', 'CHARSET', 'CHECK', 'CHECKED', 'CHOOSE', 'CHR', 'CLASS',
-    'CLASS-TYPE', 'CLEAR', 'CLEAR-APPL-CONTEXT', 'CLEAR-LOG', 'CLEAR-SELECTION',
-    'CLEAR-SELECT', 'CLEAR-SELECTI', 'CLEAR-SELECTIO', 'CLEAR-SORT-ARROWS',
-    'CLEAR-SORT-ARROW', 'CLIENT-CONNECTION-ID', 'CLIENT-PRINCIPAL',
-    'CLIENT-TTY', 'CLIENT-TYPE', 'CLIENT-WORKSTATION', 'CLIPBOARD', 'CLOSE',
-    'CLOSE-LOG', 'CODE', 'CODEBASE-LOCATOR', 'CODEPAGE', 'CODEPAGE-CONVERT',
-    'COLLATE', 'COL-OF', 'COLON', 'COLON-ALIGNED', 'COLON-ALIGN',
-    'COLON-ALIGNE', 'COLOR', 'COLOR-TABLE', 'COLUMN', 'COL', 'COLU', 'COLUM',
-    'COLUMN-BGCOLOR', 'COLUMN-DCOLOR', 'COLUMN-FGCOLOR', 'COLUMN-FONT',
-    'COLUMN-LABEL', 'COLUMN-LAB', 'COLUMN-LABE', 'COLUMN-MOVABLE', 'COLUMN-OF',
-    'COLUMN-PFCOLOR', 'COLUMN-READ-ONLY', 'COLUMN-RESIZABLE', 'COLUMNS',
-    'COLUMN-SCROLLING', 'COMBO-BOX', 'COMMAND', 'COMPARES', 'COMPILE',
-    'COMPILER', 'COMPLETE', 'COM-SELF', 'CONFIG-NAME', 'CONNECT', 'CONNECTED',
-    'CONSTRUCTOR', 'CONTAINS', 'CONTENTS', 'CONTEXT', 'CONTEXT-HELP',
-    'CONTEXT-HELP-FILE', 'CONTEXT-HELP-ID', 'CONTEXT-POPUP', 'CONTROL',
-    'CONTROL-BOX', 'CONTROL-FRAME', 'CONVERT', 'CONVERT-3D-COLORS',
-    'CONVERT-TO-OFFSET', 'CONVERT-TO-OFFS', 'CONVERT-TO-OFFSE', 'COPY-DATASET',
-    'COPY-LOB', 'COPY-SAX-ATTRIBUTES', 'COPY-TEMP-TABLE', 'COUNT', 'COUNT-OF',
-    'CPCASE', 'CPCOLL', 'CPINTERNAL', 'CPLOG', 'CPPRINT', 'CPRCODEIN',
-    'CPRCODEOUT', 'CPSTREAM', 'CPTERM', 'CRC-VALUE', 'CREATE', 'CREATE-LIKE',
-    'CREATE-LIKE-SEQUENTIAL', 'CREATE-NODE-NAMESPACE',
-    'CREATE-RESULT-LIST-ENTRY', 'CREATE-TEST-FILE', 'CURRENT', 'CURRENT_DATE',
-    'CURRENT_DATE', 'CURRENT-CHANGED', 'CURRENT-COLUMN', 'CURRENT-ENVIRONMENT',
-    'CURRENT-ENV', 'CURRENT-ENVI', 'CURRENT-ENVIR', 'CURRENT-ENVIRO',
-    'CURRENT-ENVIRON', 'CURRENT-ENVIRONM', 'CURRENT-ENVIRONME',
-    'CURRENT-ENVIRONMEN', 'CURRENT-ITERATION', 'CURRENT-LANGUAGE',
-    'CURRENT-LANG', 'CURRENT-LANGU', 'CURRENT-LANGUA', 'CURRENT-LANGUAG',
-    'CURRENT-QUERY', 'CURRENT-RESULT-ROW', 'CURRENT-ROW-MODIFIED',
-    'CURRENT-VALUE', 'CURRENT-WINDOW', 'CURSOR', 'CURS', 'CURSO', 'CURSOR-CHAR',
-    'CURSOR-LINE', 'CURSOR-OFFSET', 'DATABASE', 'DATA-BIND',
-    'DATA-ENTRY-RETURN', 'DATA-ENTRY-RET', 'DATA-ENTRY-RETU',
-    'DATA-ENTRY-RETUR', 'DATA-RELATION', 'DATA-REL', 'DATA-RELA', 'DATA-RELAT',
-    'DATA-RELATI', 'DATA-RELATIO', 'DATASERVERS', 'DATASET', 'DATASET-HANDLE',
-    'DATA-SOURCE', 'DATA-SOURCE-COMPLETE-MAP', 'DATA-SOURCE-MODIFIED',
-    'DATA-SOURCE-ROWID', 'DATA-TYPE', 'DATA-T', 'DATA-TY', 'DATA-TYP',
-    'DATE-FORMAT', 'DATE-F', 'DATE-FO', 'DATE-FOR', 'DATE-FORM', 'DATE-FORMA',
-    'DAY', 'DBCODEPAGE', 'DBCOLLATION', 'DBNAME', 'DBPARAM', 'DB-REFERENCES',
-    'DBRESTRICTIONS', 'DBREST', 'DBRESTR', 'DBRESTRI', 'DBRESTRIC',
-    'DBRESTRICT', 'DBRESTRICTI', 'DBRESTRICTIO', 'DBRESTRICTION', 'DBTASKID',
-    'DBTYPE', 'DBVERSION', 'DBVERS', 'DBVERSI', 'DBVERSIO', 'DCOLOR', 'DDE',
-    'DDE-ERROR', 'DDE-ID', 'DDE-I', 'DDE-ITEM', 'DDE-NAME', 'DDE-TOPIC',
-    'DEBLANK', 'DEBUG', 'DEBU', 'DEBUG-ALERT', 'DEBUGGER', 'DEBUG-LIST',
-    'DECIMALS', 'DECLARE', 'DECLARE-NAMESPACE', 'DECRYPT', 'DEFAULT',
-    'DEFAULT-BUFFER-HANDLE', 'DEFAULT-BUTTON', 'DEFAUT-B', 'DEFAUT-BU',
-    'DEFAUT-BUT', 'DEFAUT-BUTT', 'DEFAUT-BUTTO', 'DEFAULT-COMMIT',
-    'DEFAULT-EXTENSION', 'DEFAULT-EX', 'DEFAULT-EXT', 'DEFAULT-EXTE',
-    'DEFAULT-EXTEN', 'DEFAULT-EXTENS', 'DEFAULT-EXTENSI', 'DEFAULT-EXTENSIO',
-    'DEFAULT-NOXLATE', 'DEFAULT-NOXL', 'DEFAULT-NOXLA', 'DEFAULT-NOXLAT',
-    'DEFAULT-VALUE', 'DEFAULT-WINDOW', 'DEFINED', 'DEFINE-USER-EVENT-MANAGER',
-    'DELETE', 'DEL', 'DELE', 'DELET', 'DELETE-CHARACTER', 'DELETE-CHAR',
-    'DELETE-CHARA', 'DELETE-CHARAC', 'DELETE-CHARACT', 'DELETE-CHARACTE',
-    'DELETE-CURRENT-ROW', 'DELETE-LINE', 'DELETE-RESULT-LIST-ENTRY',
-    'DELETE-SELECTED-ROW', 'DELETE-SELECTED-ROWS', 'DELIMITER', 'DESC',
-    'DESCENDING', 'DESC', 'DESCE', 'DESCEN', 'DESCEND', 'DESCENDI', 'DESCENDIN',
-    'DESELECT-FOCUSED-ROW', 'DESELECTION', 'DESELECT-ROWS',
-    'DESELECT-SELECTED-ROW', 'DESTRUCTOR', 'DIALOG-BOX', 'DICTIONARY', 'DICT',
-    'DICTI', 'DICTIO', 'DICTION', 'DICTIONA', 'DICTIONAR', 'DIR', 'DISABLE',
-    'DISABLE-AUTO-ZAP', 'DISABLED', 'DISABLE-DUMP-TRIGGERS',
-    'DISABLE-LOAD-TRIGGERS', 'DISCONNECT', 'DISCON', 'DISCONN', 'DISCONNE',
-    'DISCONNEC', 'DISP', 'DISPLAY', 'DISP', 'DISPL', 'DISPLA',
-    'DISPLAY-MESSAGE', 'DISPLAY-TYPE', 'DISPLAY-T', 'DISPLAY-TY', 'DISPLAY-TYP',
-    'DISTINCT', 'DO', 'DOMAIN-DESCRIPTION', 'DOMAIN-NAME', 'DOMAIN-TYPE', 'DOS',
-    'DOUBLE', 'DOWN', 'DRAG-ENABLED', 'DROP', 'DROP-DOWN', 'DROP-DOWN-LIST',
-    'DROP-FILE-NOTIFY', 'DROP-TARGET', 'DUMP', 'DYNAMIC', 'DYNAMIC-FUNCTION',
-    'EACH', 'ECHO', 'EDGE-CHARS', 'EDGE', 'EDGE-', 'EDGE-C', 'EDGE-CH',
-    'EDGE-CHA', 'EDGE-CHAR', 'EDGE-PIXELS', 'EDGE-P', 'EDGE-PI', 'EDGE-PIX',
-    'EDGE-PIXE', 'EDGE-PIXEL', 'EDIT-CAN-PASTE', 'EDIT-CAN-UNDO', 'EDIT-CLEAR',
-    'EDIT-COPY', 'EDIT-CUT', 'EDITING', 'EDITOR', 'EDIT-PASTE', 'EDIT-UNDO',
-    'ELSE', 'EMPTY', 'EMPTY-TEMP-TABLE', 'ENABLE', 'ENABLED-FIELDS', 'ENCODE',
-    'ENCRYPT', 'ENCRYPT-AUDIT-MAC-KEY', 'ENCRYPTION-SALT', 'END',
-    'END-DOCUMENT', 'END-ELEMENT', 'END-EVENT-GROUP', 'END-FILE-DROP', 'ENDKEY',
-    'END-KEY', 'END-MOVE', 'END-RESIZE', 'END-ROW-RESIZE', 'END-USER-PROMPT',
-    'ENTERED', 'ENTRY', 'EQ', 'ERROR', 'ERROR-COLUMN', 'ERROR-COL',
-    'ERROR-COLU', 'ERROR-COLUM', 'ERROR-ROW', 'ERROR-STACK-TRACE',
-    'ERROR-STATUS', 'ERROR-STAT', 'ERROR-STATU', 'ESCAPE', 'ETIME',
-    'EVENT-GROUP-ID', 'EVENT-PROCEDURE', 'EVENT-PROCEDURE-CONTEXT', 'EVENTS',
-    'EVENT', 'EVENT-TYPE', 'EVENT-T', 'EVENT-TY', 'EVENT-TYP', 'EXCEPT',
-    'EXCLUSIVE-ID', 'EXCLUSIVE-LOCK', 'EXCLUSIVE', 'EXCLUSIVE-', 'EXCLUSIVE-L',
-    'EXCLUSIVE-LO', 'EXCLUSIVE-LOC', 'EXCLUSIVE-WEB-USER', 'EXECUTE', 'EXISTS',
-    'EXP', 'EXPAND', 'EXPANDABLE', 'EXPLICIT', 'EXPORT', 'EXPORT-PRINCIPAL',
-    'EXTENDED', 'EXTENT', 'EXTERNAL', 'FALSE', 'FETCH', 'FETCH-SELECTED-ROW',
-    'FGCOLOR', 'FGC', 'FGCO', 'FGCOL', 'FGCOLO', 'FIELD', 'FIELDS', 'FIELD',
-    'FILE', 'FILE-CREATE-DATE', 'FILE-CREATE-TIME', 'FILE-INFORMATION',
-    'FILE-INFO', 'FILE-INFOR', 'FILE-INFORM', 'FILE-INFORMA', 'FILE-INFORMAT',
-    'FILE-INFORMATI', 'FILE-INFORMATIO', 'FILE-MOD-DATE', 'FILE-MOD-TIME',
-    'FILENAME', 'FILE-NAME', 'FILE-OFFSET', 'FILE-OFF', 'FILE-OFFS',
-    'FILE-OFFSE', 'FILE-SIZE', 'FILE-TYPE', 'FILL', 'FILLED', 'FILL-IN',
-    'FILTERS', 'FINAL', 'FINALLY', 'FIND', 'FIND-BY-ROWID',
-    'FIND-CASE-SENSITIVE', 'FIND-CURRENT', 'FINDER', 'FIND-FIRST',
-    'FIND-GLOBAL', 'FIND-LAST', 'FIND-NEXT-OCCURRENCE', 'FIND-PREV-OCCURRENCE',
-    'FIND-SELECT', 'FIND-UNIQUE', 'FIND-WRAP-AROUND', 'FIRST',
-    'FIRST-ASYNCH-REQUEST', 'FIRST-CHILD', 'FIRST-COLUMN', 'FIRST-FORM',
-    'FIRST-OBJECT', 'FIRST-OF', 'FIRST-PROCEDURE', 'FIRST-PROC', 'FIRST-PROCE',
-    'FIRST-PROCED', 'FIRST-PROCEDU', 'FIRST-PROCEDUR', 'FIRST-SERVER',
-    'FIRST-TAB-ITEM', 'FIRST-TAB-I', 'FIRST-TAB-IT', 'FIRST-TAB-ITE',
-    'FIT-LAST-COLUMN', 'FIXED-ONLY', 'FLAT-BUTTON', 'FLOAT', 'FOCUS',
-    'FOCUSED-ROW', 'FOCUSED-ROW-SELECTED', 'FONT', 'FONT-TABLE', 'FOR',
-    'FORCE-FILE', 'FOREGROUND', 'FORE', 'FOREG', 'FOREGR', 'FOREGRO',
-    'FOREGROU', 'FOREGROUN', 'FORM', 'FORMAT', 'FORM', 'FORMA', 'FORMATTED',
-    'FORMATTE', 'FORM-LONG-INPUT', 'FORWARD', 'FORWARDS', 'FORWARD', 'FRAGMENT',
-    'FRAGMEN', 'FRAME', 'FRAM', 'FRAME-COL', 'FRAME-DB', 'FRAME-DOWN',
-    'FRAME-FIELD', 'FRAME-FILE', 'FRAME-INDEX', 'FRAME-INDE', 'FRAME-LINE',
-    'FRAME-NAME', 'FRAME-ROW', 'FRAME-SPACING', 'FRAME-SPA', 'FRAME-SPAC',
-    'FRAME-SPACI', 'FRAME-SPACIN', 'FRAME-VALUE', 'FRAME-VAL', 'FRAME-VALU',
-    'FRAME-X', 'FRAME-Y', 'FREQUENCY', 'FROM', 'FROM-CHARS', 'FROM-C',
-    'FROM-CH', 'FROM-CHA', 'FROM-CHAR', 'FROM-CURRENT', 'FROM-CUR', 'FROM-CURR',
-    'FROM-CURRE', 'FROM-CURREN', 'FROM-PIXELS', 'FROM-P', 'FROM-PI', 'FROM-PIX',
-    'FROM-PIXE', 'FROM-PIXEL', 'FULL-HEIGHT-CHARS', 'FULL-HEIGHT',
-    'FULL-HEIGHT-', 'FULL-HEIGHT-C', 'FULL-HEIGHT-CH', 'FULL-HEIGHT-CHA',
-    'FULL-HEIGHT-CHAR', 'FULL-HEIGHT-PIXELS', 'FULL-HEIGHT-P', 'FULL-HEIGHT-PI',
-    'FULL-HEIGHT-PIX', 'FULL-HEIGHT-PIXE', 'FULL-HEIGHT-PIXEL', 'FULL-PATHNAME',
-    'FULL-PATHN', 'FULL-PATHNA', 'FULL-PATHNAM', 'FULL-WIDTH-CHARS',
-    'FULL-WIDTH', 'FULL-WIDTH-', 'FULL-WIDTH-C', 'FULL-WIDTH-CH',
-    'FULL-WIDTH-CHA', 'FULL-WIDTH-CHAR', 'FULL-WIDTH-PIXELS', 'FULL-WIDTH-P',
-    'FULL-WIDTH-PI', 'FULL-WIDTH-PIX', 'FULL-WIDTH-PIXE', 'FULL-WIDTH-PIXEL',
-    'FUNCTION', 'FUNCTION-CALL-TYPE', 'GATEWAYS', 'GATEWAY', 'GE',
-    'GENERATE-MD5', 'GENERATE-PBE-KEY', 'GENERATE-PBE-SALT',
-    'GENERATE-RANDOM-KEY', 'GENERATE-UUID', 'GET', 'GET-ATTR-CALL-TYPE',
-    'GET-ATTRIBUTE-NODE', 'GET-BINARY-DATA', 'GET-BLUE-VALUE', 'GET-BLUE',
-    'GET-BLUE-', 'GET-BLUE-V', 'GET-BLUE-VA', 'GET-BLUE-VAL', 'GET-BLUE-VALU',
-    'GET-BROWSE-COLUMN', 'GET-BUFFER-HANDLEGETBYTE', 'GET-BYTE',
-    'GET-CALLBACK-PROC-CONTEXT', 'GET-CALLBACK-PROC-NAME', 'GET-CGI-LIST',
-    'GET-CGI-LONG-VALUE', 'GET-CGI-VALUE', 'GET-CODEPAGES', 'GET-COLLATIONS',
-    'GET-CONFIG-VALUE', 'GET-CURRENT', 'GET-DOUBLE', 'GET-DROPPED-FILE',
-    'GET-DYNAMIC', 'GET-ERROR-COLUMN', 'GET-ERROR-ROW', 'GET-FILE',
-    'GET-FILE-NAME', 'GET-FILE-OFFSET', 'GET-FILE-OFFSE', 'GET-FIRST',
-    'GET-FLOAT', 'GET-GREEN-VALUE', 'GET-GREEN', 'GET-GREEN-', 'GET-GREEN-V',
-    'GET-GREEN-VA', 'GET-GREEN-VAL', 'GET-GREEN-VALU',
-    'GET-INDEX-BY-NAMESPACE-NAME', 'GET-INDEX-BY-QNAME', 'GET-INT64',
-    'GET-ITERATION', 'GET-KEY-VALUE', 'GET-KEY-VAL', 'GET-KEY-VALU', 'GET-LAST',
-    'GET-LOCALNAME-BY-INDEX', 'GET-LONG', 'GET-MESSAGE', 'GET-NEXT',
-    'GET-NUMBER', 'GET-POINTER-VALUE', 'GET-PREV', 'GET-PRINTERS',
-    'GET-PROPERTY', 'GET-QNAME-BY-INDEX', 'GET-RED-VALUE', 'GET-RED',
-    'GET-RED-', 'GET-RED-V', 'GET-RED-VA', 'GET-RED-VAL', 'GET-RED-VALU',
-    'GET-REPOSITIONED-ROW', 'GET-RGB-VALUE', 'GET-SELECTED-WIDGET',
-    'GET-SELECTED', 'GET-SELECTED-', 'GET-SELECTED-W', 'GET-SELECTED-WI',
-    'GET-SELECTED-WID', 'GET-SELECTED-WIDG', 'GET-SELECTED-WIDGE', 'GET-SHORT',
-    'GET-SIGNATURE', 'GET-SIZE', 'GET-STRING', 'GET-TAB-ITEM',
-    'GET-TEXT-HEIGHT-CHARS', 'GET-TEXT-HEIGHT', 'GET-TEXT-HEIGHT-',
-    'GET-TEXT-HEIGHT-C', 'GET-TEXT-HEIGHT-CH', 'GET-TEXT-HEIGHT-CHA',
-    'GET-TEXT-HEIGHT-CHAR', 'GET-TEXT-HEIGHT-PIXELS', 'GET-TEXT-HEIGHT-P',
-    'GET-TEXT-HEIGHT-PI', 'GET-TEXT-HEIGHT-PIX', 'GET-TEXT-HEIGHT-PIXE',
-    'GET-TEXT-HEIGHT-PIXEL', 'GET-TEXT-WIDTH-CHARS', 'GET-TEXT-WIDTH',
-    'GET-TEXT-WIDTH-', 'GET-TEXT-WIDTH-C', 'GET-TEXT-WIDTH-CH',
-    'GET-TEXT-WIDTH-CHA', 'GET-TEXT-WIDTH-CHAR', 'GET-TEXT-WIDTH-PIXELS',
-    'GET-TEXT-WIDTH-P', 'GET-TEXT-WIDTH-PI', 'GET-TEXT-WIDTH-PIX',
-    'GET-TEXT-WIDTH-PIXE', 'GET-TEXT-WIDTH-PIXEL', 'GET-TYPE-BY-INDEX',
-    'GET-TYPE-BY-NAMESPACE-NAME', 'GET-TYPE-BY-QNAME', 'GET-UNSIGNED-LONG',
-    'GET-UNSIGNED-SHORT', 'GET-URI-BY-INDEX', 'GET-VALUE-BY-INDEX',
-    'GET-VALUE-BY-NAMESPACE-NAME', 'GET-VALUE-BY-QNAME', 'GET-WAIT-STATE',
-    'GLOBAL', 'GO-ON', 'GO-PENDING', 'GO-PEND', 'GO-PENDI', 'GO-PENDIN',
-    'GRANT', 'GRAPHIC-EDGE', 'GRAPHIC-E', 'GRAPHIC-ED', 'GRAPHIC-EDG',
-    'GRID-FACTOR-HORIZONTAL', 'GRID-FACTOR-H', 'GRID-FACTOR-HO',
-    'GRID-FACTOR-HOR', 'GRID-FACTOR-HORI', 'GRID-FACTOR-HORIZ',
-    'GRID-FACTOR-HORIZO', 'GRID-FACTOR-HORIZON', 'GRID-FACTOR-HORIZONT',
-    'GRID-FACTOR-HORIZONTA', 'GRID-FACTOR-VERTICAL', 'GRID-FACTOR-V',
-    'GRID-FACTOR-VE', 'GRID-FACTOR-VER', 'GRID-FACTOR-VERT', 'GRID-FACTOR-VERT',
-    'GRID-FACTOR-VERTI', 'GRID-FACTOR-VERTIC', 'GRID-FACTOR-VERTICA',
-    'GRID-SNAP', 'GRID-UNIT-HEIGHT-CHARS', 'GRID-UNIT-HEIGHT',
-    'GRID-UNIT-HEIGHT-', 'GRID-UNIT-HEIGHT-C', 'GRID-UNIT-HEIGHT-CH',
-    'GRID-UNIT-HEIGHT-CHA', 'GRID-UNIT-HEIGHT-PIXELS', 'GRID-UNIT-HEIGHT-P',
-    'GRID-UNIT-HEIGHT-PI', 'GRID-UNIT-HEIGHT-PIX', 'GRID-UNIT-HEIGHT-PIXE',
-    'GRID-UNIT-HEIGHT-PIXEL', 'GRID-UNIT-WIDTH-CHARS', 'GRID-UNIT-WIDTH',
-    'GRID-UNIT-WIDTH-', 'GRID-UNIT-WIDTH-C', 'GRID-UNIT-WIDTH-CH',
-    'GRID-UNIT-WIDTH-CHA', 'GRID-UNIT-WIDTH-CHAR', 'GRID-UNIT-WIDTH-PIXELS',
-    'GRID-UNIT-WIDTH-P', 'GRID-UNIT-WIDTH-PI', 'GRID-UNIT-WIDTH-PIX',
-    'GRID-UNIT-WIDTH-PIXE', 'GRID-UNIT-WIDTH-PIXEL', 'GRID-VISIBLE', 'GROUP',
-    'GT', 'GUID', 'HANDLER', 'HAS-RECORDS', 'HAVING', 'HEADER', 'HEIGHT-CHARS',
-    'HEIGHT', 'HEIGHT-', 'HEIGHT-C', 'HEIGHT-CH', 'HEIGHT-CHA', 'HEIGHT-CHAR',
-    'HEIGHT-PIXELS', 'HEIGHT-P', 'HEIGHT-PI', 'HEIGHT-PIX', 'HEIGHT-PIXE',
-    'HEIGHT-PIXEL', 'HELP', 'HEX-DECODE', 'HEX-ENCODE', 'HIDDEN', 'HIDE',
-    'HORIZONTAL', 'HORI', 'HORIZ', 'HORIZO', 'HORIZON', 'HORIZONT', 'HORIZONTA',
-    'HOST-BYTE-ORDER', 'HTML-CHARSET', 'HTML-END-OF-LINE', 'HTML-END-OF-PAGE',
-    'HTML-FRAME-BEGIN', 'HTML-FRAME-END', 'HTML-HEADER-BEGIN',
-    'HTML-HEADER-END', 'HTML-TITLE-BEGIN', 'HTML-TITLE-END', 'HWND', 'ICON',
-    'IF', 'IMAGE', 'IMAGE-DOWN', 'IMAGE-INSENSITIVE', 'IMAGE-SIZE',
-    'IMAGE-SIZE-CHARS', 'IMAGE-SIZE-C', 'IMAGE-SIZE-CH', 'IMAGE-SIZE-CHA',
-    'IMAGE-SIZE-CHAR', 'IMAGE-SIZE-PIXELS', 'IMAGE-SIZE-P', 'IMAGE-SIZE-PI',
-    'IMAGE-SIZE-PIX', 'IMAGE-SIZE-PIXE', 'IMAGE-SIZE-PIXEL', 'IMAGE-UP',
-    'IMMEDIATE-DISPLAY', 'IMPLEMENTS', 'IMPORT', 'IMPORT-PRINCIPAL', 'IN',
-    'INCREMENT-EXCLUSIVE-ID', 'INDEX', 'INDEXED-REPOSITION', 'INDEX-HINT',
-    'INDEX-INFORMATION', 'INDICATOR', 'INFORMATION', 'INFO', 'INFOR', 'INFORM',
-    'INFORMA', 'INFORMAT', 'INFORMATI', 'INFORMATIO', 'IN-HANDLE',
-    'INHERIT-BGCOLOR', 'INHERIT-BGC', 'INHERIT-BGCO', 'INHERIT-BGCOL',
-    'INHERIT-BGCOLO', 'INHERIT-FGCOLOR', 'INHERIT-FGC', 'INHERIT-FGCO',
-    'INHERIT-FGCOL', 'INHERIT-FGCOLO', 'INHERITS', 'INITIAL', 'INIT', 'INITI',
-    'INITIA', 'INITIAL-DIR', 'INITIAL-FILTER', 'INITIALIZE-DOCUMENT-TYPE',
-    'INITIATE', 'INNER-CHARS', 'INNER-LINES', 'INPUT', 'INPUT-OUTPUT',
-    'INPUT-O', 'INPUT-OU', 'INPUT-OUT', 'INPUT-OUTP', 'INPUT-OUTPU',
-    'INPUT-VALUE', 'INSERT', 'INSERT-ATTRIBUTE', 'INSERT-BACKTAB', 'INSERT-B',
-    'INSERT-BA', 'INSERT-BAC', 'INSERT-BACK', 'INSERT-BACKT', 'INSERT-BACKTA',
-    'INSERT-FILE', 'INSERT-ROW', 'INSERT-STRING', 'INSERT-TAB', 'INSERT-T',
-    'INSERT-TA', 'INTERFACE', 'INTERNAL-ENTRIES', 'INTO', 'INVOKE', 'IS',
-    'IS-ATTR-SPACE', 'IS-ATTR', 'IS-ATTR-', 'IS-ATTR-S', 'IS-ATTR-SP',
-    'IS-ATTR-SPA', 'IS-ATTR-SPAC', 'IS-CLASS', 'IS-CLAS', 'IS-LEAD-BYTE',
-    'IS-ATTR', 'IS-OPEN', 'IS-PARAMETER-SET', 'IS-ROW-SELECTED', 'IS-SELECTED',
-    'ITEM', 'ITEMS-PER-ROW', 'JOIN', 'JOIN-BY-SQLDB', 'KBLABEL',
-    'KEEP-CONNECTION-OPEN', 'KEEP-FRAME-Z-ORDER', 'KEEP-FRAME-Z',
-    'KEEP-FRAME-Z-', 'KEEP-FRAME-Z-O', 'KEEP-FRAME-Z-OR', 'KEEP-FRAME-Z-ORD',
-    'KEEP-FRAME-Z-ORDE', 'KEEP-MESSAGES', 'KEEP-SECURITY-CACHE',
-    'KEEP-TAB-ORDER', 'KEY', 'KEYCODE', 'KEY-CODE', 'KEYFUNCTION', 'KEYFUNC',
-    'KEYFUNCT', 'KEYFUNCTI', 'KEYFUNCTIO', 'KEY-FUNCTION', 'KEY-FUNC',
-    'KEY-FUNCT', 'KEY-FUNCTI', 'KEY-FUNCTIO', 'KEYLABEL', 'KEY-LABEL', 'KEYS',
-    'KEYWORD', 'KEYWORD-ALL', 'LABEL', 'LABEL-BGCOLOR', 'LABEL-BGC',
-    'LABEL-BGCO', 'LABEL-BGCOL', 'LABEL-BGCOLO', 'LABEL-DCOLOR', 'LABEL-DC',
-    'LABEL-DCO', 'LABEL-DCOL', 'LABEL-DCOLO', 'LABEL-FGCOLOR', 'LABEL-FGC',
-    'LABEL-FGCO', 'LABEL-FGCOL', 'LABEL-FGCOLO', 'LABEL-FONT', 'LABEL-PFCOLOR',
-    'LABEL-PFC', 'LABEL-PFCO', 'LABEL-PFCOL', 'LABEL-PFCOLO', 'LABELS',
-    'LANDSCAPE', 'LANGUAGES', 'LANGUAGE', 'LARGE', 'LARGE-TO-SMALL', 'LAST',
-    'LAST-ASYNCH-REQUEST', 'LAST-BATCH', 'LAST-CHILD', 'LAST-EVENT',
-    'LAST-EVEN', 'LAST-FORM', 'LASTKEY', 'LAST-KEY', 'LAST-OBJECT', 'LAST-OF',
-    'LAST-PROCEDURE', 'LAST-PROCE', 'LAST-PROCED', 'LAST-PROCEDU',
-    'LAST-PROCEDUR', 'LAST-SERVER', 'LAST-TAB-ITEM', 'LAST-TAB-I',
-    'LAST-TAB-IT', 'LAST-TAB-ITE', 'LC', 'LDBNAME', 'LE', 'LEAVE',
-    'LEFT-ALIGNED', 'LEFT-ALIGN', 'LEFT-ALIGNE', 'LEFT-TRIM', 'LENGTH',
-    'LIBRARY', 'LIKE', 'LIKE-SEQUENTIAL', 'LINE', 'LINE-COUNTER', 'LINE-COUNT',
-    'LINE-COUNTE', 'LIST-EVENTS', 'LISTING', 'LISTI', 'LISTIN',
-    'LIST-ITEM-PAIRS', 'LIST-ITEMS', 'LIST-PROPERTY-NAMES', 'LIST-QUERY-ATTRS',
-    'LIST-SET-ATTRS', 'LIST-WIDGETS', 'LITERAL-QUESTION', 'LITTLE-ENDIAN',
-    'LOAD', 'LOAD-DOMAINS', 'LOAD-ICON', 'LOAD-IMAGE', 'LOAD-IMAGE-DOWN',
-    'LOAD-IMAGE-INSENSITIVE', 'LOAD-IMAGE-UP', 'LOAD-MOUSE-POINTER',
-    'LOAD-MOUSE-P', 'LOAD-MOUSE-PO', 'LOAD-MOUSE-POI', 'LOAD-MOUSE-POIN',
-    'LOAD-MOUSE-POINT', 'LOAD-MOUSE-POINTE', 'LOAD-PICTURE', 'LOAD-SMALL-ICON',
-    'LOCAL-NAME', 'LOCATOR-COLUMN-NUMBER', 'LOCATOR-LINE-NUMBER',
-    'LOCATOR-PUBLIC-ID', 'LOCATOR-SYSTEM-ID', 'LOCATOR-TYPE', 'LOCKED',
-    'LOCK-REGISTRATION', 'LOG', 'LOG-AUDIT-EVENT', 'LOGIN-EXPIRATION-TIMESTAMP',
-    'LOGIN-HOST', 'LOGIN-STATE', 'LOG-MANAGER', 'LOGOUT', 'LOOKAHEAD', 'LOOKUP',
-    'LT', 'MACHINE-CLASS', 'MANDATORY', 'MANUAL-HIGHLIGHT', 'MAP',
-    'MARGIN-EXTRA', 'MARGIN-HEIGHT-CHARS', 'MARGIN-HEIGHT', 'MARGIN-HEIGHT-',
-    'MARGIN-HEIGHT-C', 'MARGIN-HEIGHT-CH', 'MARGIN-HEIGHT-CHA',
-    'MARGIN-HEIGHT-CHAR', 'MARGIN-HEIGHT-PIXELS', 'MARGIN-HEIGHT-P',
-    'MARGIN-HEIGHT-PI', 'MARGIN-HEIGHT-PIX', 'MARGIN-HEIGHT-PIXE',
-    'MARGIN-HEIGHT-PIXEL', 'MARGIN-WIDTH-CHARS', 'MARGIN-WIDTH',
-    'MARGIN-WIDTH-', 'MARGIN-WIDTH-C', 'MARGIN-WIDTH-CH', 'MARGIN-WIDTH-CHA',
-    'MARGIN-WIDTH-CHAR', 'MARGIN-WIDTH-PIXELS', 'MARGIN-WIDTH-P',
-    'MARGIN-WIDTH-PI', 'MARGIN-WIDTH-PIX', 'MARGIN-WIDTH-PIXE',
-    'MARGIN-WIDTH-PIXEL', 'MARK-NEW', 'MARK-ROW-STATE', 'MATCHES', 'MAX',
-    'MAX-BUTTON', 'MAX-CHARS', 'MAX-DATA-GUESS', 'MAX-HEIGHT',
-    'MAX-HEIGHT-CHARS', 'MAX-HEIGHT-C', 'MAX-HEIGHT-CH', 'MAX-HEIGHT-CHA',
-    'MAX-HEIGHT-CHAR', 'MAX-HEIGHT-PIXELS', 'MAX-HEIGHT-P', 'MAX-HEIGHT-PI',
-    'MAX-HEIGHT-PIX', 'MAX-HEIGHT-PIXE', 'MAX-HEIGHT-PIXEL', 'MAXIMIZE',
-    'MAXIMUM', 'MAX', 'MAXI', 'MAXIM', 'MAXIMU', 'MAXIMUM-LEVEL', 'MAX-ROWS',
-    'MAX-SIZE', 'MAX-VALUE', 'MAX-VAL', 'MAX-VALU', 'MAX-WIDTH',
-    'MAX-WIDTH-CHARS', 'MAX-WIDTH', 'MAX-WIDTH-', 'MAX-WIDTH-C', 'MAX-WIDTH-CH',
-    'MAX-WIDTH-CHA', 'MAX-WIDTH-CHAR', 'MAX-WIDTH-PIXELS', 'MAX-WIDTH-P',
-    'MAX-WIDTH-PI', 'MAX-WIDTH-PIX', 'MAX-WIDTH-PIXE', 'MAX-WIDTH-PIXEL',
-    'MD5-DIGEST', 'MEMBER', 'MEMPTR-TO-NODE-VALUE', 'MENU', 'MENUBAR',
-    'MENU-BAR', 'MENU-ITEM', 'MENU-KEY', 'MENU-K', 'MENU-KE', 'MENU-MOUSE',
-    'MENU-M', 'MENU-MO', 'MENU-MOU', 'MENU-MOUS', 'MERGE-BY-FIELD', 'MESSAGE',
-    'MESSAGE-AREA', 'MESSAGE-AREA-FONT', 'MESSAGE-LINES', 'METHOD', 'MIN',
-    'MIN-BUTTON', 'MIN-COLUMN-WIDTH-CHARS', 'MIN-COLUMN-WIDTH-C',
-    'MIN-COLUMN-WIDTH-CH', 'MIN-COLUMN-WIDTH-CHA', 'MIN-COLUMN-WIDTH-CHAR',
-    'MIN-COLUMN-WIDTH-PIXELS', 'MIN-COLUMN-WIDTH-P', 'MIN-COLUMN-WIDTH-PI',
-    'MIN-COLUMN-WIDTH-PIX', 'MIN-COLUMN-WIDTH-PIXE', 'MIN-COLUMN-WIDTH-PIXEL',
-    'MIN-HEIGHT-CHARS', 'MIN-HEIGHT', 'MIN-HEIGHT-', 'MIN-HEIGHT-C',
-    'MIN-HEIGHT-CH', 'MIN-HEIGHT-CHA', 'MIN-HEIGHT-CHAR', 'MIN-HEIGHT-PIXELS',
-    'MIN-HEIGHT-P', 'MIN-HEIGHT-PI', 'MIN-HEIGHT-PIX', 'MIN-HEIGHT-PIXE',
-    'MIN-HEIGHT-PIXEL', 'MINIMUM', 'MIN', 'MINI', 'MINIM', 'MINIMU', 'MIN-SIZE',
-    'MIN-VALUE', 'MIN-VAL', 'MIN-VALU', 'MIN-WIDTH-CHARS', 'MIN-WIDTH',
-    'MIN-WIDTH-', 'MIN-WIDTH-C', 'MIN-WIDTH-CH', 'MIN-WIDTH-CHA',
-    'MIN-WIDTH-CHAR', 'MIN-WIDTH-PIXELS', 'MIN-WIDTH-P', 'MIN-WIDTH-PI',
-    'MIN-WIDTH-PIX', 'MIN-WIDTH-PIXE', 'MIN-WIDTH-PIXEL', 'MODIFIED', 'MODULO',
-    'MOD', 'MODU', 'MODUL', 'MONTH', 'MOUSE', 'MOUSE-POINTER', 'MOUSE-P',
-    'MOUSE-PO', 'MOUSE-POI', 'MOUSE-POIN', 'MOUSE-POINT', 'MOUSE-POINTE',
-    'MOVABLE', 'MOVE-AFTER-TAB-ITEM', 'MOVE-AFTER', 'MOVE-AFTER-',
-    'MOVE-AFTER-T', 'MOVE-AFTER-TA', 'MOVE-AFTER-TAB', 'MOVE-AFTER-TAB-',
-    'MOVE-AFTER-TAB-I', 'MOVE-AFTER-TAB-IT', 'MOVE-AFTER-TAB-ITE',
-    'MOVE-BEFORE-TAB-ITEM', 'MOVE-BEFOR', 'MOVE-BEFORE', 'MOVE-BEFORE-',
-    'MOVE-BEFORE-T', 'MOVE-BEFORE-TA', 'MOVE-BEFORE-TAB', 'MOVE-BEFORE-TAB-',
-    'MOVE-BEFORE-TAB-I', 'MOVE-BEFORE-TAB-IT', 'MOVE-BEFORE-TAB-ITE',
-    'MOVE-COLUMN', 'MOVE-COL', 'MOVE-COLU', 'MOVE-COLUM', 'MOVE-TO-BOTTOM',
-    'MOVE-TO-B', 'MOVE-TO-BO', 'MOVE-TO-BOT', 'MOVE-TO-BOTT', 'MOVE-TO-BOTTO',
-    'MOVE-TO-EOF', 'MOVE-TO-TOP', 'MOVE-TO-T', 'MOVE-TO-TO', 'MPE',
-    'MULTI-COMPILE', 'MULTIPLE', 'MULTIPLE-KEY', 'MULTITASKING-INTERVAL',
-    'MUST-EXIST', 'NAME', 'NAMESPACE-PREFIX', 'NAMESPACE-URI', 'NATIVE', 'NE',
-    'NEEDS-APPSERVER-PROMPT', 'NEEDS-PROMPT', 'NEW', 'NEW-INSTANCE', 'NEW-ROW',
-    'NEXT', 'NEXT-COLUMN', 'NEXT-PROMPT', 'NEXT-ROWID', 'NEXT-SIBLING',
-    'NEXT-TAB-ITEM', 'NEXT-TAB-I', 'NEXT-TAB-IT', 'NEXT-TAB-ITE', 'NEXT-VALUE',
-    'NO', 'NO-APPLY', 'NO-ARRAY-MESSAGE', 'NO-ASSIGN', 'NO-ATTR-LIST',
-    'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-L', 'NO-ATTR-LI', 'NO-ATTR-LIS',
-    'NO-ATTR-SPACE', 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-S', 'NO-ATTR-SP',
-    'NO-ATTR-SPA', 'NO-ATTR-SPAC', 'NO-AUTO-VALIDATE', 'NO-BIND-WHERE',
-    'NO-BOX', 'NO-CONSOLE', 'NO-CONVERT', 'NO-CONVERT-3D-COLORS',
-    'NO-CURRENT-VALUE', 'NO-DEBUG', 'NODE-VALUE-TO-MEMPTR', 'NO-DRAG',
-    'NO-ECHO', 'NO-EMPTY-SPACE', 'NO-ERROR', 'NO-FILL', 'NO-F', 'NO-FI',
-    'NO-FIL', 'NO-FOCUS', 'NO-HELP', 'NO-HIDE', 'NO-INDEX-HINT',
-    'NO-INHERIT-BGCOLOR', 'NO-INHERIT-BGC', 'NO-INHERIT-BGCO', 'LABEL-BGCOL',
-    'LABEL-BGCOLO', 'NO-INHERIT-FGCOLOR', 'NO-INHERIT-FGC', 'NO-INHERIT-FGCO',
-    'NO-INHERIT-FGCOL', 'NO-INHERIT-FGCOLO', 'NO-JOIN-BY-SQLDB', 'NO-LABELS',
-    'NO-LABE', 'NO-LOBS', 'NO-LOCK', 'NO-LOOKAHEAD', 'NO-MAP', 'NO-MESSAGE',
-    'NO-MES', 'NO-MESS', 'NO-MESSA', 'NO-MESSAG', 'NONAMESPACE-SCHEMA-LOCATION',
-    'NONE', 'NO-PAUSE', 'NO-PREFETCH', 'NO-PREFE', 'NO-PREFET', 'NO-PREFETC',
-    'NORMALIZE', 'NO-ROW-MARKERS', 'NO-SCROLLBAR-VERTICAL',
-    'NO-SEPARATE-CONNECTION', 'NO-SEPARATORS', 'NOT', 'NO-TAB-STOP',
-    'NOT-ACTIVE', 'NO-UNDERLINE', 'NO-UND', 'NO-UNDE', 'NO-UNDER', 'NO-UNDERL',
-    'NO-UNDERLI', 'NO-UNDERLIN', 'NO-UNDO', 'NO-VALIDATE', 'NO-VAL', 'NO-VALI',
-    'NO-VALID', 'NO-VALIDA', 'NO-VALIDAT', 'NOW', 'NO-WAIT', 'NO-WORD-WRAP',
-    'NULL', 'NUM-ALIASES', 'NUM-ALI', 'NUM-ALIA', 'NUM-ALIAS', 'NUM-ALIASE',
-    'NUM-BUFFERS', 'NUM-BUTTONS', 'NUM-BUT', 'NUM-BUTT', 'NUM-BUTTO',
-    'NUM-BUTTON', 'NUM-COLUMNS', 'NUM-COL', 'NUM-COLU', 'NUM-COLUM',
-    'NUM-COLUMN', 'NUM-COPIES', 'NUM-DBS', 'NUM-DROPPED-FILES', 'NUM-ENTRIES',
-    'NUMERIC', 'NUMERIC-FORMAT', 'NUMERIC-F', 'NUMERIC-FO', 'NUMERIC-FOR',
-    'NUMERIC-FORM', 'NUMERIC-FORMA', 'NUM-FIELDS', 'NUM-FORMATS', 'NUM-ITEMS',
-    'NUM-ITERATIONS', 'NUM-LINES', 'NUM-LOCKED-COLUMNS', 'NUM-LOCKED-COL',
-    'NUM-LOCKED-COLU', 'NUM-LOCKED-COLUM', 'NUM-LOCKED-COLUMN', 'NUM-MESSAGES',
-    'NUM-PARAMETERS', 'NUM-REFERENCES', 'NUM-REPLACED', 'NUM-RESULTS',
-    'NUM-SELECTED-ROWS', 'NUM-SELECTED-WIDGETS', 'NUM-SELECTED',
-    'NUM-SELECTED-', 'NUM-SELECTED-W', 'NUM-SELECTED-WI', 'NUM-SELECTED-WID',
-    'NUM-SELECTED-WIDG', 'NUM-SELECTED-WIDGE', 'NUM-SELECTED-WIDGET',
-    'NUM-TABS', 'NUM-TO-RETAIN', 'NUM-VISIBLE-COLUMNS', 'OCTET-LENGTH', 'OF',
-    'OFF', 'OK', 'OK-CANCEL', 'OLD', 'ON', 'ON-FRAME-BORDER', 'ON-FRAME',
-    'ON-FRAME-', 'ON-FRAME-B', 'ON-FRAME-BO', 'ON-FRAME-BOR', 'ON-FRAME-BORD',
-    'ON-FRAME-BORDE', 'OPEN', 'OPSYS', 'OPTION', 'OR', 'ORDERED-JOIN',
-    'ORDINAL', 'OS-APPEND', 'OS-COMMAND', 'OS-COPY', 'OS-CREATE-DIR',
-    'OS-DELETE', 'OS-DIR', 'OS-DRIVES', 'OS-DRIVE', 'OS-ERROR', 'OS-GETENV',
-    'OS-RENAME', 'OTHERWISE', 'OUTPUT', 'OVERLAY', 'OVERRIDE', 'OWNER', 'PAGE',
-    'PAGE-BOTTOM', 'PAGE-BOT', 'PAGE-BOTT', 'PAGE-BOTTO', 'PAGED',
-    'PAGE-NUMBER', 'PAGE-NUM', 'PAGE-NUMB', 'PAGE-NUMBE', 'PAGE-SIZE',
-    'PAGE-TOP', 'PAGE-WIDTH', 'PAGE-WID', 'PAGE-WIDT', 'PARAMETER', 'PARAM',
-    'PARAME', 'PARAMET', 'PARAMETE', 'PARENT', 'PARSE-STATUS', 'PARTIAL-KEY',
-    'PASCAL', 'PASSWORD-FIELD', 'PATHNAME', 'PAUSE', 'PBE-HASH-ALGORITHM',
-    'PBE-HASH-ALG', 'PBE-HASH-ALGO', 'PBE-HASH-ALGOR', 'PBE-HASH-ALGORI',
-    'PBE-HASH-ALGORIT', 'PBE-HASH-ALGORITH', 'PBE-KEY-ROUNDS', 'PDBNAME',
-    'PERSISTENT', 'PERSIST', 'PERSISTE', 'PERSISTEN',
-    'PERSISTENT-CACHE-DISABLED', 'PFCOLOR', 'PFC', 'PFCO', 'PFCOL', 'PFCOLO',
-    'PIXELS', 'PIXELS-PER-COLUMN', 'PIXELS-PER-COL', 'PIXELS-PER-COLU',
-    'PIXELS-PER-COLUM', 'PIXELS-PER-ROW', 'POPUP-MENU', 'POPUP-M', 'POPUP-ME',
-    'POPUP-MEN', 'POPUP-ONLY', 'POPUP-O', 'POPUP-ON', 'POPUP-ONL', 'PORTRAIT',
-    'POSITION', 'PRECISION', 'PREFER-DATASET', 'PREPARED', 'PREPARE-STRING',
-    'PREPROCESS', 'PREPROC', 'PREPROCE', 'PREPROCES', 'PRESELECT', 'PRESEL',
-    'PRESELE', 'PRESELEC', 'PREV', 'PREV-COLUMN', 'PREV-SIBLING',
-    'PREV-TAB-ITEM', 'PREV-TAB-I', 'PREV-TAB-IT', 'PREV-TAB-ITE', 'PRIMARY',
-    'PRINTER', 'PRINTER-CONTROL-HANDLE', 'PRINTER-HDC', 'PRINTER-NAME',
-    'PRINTER-PORT', 'PRINTER-SETUP', 'PRIVATE', 'PRIVATE-DATA', 'PRIVATE-D',
-    'PRIVATE-DA', 'PRIVATE-DAT', 'PRIVILEGES', 'PROCEDURE', 'PROCE', 'PROCED',
-    'PROCEDU', 'PROCEDUR', 'PROCEDURE-CALL-TYPE', 'PROCESS', 'PROC-HANDLE',
-    'PROC-HA', 'PROC-HAN', 'PROC-HAND', 'PROC-HANDL', 'PROC-STATUS', 'PROC-ST',
-    'PROC-STA', 'PROC-STAT', 'PROC-STATU', 'proc-text', 'proc-text-buffe',
-    'PROFILER', 'PROGRAM-NAME', 'PROGRESS', 'PROGRESS-SOURCE', 'PROGRESS-S',
-    'PROGRESS-SO', 'PROGRESS-SOU', 'PROGRESS-SOUR', 'PROGRESS-SOURC', 'PROMPT',
-    'PROMPT-FOR', 'PROMPT-F', 'PROMPT-FO', 'PROMSGS', 'PROPATH', 'PROPERTY',
-    'PROTECTED', 'PROVERSION', 'PROVERS', 'PROVERSI', 'PROVERSIO', 'PROXY',
-    'PROXY-PASSWORD', 'PROXY-USERID', 'PUBLIC', 'PUBLIC-ID', 'PUBLISH',
-    'PUBLISHED-EVENTS', 'PUT', 'PUTBYTE', 'PUT-BYTE', 'PUT-DOUBLE', 'PUT-FLOAT',
-    'PUT-INT64', 'PUT-KEY-VALUE', 'PUT-KEY-VAL', 'PUT-KEY-VALU', 'PUT-LONG',
-    'PUT-SHORT', 'PUT-STRING', 'PUT-UNSIGNED-LONG', 'QUERY', 'QUERY-CLOSE',
-    'QUERY-OFF-END', 'QUERY-OPEN', 'QUERY-PREPARE', 'QUERY-TUNING', 'QUESTION',
-    'QUIT', 'QUOTER', 'RADIO-BUTTONS', 'RADIO-SET', 'RANDOM', 'RAW-TRANSFER',
-    'RCODE-INFORMATION', 'RCODE-INFO', 'RCODE-INFOR', 'RCODE-INFORM',
-    'RCODE-INFORMA', 'RCODE-INFORMAT', 'RCODE-INFORMATI', 'RCODE-INFORMATIO',
-    'READ-AVAILABLE', 'READ-EXACT-NUM', 'READ-FILE', 'READKEY', 'READ-ONLY',
-    'READ-XML', 'READ-XMLSCHEMA', 'REAL', 'RECORD-LENGTH', 'RECTANGLE', 'RECT',
-    'RECTA', 'RECTAN', 'RECTANG', 'RECTANGL', 'RECURSIVE', 'REFERENCE-ONLY',
-    'REFRESH', 'REFRESHABLE', 'REFRESH-AUDIT-POLICY', 'REGISTER-DOMAIN',
-    'RELEASE', 'REMOTE', 'REMOVE-EVENTS-PROCEDURE', 'REMOVE-SUPER-PROCEDURE',
-    'REPEAT', 'REPLACE', 'REPLACE-SELECTION-TEXT', 'REPOSITION',
-    'REPOSITION-BACKWARD', 'REPOSITION-FORWARD', 'REPOSITION-MODE',
-    'REPOSITION-TO-ROW', 'REPOSITION-TO-ROWID', 'REQUEST', 'RESET', 'RESIZABLE',
-    'RESIZA', 'RESIZAB', 'RESIZABL', 'RESIZE', 'RESTART-ROW', 'RESTART-ROWID',
-    'RETAIN', 'RETAIN-SHAPE', 'RETRY', 'RETRY-CANCEL', 'RETURN',
-    'RETURN-INSERTED', 'RETURN-INS', 'RETURN-INSE', 'RETURN-INSER',
-    'RETURN-INSERT', 'RETURN-INSERTE', 'RETURNS', 'RETURN-TO-START-DIR',
-    'RETURN-TO-START-DI', 'RETURN-VALUE', 'RETURN-VAL', 'RETURN-VALU',
-    'RETURN-VALUE-DATA-TYPE', 'REVERSE-FROM', 'REVERT', 'REVOKE', 'RGB-VALUE',
-    'RIGHT-ALIGNED', 'RETURN-ALIGN', 'RETURN-ALIGNE', 'RIGHT-TRIM', 'R-INDEX',
-    'ROLES', 'ROUND', 'ROUTINE-LEVEL', 'ROW', 'ROW-HEIGHT-CHARS', 'HEIGHT',
-    'ROW-HEIGHT-PIXELS', 'HEIGHT-P', 'ROW-MARKERS', 'ROW-OF', 'ROW-RESIZABLE',
-    'RULE', 'RUN', 'RUN-PROCEDURE', 'SAVE', 'SAVE-AS', 'SAVE-FILE',
-    'SAX-COMPLETE', 'SAX-COMPLE', 'SAX-COMPLET', 'SAX-PARSE', 'SAX-PARSE-FIRST',
-    'SAX-PARSE-NEXT', 'SAX-PARSER-ERROR', 'SAX-RUNNING', 'SAX-UNINITIALIZED',
-    'SAX-WRITE-BEGIN', 'SAX-WRITE-COMPLETE', 'SAX-WRITE-CONTENT',
-    'SAX-WRITE-ELEMENT', 'SAX-WRITE-ERROR', 'SAX-WRITE-IDLE', 'SAX-WRITER',
-    'SAX-WRITE-TAG', 'SCHEMA', 'SCHEMA-LOCATION', 'SCHEMA-MARSHAL',
-    'SCHEMA-PATH', 'SCREEN', 'SCREEN-IO', 'SCREEN-LINES', 'SCREEN-VALUE',
-    'SCREEN-VAL', 'SCREEN-VALU', 'SCROLL', 'SCROLLABLE', 'SCROLLBAR-HORIZONTAL',
-    'SCROLLBAR-H', 'SCROLLBAR-HO', 'SCROLLBAR-HOR', 'SCROLLBAR-HORI',
-    'SCROLLBAR-HORIZ', 'SCROLLBAR-HORIZO', 'SCROLLBAR-HORIZON',
-    'SCROLLBAR-HORIZONT', 'SCROLLBAR-HORIZONTA', 'SCROLL-BARS',
-    'SCROLLBAR-VERTICAL', 'SCROLLBAR-V', 'SCROLLBAR-VE', 'SCROLLBAR-VER',
-    'SCROLLBAR-VERT', 'SCROLLBAR-VERTI', 'SCROLLBAR-VERTIC',
-    'SCROLLBAR-VERTICA', 'SCROLL-DELTA', 'SCROLLED-ROW-POSITION',
-    'SCROLLED-ROW-POS', 'SCROLLED-ROW-POSI', 'SCROLLED-ROW-POSIT',
-    'SCROLLED-ROW-POSITI', 'SCROLLED-ROW-POSITIO', 'SCROLLING', 'SCROLL-OFFSET',
-    'SCROLL-TO-CURRENT-ROW', 'SCROLL-TO-ITEM', 'SCROLL-TO-I', 'SCROLL-TO-IT',
-    'SCROLL-TO-ITE', 'SCROLL-TO-SELECTED-ROW', 'SDBNAME', 'SEAL',
-    'SEAL-TIMESTAMP', 'SEARCH', 'SEARCH-SELF', 'SEARCH-TARGET', 'SECTION',
-    'SECURITY-POLICY', 'SEEK', 'SELECT', 'SELECTABLE', 'SELECT-ALL', 'SELECTED',
-    'SELECT-FOCUSED-ROW', 'SELECTION', 'SELECTION-END', 'SELECTION-LIST',
-    'SELECTION-START', 'SELECTION-TEXT', 'SELECT-NEXT-ROW', 'SELECT-PREV-ROW',
-    'SELECT-ROW', 'SELF', 'SEND', 'send-sql-statement', 'send-sql', 'SENSITIVE',
-    'SEPARATE-CONNECTION', 'SEPARATOR-FGCOLOR', 'SEPARATORS', 'SERVER',
-    'SERVER-CONNECTION-BOUND', 'SERVER-CONNECTION-BOUND-REQUEST',
-    'SERVER-CONNECTION-CONTEXT', 'SERVER-CONNECTION-ID',
-    'SERVER-OPERATING-MODE', 'SESSION', 'SESSION-ID', 'SET', 'SET-APPL-CONTEXT',
-    'SET-ATTR-CALL-TYPE', 'SET-ATTRIBUTE-NODE', 'SET-BLUE-VALUE', 'SET-BLUE',
-    'SET-BLUE-', 'SET-BLUE-V', 'SET-BLUE-VA', 'SET-BLUE-VAL', 'SET-BLUE-VALU',
-    'SET-BREAK', 'SET-BUFFERS', 'SET-CALLBACK', 'SET-CLIENT', 'SET-COMMIT',
-    'SET-CONTENTS', 'SET-CURRENT-VALUE', 'SET-DB-CLIENT', 'SET-DYNAMIC',
-    'SET-EVENT-MANAGER-OPTION', 'SET-GREEN-VALUE', 'SET-GREEN', 'SET-GREEN-',
-    'SET-GREEN-V', 'SET-GREEN-VA', 'SET-GREEN-VAL', 'SET-GREEN-VALU',
-    'SET-INPUT-SOURCE', 'SET-OPTION', 'SET-OUTPUT-DESTINATION', 'SET-PARAMETER',
-    'SET-POINTER-VALUE', 'SET-PROPERTY', 'SET-RED-VALUE', 'SET-RED', 'SET-RED-',
-    'SET-RED-V', 'SET-RED-VA', 'SET-RED-VAL', 'SET-RED-VALU',
-    'SET-REPOSITIONED-ROW', 'SET-RGB-VALUE', 'SET-ROLLBACK', 'SET-SELECTION',
-    'SET-SIZE', 'SET-SORT-ARROW', 'SETUSERID', 'SETUSER', 'SETUSERI',
-    'SET-WAIT-STATE', 'SHA1-DIGEST', 'SHARED', 'SHARE-LOCK', 'SHARE', 'SHARE-',
-    'SHARE-L', 'SHARE-LO', 'SHARE-LOC', 'SHOW-IN-TASKBAR', 'SHOW-STATS',
-    'SHOW-STAT', 'SIDE-LABEL-HANDLE', 'SIDE-LABEL-H', 'SIDE-LABEL-HA',
-    'SIDE-LABEL-HAN', 'SIDE-LABEL-HAND', 'SIDE-LABEL-HANDL', 'SIDE-LABELS',
-    'SIDE-LAB', 'SIDE-LABE', 'SIDE-LABEL', 'SILENT', 'SIMPLE', 'SINGLE', 'SIZE',
-    'SIZE-CHARS', 'SIZE-C', 'SIZE-CH', 'SIZE-CHA', 'SIZE-CHAR', 'SIZE-PIXELS',
-    'SIZE-P', 'SIZE-PI', 'SIZE-PIX', 'SIZE-PIXE', 'SIZE-PIXEL', 'SKIP',
-    'SKIP-DELETED-RECORD', 'SLIDER', 'SMALL-ICON', 'SMALLINT', 'SMALL-TITLE',
-    'SOME', 'SORT', 'SORT-ASCENDING', 'SORT-NUMBER', 'SOURCE',
-    'SOURCE-PROCEDURE', 'SPACE', 'SQL', 'SQRT', 'SSL-SERVER-NAME', 'STANDALONE',
-    'START', 'START-DOCUMENT', 'START-ELEMENT', 'START-MOVE', 'START-RESIZE',
-    'START-ROW-RESIZE', 'STATE-DETAIL', 'STATIC', 'STATUS', 'STATUS-AREA',
-    'STATUS-AREA-FONT', 'STDCALL', 'STOP', 'STOP-PARSING', 'STOPPED', 'STOPPE',
-    'STORED-PROCEDURE', 'STORED-PROC', 'STORED-PROCE', 'STORED-PROCED',
-    'STORED-PROCEDU', 'STORED-PROCEDUR', 'STREAM', 'STREAM-HANDLE', 'STREAM-IO',
-    'STRETCH-TO-FIT', 'STRICT', 'STRING', 'STRING-VALUE', 'STRING-XREF',
-    'SUB-AVERAGE', 'SUB-AVE', 'SUB-AVER', 'SUB-AVERA', 'SUB-AVERAG',
-    'SUB-COUNT', 'SUB-MAXIMUM', 'SUM-MAX', 'SUM-MAXI', 'SUM-MAXIM',
-    'SUM-MAXIMU', 'SUB-MENU', 'SUBSUB-', 'MINIMUM', 'SUB-MIN', 'SUBSCRIBE',
-    'SUBSTITUTE', 'SUBST', 'SUBSTI', 'SUBSTIT', 'SUBSTITU', 'SUBSTITUT',
-    'SUBSTRING', 'SUBSTR', 'SUBSTRI', 'SUBSTRIN', 'SUB-TOTAL', 'SUBTYPE', 'SUM',
-    'SUPER', 'SUPER-PROCEDURES', 'SUPPRESS-NAMESPACE-PROCESSING',
-    'SUPPRESS-WARNINGS', 'SUPPRESS-W', 'SUPPRESS-WA', 'SUPPRESS-WAR',
-    'SUPPRESS-WARN', 'SUPPRESS-WARNI', 'SUPPRESS-WARNIN', 'SUPPRESS-WARNING',
-    'SYMMETRIC-ENCRYPTION-ALGORITHM', 'SYMMETRIC-ENCRYPTION-IV',
-    'SYMMETRIC-ENCRYPTION-KEY', 'SYMMETRIC-SUPPORT', 'SYSTEM-ALERT-BOXES',
-    'SYSTEM-ALERT', 'SYSTEM-ALERT-', 'SYSTEM-ALERT-B', 'SYSTEM-ALERT-BO',
-    'SYSTEM-ALERT-BOX', 'SYSTEM-ALERT-BOXE', 'SYSTEM-DIALOG', 'SYSTEM-HELP',
-    'SYSTEM-ID', 'TABLE', 'TABLE-HANDLE', 'TABLE-NUMBER', 'TAB-POSITION',
-    'TAB-STOP', 'TARGET', 'TARGET-PROCEDURE', 'TEMP-DIRECTORY', 'TEMP-DIR',
-    'TEMP-DIRE', 'TEMP-DIREC', 'TEMP-DIRECT', 'TEMP-DIRECTO', 'TEMP-DIRECTOR',
-    'TEMP-TABLE', 'TEMP-TABLE-PREPARE', 'TERM', 'TERMINAL', 'TERM', 'TERMI',
-    'TERMIN', 'TERMINA', 'TERMINATE', 'TEXT', 'TEXT-CURSOR', 'TEXT-SEG-GROW',
-    'TEXT-SELECTED', 'THEN', 'THIS-OBJECT', 'THIS-PROCEDURE', 'THREE-D',
-    'THROW', 'THROUGH', 'THRU', 'TIC-MARKS', 'TIME', 'TIME-SOURCE', 'TITLE',
-    'TITLE-BGCOLOR', 'TITLE-BGC', 'TITLE-BGCO', 'TITLE-BGCOL', 'TITLE-BGCOLO',
-    'TITLE-DCOLOR', 'TITLE-DC', 'TITLE-DCO', 'TITLE-DCOL', 'TITLE-DCOLO',
-    'TITLE-FGCOLOR', 'TITLE-FGC', 'TITLE-FGCO', 'TITLE-FGCOL', 'TITLE-FGCOLO',
-    'TITLE-FONT', 'TITLE-FO', 'TITLE-FON', 'TO', 'TODAY', 'TOGGLE-BOX',
-    'TOOLTIP', 'TOOLTIPS', 'TOPIC', 'TOP-NAV-QUERY', 'TOP-ONLY', 'TO-ROWID',
-    'TOTAL', 'TRAILING', 'TRANS', 'TRANSACTION', 'TRANSACTION-MODE',
-    'TRANS-INIT-PROCEDURE', 'TRANSPARENT', 'TRIGGER', 'TRIGGERS', 'TRIM',
-    'TRUE', 'TRUNCATE', 'TRUNC', 'TRUNCA', 'TRUNCAT', 'TYPE', 'TYPE-OF',
-    'UNBOX', 'UNBUFFERED', 'UNBUFF', 'UNBUFFE', 'UNBUFFER', 'UNBUFFERE',
-    'UNDERLINE', 'UNDERL', 'UNDERLI', 'UNDERLIN', 'UNDO', 'UNFORMATTED',
-    'UNFORM', 'UNFORMA', 'UNFORMAT', 'UNFORMATT', 'UNFORMATTE', 'UNION',
-    'UNIQUE', 'UNIQUE-ID', 'UNIQUE-MATCH', 'UNIX', 'UNLESS-HIDDEN', 'UNLOAD',
-    'UNSIGNED-LONG', 'UNSUBSCRIBE', 'UP', 'UPDATE', 'UPDATE-ATTRIBUTE', 'URL',
-    'URL-DECODE', 'URL-ENCODE', 'URL-PASSWORD', 'URL-USERID', 'USE',
-    'USE-DICT-EXPS', 'USE-FILENAME', 'USE-INDEX', 'USER', 'USE-REVVIDEO',
-    'USERID', 'USER-ID', 'USE-TEXT', 'USE-UNDERLINE', 'USE-WIDGET-POOL',
-    'USING', 'V6DISPLAY', 'V6FRAME', 'VALIDATE', 'VALIDATE-EXPRESSION',
-    'VALIDATE-MESSAGE', 'VALIDATE-SEAL', 'VALIDATION-ENABLED', 'VALID-EVENT',
-    'VALID-HANDLE', 'VALID-OBJECT', 'VALUE', 'VALUE-CHANGED', 'VALUES',
-    'VARIABLE', 'VAR', 'VARI', 'VARIA', 'VARIAB', 'VARIABL', 'VERBOSE',
-    'VERSION', 'VERTICAL', 'VERT', 'VERTI', 'VERTIC', 'VERTICA', 'VIEW',
-    'VIEW-AS', 'VIEW-FIRST-COLUMN-ON-REOPEN', 'VIRTUAL-HEIGHT-CHARS',
-    'VIRTUAL-HEIGHT', 'VIRTUAL-HEIGHT-', 'VIRTUAL-HEIGHT-C',
-    'VIRTUAL-HEIGHT-CH', 'VIRTUAL-HEIGHT-CHA', 'VIRTUAL-HEIGHT-CHAR',
-    'VIRTUAL-HEIGHT-PIXELS', 'VIRTUAL-HEIGHT-P', 'VIRTUAL-HEIGHT-PI',
-    'VIRTUAL-HEIGHT-PIX', 'VIRTUAL-HEIGHT-PIXE', 'VIRTUAL-HEIGHT-PIXEL',
-    'VIRTUAL-WIDTH-CHARS', 'VIRTUAL-WIDTH', 'VIRTUAL-WIDTH-', 'VIRTUAL-WIDTH-C',
-    'VIRTUAL-WIDTH-CH', 'VIRTUAL-WIDTH-CHA', 'VIRTUAL-WIDTH-CHAR',
-    'VIRTUAL-WIDTH-PIXELS', 'VIRTUAL-WIDTH-P', 'VIRTUAL-WIDTH-PI',
-    'VIRTUAL-WIDTH-PIX', 'VIRTUAL-WIDTH-PIXE', 'VIRTUAL-WIDTH-PIXEL', 'VISIBLE',
-    'VOID', 'WAIT', 'WAIT-FOR', 'WARNING', 'WEB-CONTEXT', 'WEEKDAY', 'WHEN',
-    'WHERE', 'WHILE', 'WIDGET', 'WIDGET-ENTER', 'WIDGET-E', 'WIDGET-EN',
-    'WIDGET-ENT', 'WIDGET-ENTE', 'WIDGET-ID', 'WIDGET-LEAVE', 'WIDGET-L',
-    'WIDGET-LE', 'WIDGET-LEA', 'WIDGET-LEAV', 'WIDGET-POOL', 'WIDTH',
-    'WIDTH-CHARS', 'WIDTH', 'WIDTH-', 'WIDTH-C', 'WIDTH-CH', 'WIDTH-CHA',
-    'WIDTH-CHAR', 'WIDTH-PIXELS', 'WIDTH-P', 'WIDTH-PI', 'WIDTH-PIX',
-    'WIDTH-PIXE', 'WIDTH-PIXEL', 'WINDOW', 'WINDOW-MAXIMIZED', 'WINDOW-MAXIM',
-    'WINDOW-MAXIMI', 'WINDOW-MAXIMIZ', 'WINDOW-MAXIMIZE', 'WINDOW-MINIMIZED',
-    'WINDOW-MINIM', 'WINDOW-MINIMI', 'WINDOW-MINIMIZ', 'WINDOW-MINIMIZE',
-    'WINDOW-NAME', 'WINDOW-NORMAL', 'WINDOW-STATE', 'WINDOW-STA', 'WINDOW-STAT',
-    'WINDOW-SYSTEM', 'WITH', 'WORD-INDEX', 'WORD-WRAP',
-    'WORK-AREA-HEIGHT-PIXELS', 'WORK-AREA-WIDTH-PIXELS', 'WORK-AREA-X',
-    'WORK-AREA-Y', 'WORKFILE', 'WORK-TABLE', 'WORK-TAB', 'WORK-TABL', 'WRITE',
-    'WRITE-CDATA', 'WRITE-CHARACTERS', 'WRITE-COMMENT', 'WRITE-DATA-ELEMENT',
-    'WRITE-EMPTY-ELEMENT', 'WRITE-ENTITY-REF', 'WRITE-EXTERNAL-DTD',
-    'WRITE-FRAGMENT', 'WRITE-MESSAGE', 'WRITE-PROCESSING-INSTRUCTION',
-    'WRITE-STATUS', 'WRITE-XML', 'WRITE-XMLSCHEMA', 'X', 'XCODE',
-    'XML-DATA-TYPE', 'XML-NODE-TYPE', 'XML-SCHEMA-PATH',
-    'XML-SUPPRESS-NAMESPACE-PROCESSING', 'X-OF', 'XREF', 'XREF-XML', 'Y',
-    'YEAR', 'YEAR-OFFSET', 'YES', 'YES-NO', 'YES-NO-CANCEL', 'Y-OF'
-]
diff --git a/python/ext-libs/pygments/lexers/_phpbuiltins.py b/python/ext-libs/pygments/lexers/_phpbuiltins.py
deleted file mode 100644
index 08eaaf2..0000000
--- a/python/ext-libs/pygments/lexers/_phpbuiltins.py
+++ /dev/null
@@ -1,3787 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._phpbuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file loads the function names and their modules from the
-    php webpage and generates itself.
-
-    Do not alter the MODULES dict by hand!
-
-    WARNING: the generation transfers quite much data over your
-             internet connection. don't run that at home, use
-             a server ;-)
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-
-MODULES = {'.NET': ['dotnet_load'],
- 'APC': ['apc_add',
-         'apc_bin_dump',
-         'apc_bin_dumpfile',
-         'apc_bin_load',
-         'apc_bin_loadfile',
-         'apc_cache_info',
-         'apc_cas',
-         'apc_clear_cache',
-         'apc_compile_file',
-         'apc_dec',
-         'apc_define_constants',
-         'apc_delete_file',
-         'apc_delete',
-         'apc_exists',
-         'apc_fetch',
-         'apc_inc',
-         'apc_load_constants',
-         'apc_sma_info',
-         'apc_store'],
- 'APD': ['apd_breakpoint',
-         'apd_callstack',
-         'apd_clunk',
-         'apd_continue',
-         'apd_croak',
-         'apd_dump_function_table',
-         'apd_dump_persistent_resources',
-         'apd_dump_regular_resources',
-         'apd_echo',
-         'apd_get_active_symbols',
-         'apd_set_pprof_trace',
-         'apd_set_session_trace_socket',
-         'apd_set_session_trace',
-         'apd_set_session',
-         'override_function',
-         'rename_function'],
- 'Aliases and deprecated Mysqli': ['mysqli_bind_param',
-                                   'mysqli_bind_result',
-                                   'mysqli_client_encoding',
-                                   'mysqli_connect',
-                                   'mysqli_disable_reads_from_master',
-                                   'mysqli_disable_rpl_parse',
-                                   'mysqli_enable_reads_from_master',
-                                   'mysqli_enable_rpl_parse',
-                                   'mysqli_escape_string',
-                                   'mysqli_execute',
-                                   'mysqli_fetch',
-                                   'mysqli_get_metadata',
-                                   'mysqli_master_query',
-                                   'mysqli_param_count',
-                                   'mysqli_report',
-                                   'mysqli_rpl_parse_enabled',
-                                   'mysqli_rpl_probe',
-                                   'mysqli_rpl_query_type',
-                                   'mysqli_send_long_data',
-                                   'mysqli_send_query',
-                                   'mysqli_set_opt',
-                                   'mysqli_slave_query'],
- 'Apache': ['apache_child_terminate',
-            'apache_get_modules',
-            'apache_get_version',
-            'apache_getenv',
-            'apache_lookup_uri',
-            'apache_note',
-            'apache_request_headers',
-            'apache_reset_timeout',
-            'apache_response_headers',
-            'apache_setenv',
-            'getallheaders',
-            'virtual'],
- 'Array': ['array_change_key_case',
-           'array_chunk',
-           'array_combine',
-           'array_count_values',
-           'array_diff_assoc',
-           'array_diff_key',
-           'array_diff_uassoc',
-           'array_diff_ukey',
-           'array_diff',
-           'array_fill_keys',
-           'array_fill',
-           'array_filter',
-           'array_flip',
-           'array_intersect_assoc',
-           'array_intersect_key',
-           'array_intersect_uassoc',
-           'array_intersect_ukey',
-           'array_intersect',
-           'array_key_exists',
-           'array_keys',
-           'array_map',
-           'array_merge_recursive',
-           'array_merge',
-           'array_multisort',
-           'array_pad',
-           'array_pop',
-           'array_product',
-           'array_push',
-           'array_rand',
-           'array_reduce',
-           'array_replace_recursive',
-           'array_replace',
-           'array_reverse',
-           'array_search',
-           'array_shift',
-           'array_slice',
-           'array_splice',
-           'array_sum',
-           'array_udiff_assoc',
-           'array_udiff_uassoc',
-           'array_udiff',
-           'array_uintersect_assoc',
-           'array_uintersect_uassoc',
-           'array_uintersect',
-           'array_unique',
-           'array_unshift',
-           'array_values',
-           'array_walk_recursive',
-           'array_walk',
-           'array',
-           'arsort',
-           'asort',
-           'compact',
-           'count',
-           'current',
-           'each',
-           'end',
-           'extract',
-           'in_array',
-           'key',
-           'krsort',
-           'ksort',
-           'list',
-           'natcasesort',
-           'natsort',
-           'next',
-           'pos',
-           'prev',
-           'range',
-           'reset',
-           'rsort',
-           'shuffle',
-           'sizeof',
-           'sort',
-           'uasort',
-           'uksort',
-           'usort'],
- 'BBCode': ['bbcode_add_element',
-            'bbcode_add_smiley',
-            'bbcode_create',
-            'bbcode_destroy',
-            'bbcode_parse',
-            'bbcode_set_arg_parser',
-            'bbcode_set_flags'],
- 'BC Math': ['bcadd',
-             'bccomp',
-             'bcdiv',
-             'bcmod',
-             'bcmul',
-             'bcpow',
-             'bcpowmod',
-             'bcscale',
-             'bcsqrt',
-             'bcsub'],
- 'Bzip2': ['bzclose',
-           'bzcompress',
-           'bzdecompress',
-           'bzerrno',
-           'bzerror',
-           'bzerrstr',
-           'bzflush',
-           'bzopen',
-           'bzread',
-           'bzwrite'],
- 'COM': ['com_addref',
-         'com_create_guid',
-         'com_event_sink',
-         'com_get_active_object',
-         'com_get',
-         'com_invoke',
-         'com_isenum',
-         'com_load_typelib',
-         'com_load',
-         'com_message_pump',
-         'com_print_typeinfo',
-         'com_propget',
-         'com_propput',
-         'com_propset',
-         'com_release',
-         'com_set',
-         'variant_abs',
-         'variant_add',
-         'variant_and',
-         'variant_cast',
-         'variant_cat',
-         'variant_cmp',
-         'variant_date_from_timestamp',
-         'variant_date_to_timestamp',
-         'variant_div',
-         'variant_eqv',
-         'variant_fix',
-         'variant_get_type',
-         'variant_idiv',
-         'variant_imp',
-         'variant_int',
-         'variant_mod',
-         'variant_mul',
-         'variant_neg',
-         'variant_not',
-         'variant_or',
-         'variant_pow',
-         'variant_round',
-         'variant_set_type',
-         'variant_set',
-         'variant_sub',
-         'variant_xor'],
- 'CUBRID': ['cubrid_affected_rows',
-            'cubrid_bind',
-            'cubrid_close_prepare',
-            'cubrid_close_request',
-            'cubrid_col_get',
-            'cubrid_col_size',
-            'cubrid_column_names',
-            'cubrid_column_types',
-            'cubrid_commit',
-            'cubrid_connect_with_url',
-            'cubrid_connect',
-            'cubrid_current_oid',
-            'cubrid_disconnect',
-            'cubrid_drop',
-            'cubrid_error_code_facility',
-            'cubrid_error_code',
-            'cubrid_error_msg',
-            'cubrid_execute',
-            'cubrid_fetch',
-            'cubrid_free_result',
-            'cubrid_get_charset',
-            'cubrid_get_class_name',
-            'cubrid_get_client_info',
-            'cubrid_get_db_parameter',
-            'cubrid_get_server_info',
-            'cubrid_get',
-            'cubrid_insert_id',
-            'cubrid_is_instance',
-            'cubrid_lob_close',
-            'cubrid_lob_export',
-            'cubrid_lob_get',
-            'cubrid_lob_send',
-            'cubrid_lob_size',
-            'cubrid_lock_read',
-            'cubrid_lock_write',
-            'cubrid_move_cursor',
-            'cubrid_num_cols',
-            'cubrid_num_rows',
-            'cubrid_prepare',
-            'cubrid_put',
-            'cubrid_rollback',
-            'cubrid_schema',
-            'cubrid_seq_drop',
-            'cubrid_seq_insert',
-            'cubrid_seq_put',
-            'cubrid_set_add',
-            'cubrid_set_drop',
-            'cubrid_version'],
- 'Cairo': ['cairo_create',
-           'cairo_font_face_get_type',
-           'cairo_font_face_status',
-           'cairo_font_options_create',
-           'cairo_font_options_equal',
-           'cairo_font_options_get_antialias',
-           'cairo_font_options_get_hint_metrics',
-           'cairo_font_options_get_hint_style',
-           'cairo_font_options_get_subpixel_order',
-           'cairo_font_options_hash',
-           'cairo_font_options_merge',
-           'cairo_font_options_set_antialias',
-           'cairo_font_options_set_hint_metrics',
-           'cairo_font_options_set_hint_style',
-           'cairo_font_options_set_subpixel_order',
-           'cairo_font_options_status',
-           'cairo_format_stride_for_width',
-           'cairo_image_surface_create_for_data',
-           'cairo_image_surface_create_from_png',
-           'cairo_image_surface_create',
-           'cairo_image_surface_get_data',
-           'cairo_image_surface_get_format',
-           'cairo_image_surface_get_height',
-           'cairo_image_surface_get_stride',
-           'cairo_image_surface_get_width',
-           'cairo_matrix_create_scale',
-           'cairo_matrix_create_translate',
-           'cairo_matrix_invert',
-           'cairo_matrix_multiply',
-           'cairo_matrix_rotate',
-           'cairo_matrix_transform_distance',
-           'cairo_matrix_transform_point',
-           'cairo_matrix_translate',
-           'cairo_pattern_add_color_stop_rgb',
-           'cairo_pattern_add_color_stop_rgba',
-           'cairo_pattern_create_for_surface',
-           'cairo_pattern_create_linear',
-           'cairo_pattern_create_radial',
-           'cairo_pattern_create_rgb',
-           'cairo_pattern_create_rgba',
-           'cairo_pattern_get_color_stop_count',
-           'cairo_pattern_get_color_stop_rgba',
-           'cairo_pattern_get_extend',
-           'cairo_pattern_get_filter',
-           'cairo_pattern_get_linear_points',
-           'cairo_pattern_get_matrix',
-           'cairo_pattern_get_radial_circles',
-           'cairo_pattern_get_rgba',
-           'cairo_pattern_get_surface',
-           'cairo_pattern_get_type',
-           'cairo_pattern_set_extend',
-           'cairo_pattern_set_filter',
-           'cairo_pattern_set_matrix',
-           'cairo_pattern_status',
-           'cairo_pdf_surface_create',
-           'cairo_pdf_surface_set_size',
-           'cairo_ps_get_levels',
-           'cairo_ps_level_to_string',
-           'cairo_ps_surface_create',
-           'cairo_ps_surface_dsc_begin_page_setup',
-           'cairo_ps_surface_dsc_begin_setup',
-           'cairo_ps_surface_dsc_comment',
-           'cairo_ps_surface_get_eps',
-           'cairo_ps_surface_restrict_to_level',
-           'cairo_ps_surface_set_eps',
-           'cairo_ps_surface_set_size',
-           'cairo_scaled_font_create',
-           'cairo_scaled_font_extents',
-           'cairo_scaled_font_get_ctm',
-           'cairo_scaled_font_get_font_face',
-           'cairo_scaled_font_get_font_matrix',
-           'cairo_scaled_font_get_font_options',
-           'cairo_scaled_font_get_scale_matrix',
-           'cairo_scaled_font_get_type',
-           'cairo_scaled_font_glyph_extents',
-           'cairo_scaled_font_status',
-           'cairo_scaled_font_text_extents',
-           'cairo_surface_copy_page',
-           'cairo_surface_create_similar',
-           'cairo_surface_finish',
-           'cairo_surface_flush',
-           'cairo_surface_get_content',
-           'cairo_surface_get_device_offset',
-           'cairo_surface_get_font_options',
-           'cairo_surface_get_type',
-           'cairo_surface_mark_dirty_rectangle',
-           'cairo_surface_mark_dirty',
-           'cairo_surface_set_device_offset',
-           'cairo_surface_set_fallback_resolution',
-           'cairo_surface_show_page',
-           'cairo_surface_status',
-           'cairo_surface_write_to_png',
-           'cairo_svg_surface_create',
-           'cairo_svg_surface_restrict_to_version',
-           'cairo_svg_version_to_string'],
- 'Calendar': ['cal_days_in_month',
-              'cal_from_jd',
-              'cal_info',
-              'cal_to_jd',
-              'easter_date',
-              'easter_days',
-              'FrenchToJD',
-              'GregorianToJD',
-              'JDDayOfWeek',
-              'JDMonthName',
-              'JDToFrench',
-              'JDToGregorian',
-              'jdtojewish',
-              'JDToJulian',
-              'jdtounix',
-              'JewishToJD',
-              'JulianToJD',
-              'unixtojd'],
- 'Classes/Object': ['call_user_method_array',
-                    'call_user_method',
-                    'class_alias',
-                    'class_exists',
-                    'get_called_class',
-                    'get_class_methods',
-                    'get_class_vars',
-                    'get_class',
-                    'get_declared_classes',
-                    'get_declared_interfaces',
-                    'get_object_vars',
-                    'get_parent_class',
-                    'interface_exists',
-                    'is_a',
-                    'is_subclass_of',
-                    'method_exists',
-                    'property_exists'],
- 'Classkit': ['classkit_import',
-              'classkit_method_add',
-              'classkit_method_copy',
-              'classkit_method_redefine',
-              'classkit_method_remove',
-              'classkit_method_rename'],
- 'Crack': ['crack_check',
-           'crack_closedict',
-           'crack_getlastmessage',
-           'crack_opendict'],
- 'Ctype': ['ctype_alnum',
-           'ctype_alpha',
-           'ctype_cntrl',
-           'ctype_digit',
-           'ctype_graph',
-           'ctype_lower',
-           'ctype_print',
-           'ctype_punct'],
- 'Cyrus': ['cyrus_authenticate',
-           'cyrus_bind',
-           'cyrus_close',
-           'cyrus_connect',
-           'cyrus_query',
-           'cyrus_unbind'],
- 'DB++': ['dbplus_add',
-          'dbplus_aql',
-          'dbplus_chdir',
-          'dbplus_close',
-          'dbplus_curr',
-          'dbplus_errcode',
-          'dbplus_errno',
-          'dbplus_find',
-          'dbplus_first',
-          'dbplus_flush',
-          'dbplus_freealllocks',
-          'dbplus_freelock',
-          'dbplus_freerlocks',
-          'dbplus_getlock',
-          'dbplus_getunique',
-          'dbplus_info',
-          'dbplus_last',
-          'dbplus_lockrel',
-          'dbplus_next',
-          'dbplus_open',
-          'dbplus_prev',
-          'dbplus_rchperm',
-          'dbplus_rcreate',
-          'dbplus_rcrtexact',
-          'dbplus_rcrtlike',
-          'dbplus_resolve',
-          'dbplus_restorepos',
-          'dbplus_rkeys',
-          'dbplus_ropen',
-          'dbplus_rquery',
-          'dbplus_rrename',
-          'dbplus_rsecindex',
-          'dbplus_runlink',
-          'dbplus_rzap',
-          'dbplus_savepos',
-          'dbplus_setindex',
-          'dbplus_setindexbynumber',
-          'dbplus_sql',
-          'dbplus_tcl',
-          'dbplus_tremove',
-          'dbplus_undo',
-          'dbplus_undoprepare',
-          'dbplus_unlockrel',
-          'dbplus_unselect',
-          'dbplus_update',
-          'dbplus_xlockrel',
-          'dbplus_xunlockrel'],
- 'DBA': ['dba_close',
-         'dba_delete',
-         'dba_exists',
-         'dba_fetch',
-         'dba_firstkey',
-         'dba_handlers',
-         'dba_insert',
-         'dba_key_split',
-         'dba_list',
-         'dba_nextkey',
-         'dba_open',
-         'dba_optimize',
-         'dba_popen',
-         'dba_replace',
-         'dba_sync'],
- 'DOM': ['dom_import_simplexml'],
- 'DOM XML (PHP 4)': ['domxml_new_doc',
-                     'domxml_open_file',
-                     'domxml_open_mem',
-                     'domxml_version',
-                     'domxml_xmltree',
-                     'domxml_xslt_stylesheet_doc',
-                     'domxml_xslt_stylesheet_file',
-                     'domxml_xslt_stylesheet',
-                     'domxml_xslt_version',
-                     'xpath_eval_expression',
-                     'xpath_eval',
-                     'xpath_new_context',
-                     'xpath_register_ns_auto',
-                     'xpath_register_ns',
-                     'xptr_eval',
-                     'xptr_new_context'],
- 'Date/Time': ['checkdate',
-               'date_add',
-               'date_create_from_format',
-               'date_create',
-               'date_date_set',
-               'date_default_timezone_get',
-               'date_default_timezone_set',
-               'date_diff',
-               'date_format',
-               'date_get_last_errors',
-               'date_interval_create_from_date_string',
-               'date_interval_format',
-               'date_isodate_set',
-               'date_modify',
-               'date_offset_get',
-               'date_parse_from_format',
-               'date_parse',
-               'date_sub',
-               'date_sun_info',
-               'date_sunrise',
-               'date_sunset',
-               'date_time_set',
-               'date_timestamp_get',
-               'date_timestamp_set',
-               'date_timezone_get',
-               'date_timezone_set',
-               'date',
-               'getdate',
-               'gettimeofday',
-               'gmdate',
-               'gmmktime',
-               'gmstrftime',
-               'idate',
-               'localtime',
-               'microtime',
-               'mktime',
-               'strftime',
-               'strptime',
-               'strtotime',
-               'time',
-               'timezone_abbreviations_list',
-               'timezone_identifiers_list',
-               'timezone_location_get',
-               'timezone_name_from_abbr',
-               'timezone_name_get',
-               'timezone_offset_get',
-               'timezone_open',
-               'timezone_transitions_get',
-               'timezone_version_get'],
- 'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
- 'Directory': ['chdir',
-               'chroot',
-               'closedir',
-               'getcwd',
-               'opendir',
-               'readdir',
-               'rewinddir',
-               'scandir'],
- 'Enchant': ['enchant_broker_describe',
-             'enchant_broker_dict_exists',
-             'enchant_broker_free_dict',
-             'enchant_broker_free',
-             'enchant_broker_get_error',
-             'enchant_broker_init',
-             'enchant_broker_list_dicts',
-             'enchant_broker_request_dict',
-             'enchant_broker_request_pwl_dict',
-             'enchant_broker_set_ordering',
-             'enchant_dict_add_to_personal',
-             'enchant_dict_add_to_session',
-             'enchant_dict_check',
-             'enchant_dict_describe',
-             'enchant_dict_get_error',
-             'enchant_dict_is_in_session',
-             'enchant_dict_quick_check',
-             'enchant_dict_store_replacement',
-             'enchant_dict_suggest'],
- 'Error Handling': ['debug_backtrace',
-                    'debug_print_backtrace',
-                    'error_get_last',
-                    'error_log',
-                    'error_reporting',
-                    'restore_error_handler',
-                    'restore_exception_handler',
-                    'set_error_handler',
-                    'set_exception_handler',
-                    'trigger_error',
-                    'user_error'],
- 'Exif': ['exif_imagetype',
-          'exif_read_data',
-          'exif_tagname',
-          'exif_thumbnail',
-          'read_exif_data'],
- 'Expect': ['expect_expectl'],
- 'FAM': ['fam_cancel_monitor',
-         'fam_close',
-         'fam_monitor_collection',
-         'fam_monitor_directory',
-         'fam_monitor_file',
-         'fam_next_event',
-         'fam_open',
-         'fam_pending',
-         'fam_resume_monitor',
-         'fam_suspend_monitor'],
- 'FDF': ['fdf_add_doc_javascript',
-         'fdf_add_template',
-         'fdf_close',
-         'fdf_create',
-         'fdf_enum_values',
-         'fdf_errno',
-         'fdf_error',
-         'fdf_get_ap',
-         'fdf_get_attachment',
-         'fdf_get_encoding',
-         'fdf_get_file',
-         'fdf_get_flags',
-         'fdf_get_opt',
-         'fdf_get_status',
-         'fdf_get_value',
-         'fdf_get_version',
-         'fdf_header',
-         'fdf_next_field_name',
-         'fdf_open_string',
-         'fdf_open',
-         'fdf_remove_item',
-         'fdf_save_string',
-         'fdf_save',
-         'fdf_set_ap',
-         'fdf_set_encoding',
-         'fdf_set_file',
-         'fdf_set_flags',
-         'fdf_set_javascript_action',
-         'fdf_set_on_import_javascript',
-         'fdf_set_opt',
-         'fdf_set_status',
-         'fdf_set_submit_form_action',
-         'fdf_set_target_frame',
-         'fdf_set_value',
-         'fdf_set_version'],
- 'FTP': ['ftp_alloc',
-         'ftp_cdup',
-         'ftp_chdir',
-         'ftp_chmod',
-         'ftp_close',
-         'ftp_connect',
-         'ftp_delete',
-         'ftp_exec',
-         'ftp_fget',
-         'ftp_fput',
-         'ftp_get_option',
-         'ftp_get',
-         'ftp_login',
-         'ftp_mdtm',
-         'ftp_mkdir',
-         'ftp_nb_continue',
-         'ftp_nb_fget',
-         'ftp_nb_fput',
-         'ftp_nb_get',
-         'ftp_nb_put',
-         'ftp_nlist',
-         'ftp_pasv',
-         'ftp_put',
-         'ftp_pwd',
-         'ftp_quit',
-         'ftp_raw',
-         'ftp_rawlist',
-         'ftp_rename',
-         'ftp_rmdir',
-         'ftp_set_option',
-         'ftp_site',
-         'ftp_size',
-         'ftp_ssl_connect',
-         'ftp_systype'],
- 'Fileinfo': ['finfo_buffer',
-              'finfo_close',
-              'finfo_file',
-              'finfo_open',
-              'finfo_set_flags',
-              'mime_content_type'],
- 'Filesystem': ['basename',
-                'chgrp',
-                'chmod',
-                'chown',
-                'clearstatcache',
-                'copy',
-                'dirname',
-                'disk_free_space',
-                'disk_total_space',
-                'diskfreespace',
-                'fclose',
-                'feof',
-                'fflush',
-                'fgetc',
-                'fgetcsv',
-                'fgets',
-                'fgetss',
-                'file_exists',
-                'file_get_contents',
-                'file_put_contents',
-                'file',
-                'fileatime',
-                'filectime',
-                'filegroup',
-                'fileinode',
-                'filemtime',
-                'fileowner',
-                'fileperms',
-                'filesize',
-                'filetype',
-                'flock',
-                'fnmatch',
-                'fopen',
-                'fpassthru',
-                'fputcsv',
-                'fputs',
-                'fread',
-                'fscanf',
-                'fseek',
-                'fstat',
-                'ftell',
-                'ftruncate',
-                'fwrite',
-                'glob',
-                'is_dir',
-                'is_executable',
-                'is_file',
-                'is_link',
-                'is_readable',
-                'is_uploaded_file',
-                'is_writable',
-                'is_writeable',
-                'lchgrp',
-                'lchown',
-                'link',
-                'linkinfo',
-                'lstat',
-                'mkdir',
-                'move_uploaded_file',
-                'parse_ini_file',
-                'parse_ini_string',
-                'pathinfo',
-                'pclose',
-                'popen',
-                'readfile',
-                'readlink',
-                'realpath_cache_get',
-                'realpath_cache_size',
-                'realpath',
-                'rename',
-                'rewind',
-                'rmdir',
-                'set_file_buffer',
-                'stat',
-                'symlink',
-                'tempnam',
-                'tmpfile',
-                'touch',
-                'umask',
-                'unlink'],
- 'Filter': ['filter_has_var',
-            'filter_id',
-            'filter_input_array',
-            'filter_input',
-            'filter_list',
-            'filter_var_array',
-            'filter_var'],
- 'Firebird/InterBase': ['ibase_add_user',
-                        'ibase_affected_rows',
-                        'ibase_backup',
-                        'ibase_blob_add',
-                        'ibase_blob_cancel',
-                        'ibase_blob_close',
-                        'ibase_blob_create',
-                        'ibase_blob_echo',
-                        'ibase_blob_get',
-                        'ibase_blob_import',
-                        'ibase_blob_info',
-                        'ibase_blob_open',
-                        'ibase_close',
-                        'ibase_commit_ret',
-                        'ibase_commit',
-                        'ibase_connect',
-                        'ibase_db_info',
-                        'ibase_delete_user',
-                        'ibase_drop_db',
-                        'ibase_errcode',
-                        'ibase_errmsg',
-                        'ibase_execute',
-                        'ibase_fetch_assoc',
-                        'ibase_fetch_object',
-                        'ibase_fetch_row',
-                        'ibase_field_info',
-                        'ibase_free_event_handler',
-                        'ibase_free_query',
-                        'ibase_free_result',
-                        'ibase_gen_id',
-                        'ibase_maintain_db',
-                        'ibase_modify_user',
-                        'ibase_name_result',
-                        'ibase_num_fields',
-                        'ibase_num_params',
-                        'ibase_param_info',
-                        'ibase_pconnect',
-                        'ibase_prepare',
-                        'ibase_query',
-                        'ibase_restore',
-                        'ibase_rollback_ret',
-                        'ibase_rollback',
-                        'ibase_server_info',
-                        'ibase_service_attach',
-                        'ibase_service_detach',
-                        'ibase_set_event_handler',
-                        'ibase_timefmt',
-                        'ibase_trans',
-                        'ibase_wait_event'],
- 'FriBiDi': ['fribidi_log2vis'],
- 'FrontBase': ['fbsql_affected_rows',
-               'fbsql_autocommit',
-               'fbsql_blob_size',
-               'fbsql_change_user',
-               'fbsql_clob_size',
-               'fbsql_close',
-               'fbsql_commit',
-               'fbsql_connect',
-               'fbsql_create_blob',
-               'fbsql_create_clob',
-               'fbsql_create_db',
-               'fbsql_data_seek',
-               'fbsql_database_password',
-               'fbsql_database',
-               'fbsql_db_query',
-               'fbsql_db_status',
-               'fbsql_drop_db',
-               'fbsql_errno',
-               'fbsql_error',
-               'fbsql_fetch_array',
-               'fbsql_fetch_assoc',
-               'fbsql_fetch_field',
-               'fbsql_fetch_lengths',
-               'fbsql_fetch_object',
-               'fbsql_fetch_row',
-               'fbsql_field_flags',
-               'fbsql_field_len',
-               'fbsql_field_name',
-               'fbsql_field_seek',
-               'fbsql_field_table',
-               'fbsql_field_type',
-               'fbsql_free_result',
-               'fbsql_get_autostart_info',
-               'fbsql_hostname',
-               'fbsql_insert_id',
-               'fbsql_list_dbs',
-               'fbsql_list_fields',
-               'fbsql_list_tables',
-               'fbsql_next_result',
-               'fbsql_num_fields',
-               'fbsql_num_rows',
-               'fbsql_password',
-               'fbsql_pconnect',
-               'fbsql_query',
-               'fbsql_read_blob',
-               'fbsql_read_clob',
-               'fbsql_result',
-               'fbsql_rollback',
-               'fbsql_rows_fetched',
-               'fbsql_select_db',
-               'fbsql_set_characterset',
-               'fbsql_set_lob_mode',
-               'fbsql_set_password',
-               'fbsql_set_transaction',
-               'fbsql_start_db',
-               'fbsql_stop_db',
-               'fbsql_table_name',
-               'fbsql_tablename',
-               'fbsql_username',
-               'fbsql_warnings'],
- 'Function handling': ['call_user_func_array',
-                       'call_user_func',
-                       'create_function',
-                       'forward_static_call_array',
-                       'forward_static_call',
-                       'func_get_arg',
-                       'func_get_args',
-                       'func_num_args',
-                       'function_exists',
-                       'get_defined_functions',
-                       'register_shutdown_function',
-                       'register_tick_function',
-                       'unregister_tick_function'],
- 'GD and Image': ['gd_info',
-                  'getimagesize',
-                  'image_type_to_extension',
-                  'image_type_to_mime_type'],
- 'GMP': ['gmp_abs',
-         'gmp_add',
-         'gmp_and',
-         'gmp_clrbit',
-         'gmp_cmp',
-         'gmp_com',
-         'gmp_div_q',
-         'gmp_div_qr',
-         'gmp_div_r',
-         'gmp_div',
-         'gmp_divexact',
-         'gmp_fact',
-         'gmp_gcd',
-         'gmp_gcdext',
-         'gmp_hamdist',
-         'gmp_init',
-         'gmp_intval',
-         'gmp_invert',
-         'gmp_jacobi',
-         'gmp_legendre',
-         'gmp_mod',
-         'gmp_mul',
-         'gmp_neg',
-         'gmp_nextprime',
-         'gmp_or',
-         'gmp_perfect_square',
-         'gmp_popcount',
-         'gmp_pow',
-         'gmp_powm',
-         'gmp_prob_prime',
-         'gmp_random',
-         'gmp_scan0',
-         'gmp_scan1',
-         'gmp_setbit',
-         'gmp_sign',
-         'gmp_sqrt',
-         'gmp_sqrtrem',
-         'gmp_strval',
-         'gmp_sub',
-         'gmp_testbit',
-         'gmp_xor'],
- 'GeoIP': ['geoip_continent_code_by_name',
-           'geoip_country_code_by_name',
-           'geoip_country_code3_by_name',
-           'geoip_country_name_by_name',
-           'geoip_database_info',
-           'geoip_db_avail',
-           'geoip_db_filename',
-           'geoip_db_get_all_info',
-           'geoip_id_by_name',
-           'geoip_isp_by_name',
-           'geoip_org_by_name',
-           'geoip_record_by_name',
-           'geoip_region_by_name',
-           'geoip_region_name_by_code',
-           'geoip_time_zone_by_country_and_region'],
- 'Gettext': ['bind_textdomain_codeset',
-             'bindtextdomain',
-             'dcgettext',
-             'dcngettext',
-             'dgettext',
-             'dngettext',
-             'gettext',
-             'ngettext',
-             'textdomain'],
- 'GnuPG': ['gnupg_adddecryptkey',
-           'gnupg_addencryptkey',
-           'gnupg_addsignkey',
-           'gnupg_cleardecryptkeys',
-           'gnupg_clearencryptkeys',
-           'gnupg_clearsignkeys',
-           'gnupg_decrypt',
-           'gnupg_decryptverify',
-           'gnupg_encrypt',
-           'gnupg_encryptsign',
-           'gnupg_export',
-           'gnupg_geterror',
-           'gnupg_getprotocol',
-           'gnupg_import',
-           'gnupg_init',
-           'gnupg_keyinfo',
-           'gnupg_setarmor',
-           'gnupg_seterrormode',
-           'gnupg_setsignmode',
-           'gnupg_sign',
-           'gnupg_verify'],
- 'Gopher': ['gopher_parsedir'],
- 'Grapheme': ['grapheme_extract',
-              'grapheme_stripos',
-              'grapheme_stristr',
-              'grapheme_strlen',
-              'grapheme_strpos',
-              'grapheme_strripos',
-              'grapheme_strrpos',
-              'grapheme_strstr',
-              'grapheme_substr'],
- 'Gupnp': ['gupnp_context_get_host_ip',
-           'gupnp_context_get_port',
-           'gupnp_context_get_subscription_timeout',
-           'gupnp_context_host_path',
-           'gupnp_context_new',
-           'gupnp_context_set_subscription_timeout',
-           'gupnp_context_timeout_add',
-           'gupnp_context_unhost_path',
-           'gupnp_control_point_browse_start',
-           'gupnp_control_point_browse_stop',
-           'gupnp_control_point_callback_set',
-           'gupnp_control_point_new',
-           'gupnp_device_action_callback_set',
-           'gupnp_device_info_get_service',
-           'gupnp_device_info_get',
-           'gupnp_root_device_get_available',
-           'gupnp_root_device_get_relative_location',
-           'gupnp_root_device_new',
-           'gupnp_root_device_set_available',
-           'gupnp_root_device_start',
-           'gupnp_root_device_stop',
-           'gupnp_service_action_get',
-           'gupnp_service_action_return_error',
-           'gupnp_service_action_return',
-           'gupnp_service_action_set',
-           'gupnp_service_freeze_notify',
-           'gupnp_service_info_get_introspection',
-           'gupnp_service_info_get',
-           'gupnp_service_introspection_get_state_variable',
-           'gupnp_service_notify',
-           'gupnp_service_proxy_action_get',
-           'gupnp_service_proxy_action_set',
-           'gupnp_service_proxy_add_notify',
-           'gupnp_service_proxy_callback_set',
-           'gupnp_service_proxy_get_subscribed',
-           'gupnp_service_proxy_remove_notify',
-           'gupnp_service_proxy_set_subscribed',
-           'gupnp_service_thaw_notify'],
- 'HTTP': ['http_cache_etag',
-          'http_cache_last_modified',
-          'http_chunked_decode',
-          'http_deflate',
-          'http_inflate',
-          'http_build_cookie',
-          'http_date',
-          'http_get_request_body_stream',
-          'http_get_request_body',
-          'http_get_request_headers',
-          'http_match_etag',
-          'http_match_modified',
-          'http_match_request_header',
-          'http_support',
-          'http_negotiate_charset',
-          'http_negotiate_content_type',
-          'http_negotiate_language',
-          'ob_deflatehandler',
-          'ob_etaghandler',
-          'ob_inflatehandler',
-          'http_parse_cookie',
-          'http_parse_headers',
-          'http_parse_message',
-          'http_parse_params',
-          'http_persistent_handles_clean',
-          'http_persistent_handles_count',
-          'http_persistent_handles_ident',
-          'http_get',
-          'http_head',
-          'http_post_data',
-          'http_post_fields',
-          'http_put_data',
-          'http_put_file',
-          'http_put_stream',
-          'http_request_body_encode',
-          'http_request_method_exists',
-          'http_request_method_name',
-          'http_request_method_register',
-          'http_request_method_unregister',
-          'http_request',
-          'http_redirect',
-          'http_send_content_disposition',
-          'http_send_content_type',
-          'http_send_data',
-          'http_send_file',
-          'http_send_last_modified',
-          'http_send_status',
-          'http_send_stream',
-          'http_throttle',
-          'http_build_str',
-          'http_build_url'],
- 'Hash': ['hash_algos',
-          'hash_copy',
-          'hash_file',
-          'hash_final',
-          'hash_hmac_file',
-          'hash_hmac',
-          'hash_init',
-          'hash_update_file',
-          'hash_update_stream',
-          'hash_update',
-          'hash'],
- 'Hyperwave': ['hw_Array2Objrec',
-               'hw_changeobject',
-               'hw_Children',
-               'hw_ChildrenObj',
-               'hw_Close',
-               'hw_Connect',
-               'hw_connection_info',
-               'hw_cp',
-               'hw_Deleteobject',
-               'hw_DocByAnchor',
-               'hw_DocByAnchorObj',
-               'hw_Document_Attributes',
-               'hw_Document_BodyTag',
-               'hw_Document_Content',
-               'hw_Document_SetContent',
-               'hw_Document_Size',
-               'hw_dummy',
-               'hw_EditText',
-               'hw_Error',
-               'hw_ErrorMsg',
-               'hw_Free_Document',
-               'hw_GetAnchors',
-               'hw_GetAnchorsObj',
-               'hw_GetAndLock',
-               'hw_GetChildColl',
-               'hw_GetChildCollObj',
-               'hw_GetChildDocColl',
-               'hw_GetChildDocCollObj',
-               'hw_GetObject',
-               'hw_GetObjectByQuery',
-               'hw_GetObjectByQueryColl',
-               'hw_GetObjectByQueryCollObj',
-               'hw_GetObjectByQueryObj',
-               'hw_GetParents',
-               'hw_GetParentsObj',
-               'hw_getrellink',
-               'hw_GetRemote',
-               'hw_getremotechildren',
-               'hw_GetSrcByDestObj',
-               'hw_GetText',
-               'hw_getusername',
-               'hw_Identify',
-               'hw_InCollections',
-               'hw_Info',
-               'hw_InsColl',
-               'hw_InsDoc',
-               'hw_insertanchors',
-               'hw_InsertDocument',
-               'hw_InsertObject',
-               'hw_mapid',
-               'hw_Modifyobject',
-               'hw_mv',
-               'hw_New_Document',
-               'hw_objrec2array',
-               'hw_Output_Document',
-               'hw_pConnect',
-               'hw_PipeDocument',
-               'hw_Root',
-               'hw_setlinkroot',
-               'hw_stat',
-               'hw_Unlock',
-               'hw_Who'],
- 'Hyperwave API': ['hw_api_attribute',
-                   'hwapi_hgcsp',
-                   'hw_api_content',
-                   'hw_api_object'],
- 'IBM DB2': ['db2_autocommit',
-             'db2_bind_param',
-             'db2_client_info',
-             'db2_close',
-             'db2_column_privileges',
-             'db2_columns',
-             'db2_commit',
-             'db2_conn_error',
-             'db2_conn_errormsg',
-             'db2_connect',
-             'db2_cursor_type',
-             'db2_escape_string',
-             'db2_exec',
-             'db2_execute',
-             'db2_fetch_array',
-             'db2_fetch_assoc',
-             'db2_fetch_both',
-             'db2_fetch_object',
-             'db2_fetch_row',
-             'db2_field_display_size',
-             'db2_field_name',
-             'db2_field_num',
-             'db2_field_precision',
-             'db2_field_scale',
-             'db2_field_type',
-             'db2_field_width',
-             'db2_foreign_keys',
-             'db2_free_result',
-             'db2_free_stmt',
-             'db2_get_option',
-             'db2_last_insert_id'],
- 'ID3': ['id3_get_frame_long_name',
-         'id3_get_frame_short_name',
-         'id3_get_genre_id',
-         'id3_get_genre_list',
-         'id3_get_genre_name',
-         'id3_get_tag',
-         'id3_get_version',
-         'id3_remove_tag',
-         'id3_set_tag'],
- 'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
- 'IIS': ['iis_add_server',
-         'iis_get_dir_security',
-         'iis_get_script_map',
-         'iis_get_server_by_comment',
-         'iis_get_server_by_path',
-         'iis_get_server_rights',
-         'iis_get_service_state',
-         'iis_remove_server',
-         'iis_set_app_settings',
-         'iis_set_dir_security',
-         'iis_set_script_map',
-         'iis_set_server_rights',
-         'iis_start_server',
-         'iis_start_service',
-         'iis_stop_server',
-         'iis_stop_service'],
- 'IMAP': ['imap_8bit',
-          'imap_alerts',
-          'imap_append',
-          'imap_base64',
-          'imap_binary',
-          'imap_body',
-          'imap_bodystruct',
-          'imap_check',
-          'imap_clearflag_full',
-          'imap_close',
-          'imap_createmailbox',
-          'imap_delete',
-          'imap_deletemailbox',
-          'imap_errors',
-          'imap_expunge',
-          'imap_fetch_overview',
-          'imap_fetchbody',
-          'imap_fetchheader',
-          'imap_fetchmime',
-          'imap_fetchstructure',
-          'imap_gc',
-          'imap_get_quota',
-          'imap_get_quotaroot',
-          'imap_getacl',
-          'imap_getmailboxes',
-          'imap_getsubscribed',
-          'imap_header',
-          'imap_headerinfo',
-          'imap_headers',
-          'imap_last_error',
-          'imap_list',
-          'imap_listmailbox',
-          'imap_listscan',
-          'imap_listsubscribed',
-          'imap_lsub',
-          'imap_mail_compose',
-          'imap_mail_copy',
-          'imap_mail_move',
-          'imap_mail',
-          'imap_mailboxmsginfo',
-          'imap_mime_header_decode',
-          'imap_msgno',
-          'imap_num_msg',
-          'imap_num_recent',
-          'imap_open',
-          'imap_ping',
-          'imap_qprint',
-          'imap_renamemailbox',
-          'imap_reopen',
-          'imap_rfc822_parse_adrlist',
-          'imap_rfc822_parse_headers',
-          'imap_rfc822_write_address',
-          'imap_savebody',
-          'imap_scanmailbox',
-          'imap_search',
-          'imap_set_quota',
-          'imap_setacl',
-          'imap_setflag_full',
-          'imap_sort',
-          'imap_status',
-          'imap_subscribe',
-          'imap_thread',
-          'imap_timeout',
-          'imap_uid',
-          'imap_undelete',
-          'imap_unsubscribe',
-          'imap_utf7_decode',
-          'imap_utf7_encode',
-          'imap_utf8'],
- 'Informix': ['ifx_affected_rows',
-              'ifx_blobinfile_mode',
-              'ifx_byteasvarchar',
-              'ifx_close',
-              'ifx_connect',
-              'ifx_copy_blob',
-              'ifx_create_blob',
-              'ifx_create_char',
-              'ifx_do',
-              'ifx_error',
-              'ifx_errormsg',
-              'ifx_fetch_row',
-              'ifx_fieldproperties',
-              'ifx_fieldtypes',
-              'ifx_free_blob',
-              'ifx_free_char',
-              'ifx_free_result',
-              'ifx_get_blob',
-              'ifx_get_char',
-              'ifx_getsqlca',
-              'ifx_htmltbl_result',
-              'ifx_nullformat',
-              'ifx_num_fields',
-              'ifx_num_rows',
-              'ifx_pconnect',
-              'ifx_prepare',
-              'ifx_query',
-              'ifx_textasvarchar',
-              'ifx_update_blob',
-              'ifx_update_char',
-              'ifxus_close_slob',
-              'ifxus_create_slob',
-              'ifxus_free_slob',
-              'ifxus_open_slob',
-              'ifxus_read_slob',
-              'ifxus_seek_slob',
-              'ifxus_tell_slob',
-              'ifxus_write_slob'],
- 'Ingres': ['ingres_autocommit_state',
-            'ingres_autocommit',
-            'ingres_charset',
-            'ingres_close',
-            'ingres_commit',
-            'ingres_connect',
-            'ingres_cursor',
-            'ingres_errno',
-            'ingres_error',
-            'ingres_errsqlstate',
-            'ingres_escape_string',
-            'ingres_execute',
-            'ingres_fetch_array',
-            'ingres_fetch_assoc',
-            'ingres_fetch_object',
-            'ingres_fetch_proc_return',
-            'ingres_fetch_row',
-            'ingres_field_length',
-            'ingres_field_name',
-            'ingres_field_nullable',
-            'ingres_field_precision',
-            'ingres_field_scale',
-            'ingres_field_type',
-            'ingres_free_result',
-            'ingres_next_error',
-            'ingres_num_fields',
-            'ingres_num_rows',
-            'ingres_pconnect',
-            'ingres_prepare',
-            'ingres_query',
-            'ingres_result_seek',
-            'ingres_rollback',
-            'ingres_set_environment',
-            'ingres_unbuffered_query'],
- 'Inotify': ['inotify_add_watch',
-             'inotify_init',
-             'inotify_queue_len',
-             'inotify_read',
-             'inotify_rm_watch'],
- 'JSON': ['json_decode', 'json_encode', 'json_last_error'],
- 'Java': ['java_last_exception_clear', 'java_last_exception_get'],
- 'Judy': ['judy_type', 'judy_version'],
- 'KADM5': ['kadm5_chpass_principal',
-           'kadm5_create_principal',
-           'kadm5_delete_principal',
-           'kadm5_destroy',
-           'kadm5_flush',
-           'kadm5_get_policies',
-           'kadm5_get_principal',
-           'kadm5_get_principals',
-           'kadm5_init_with_password',
-           'kadm5_modify_principal'],
- 'LDAP': ['ldap_8859_to_t61',
-          'ldap_add',
-          'ldap_bind',
-          'ldap_close',
-          'ldap_compare',
-          'ldap_connect',
-          'ldap_count_entries',
-          'ldap_delete',
-          'ldap_dn2ufn',
-          'ldap_err2str',
-          'ldap_errno',
-          'ldap_error',
-          'ldap_explode_dn',
-          'ldap_first_attribute',
-          'ldap_first_entry',
-          'ldap_first_reference',
-          'ldap_free_result',
-          'ldap_get_attributes',
-          'ldap_get_dn',
-          'ldap_get_entries',
-          'ldap_get_option',
-          'ldap_get_values_len',
-          'ldap_get_values',
-          'ldap_list',
-          'ldap_mod_add',
-          'ldap_mod_del',
-          'ldap_mod_replace',
-          'ldap_modify',
-          'ldap_next_attribute',
-          'ldap_next_entry',
-          'ldap_next_reference',
-          'ldap_parse_reference',
-          'ldap_parse_result',
-          'ldap_read',
-          'ldap_rename',
-          'ldap_sasl_bind',
-          'ldap_search',
-          'ldap_set_option',
-          'ldap_set_rebind_proc',
-          'ldap_sort',
-          'ldap_start_tls',
-          'ldap_t61_to_8859',
-          'ldap_unbind'],
- 'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
- 'Libevent': ['event_add',
-              'event_base_free',
-              'event_base_loop',
-              'event_base_loopbreak',
-              'event_base_loopexit',
-              'event_base_new',
-              'event_base_priority_init',
-              'event_base_set',
-              'event_buffer_base_set',
-              'event_buffer_disable',
-              'event_buffer_enable',
-              'event_buffer_fd_set',
-              'event_buffer_free',
-              'event_buffer_new',
-              'event_buffer_priority_set',
-              'event_buffer_read',
-              'event_buffer_set_callback',
-              'event_buffer_timeout_set',
-              'event_buffer_watermark_set',
-              'event_buffer_write',
-              'event_del',
-              'event_free',
-              'event_new',
-              'event_set'],
- 'Lotus Notes': ['notes_body',
-                 'notes_copy_db',
-                 'notes_create_db',
-                 'notes_create_note',
-                 'notes_drop_db',
-                 'notes_find_note',
-                 'notes_header_info',
-                 'notes_list_msgs',
-                 'notes_mark_read',
-                 'notes_mark_unread',
-                 'notes_nav_create',
-                 'notes_search',
-                 'notes_unread',
-                 'notes_version'],
- 'MCVE': ['m_checkstatus',
-          'm_completeauthorizations',
-          'm_connect',
-          'm_connectionerror',
-          'm_deletetrans',
-          'm_destroyconn',
-          'm_destroyengine',
-          'm_getcell',
-          'm_getcellbynum',
-          'm_getcommadelimited',
-          'm_getheader',
-          'm_initconn',
-          'm_initengine',
-          'm_iscommadelimited',
-          'm_maxconntimeout',
-          'm_monitor',
-          'm_numcolumns',
-          'm_numrows',
-          'm_parsecommadelimited',
-          'm_responsekeys'],
- 'Mail': ['ezmlm_hash', 'mail'],
- 'Mailparse': ['mailparse_determine_best_xfer_encoding',
-               'mailparse_msg_create',
-               'mailparse_msg_extract_part_file',
-               'mailparse_msg_extract_part',
-               'mailparse_msg_extract_whole_part_file',
-               'mailparse_msg_free',
-               'mailparse_msg_get_part_data',
-               'mailparse_msg_get_part',
-               'mailparse_msg_get_structure',
-               'mailparse_msg_parse_file',
-               'mailparse_msg_parse',
-               'mailparse_rfc822_parse_addresses',
-               'mailparse_stream_encode',
-               'mailparse_uudecode_all'],
- 'Math': ['abs',
-          'acos',
-          'acosh',
-          'asin',
-          'asinh',
-          'atan2',
-          'atan',
-          'atanh',
-          'base_convert',
-          'bindec',
-          'ceil',
-          'cos',
-          'cosh',
-          'decbin',
-          'dechex',
-          'decoct',
-          'deg2rad',
-          'exp',
-          'expm1'],
- 'MaxDB': ['maxdb_affected_rows',
-           'maxdb_autocommit',
-           'maxdb_bind_param',
-           'maxdb_bind_result',
-           'maxdb_change_user',
-           'maxdb_character_set_name',
-           'maxdb_client_encoding',
-           'maxdb_close_long_data',
-           'maxdb_close',
-           'maxdb_commit',
-           'maxdb_connect_errno',
-           'maxdb_connect_error',
-           'maxdb_connect',
-           'maxdb_data_seek',
-           'maxdb_debug',
-           'maxdb_disable_reads_from_master',
-           'maxdb_disable_rpl_parse',
-           'maxdb_dump_debug_info',
-           'maxdb_embedded_connect',
-           'maxdb_enable_reads_from_master',
-           'maxdb_enable_rpl_parse',
-           'maxdb_errno',
-           'maxdb_error',
-           'maxdb_escape_string',
-           'maxdb_execute',
-           'maxdb_fetch_array',
-           'maxdb_fetch_assoc',
-           'maxdb_fetch_field_direct',
-           'maxdb_fetch_field',
-           'maxdb_fetch_fields',
-           'maxdb_fetch_lengths',
-           'maxdb_fetch_object',
-           'maxdb_fetch_row',
-           'maxdb_fetch',
-           'maxdb_field_count',
-           'maxdb_field_seek',
-           'maxdb_field_tell',
-           'maxdb_free_result',
-           'maxdb_get_client_info',
-           'maxdb_get_client_version',
-           'maxdb_get_host_info',
-           'maxdb_get_metadata',
-           'maxdb_get_proto_info',
-           'maxdb_get_server_info',
-           'maxdb_get_server_version',
-           'maxdb_info',
-           'maxdb_init',
-           'maxdb_insert_id',
-           'maxdb_kill',
-           'maxdb_master_query',
-           'maxdb_more_results',
-           'maxdb_multi_query',
-           'maxdb_next_result',
-           'maxdb_num_fields',
-           'maxdb_num_rows',
-           'maxdb_options',
-           'maxdb_param_count',
-           'maxdb_ping',
-           'maxdb_prepare',
-           'maxdb_query',
-           'maxdb_real_connect',
-           'maxdb_real_escape_string',
-           'maxdb_real_query',
-           'maxdb_report',
-           'maxdb_rollback',
-           'maxdb_rpl_parse_enabled',
-           'maxdb_rpl_probe',
-           'maxdb_rpl_query_type',
-           'maxdb_select_db',
-           'maxdb_send_long_data',
-           'maxdb_send_query',
-           'maxdb_server_end',
-           'maxdb_server_init',
-           'maxdb_set_opt',
-           'maxdb_sqlstate',
-           'maxdb_ssl_set',
-           'maxdb_stat',
-           'maxdb_stmt_affected_rows'],
- 'Mcrypt': ['mcrypt_cbc',
-            'mcrypt_cfb',
-            'mcrypt_create_iv',
-            'mcrypt_decrypt',
-            'mcrypt_ecb',
-            'mcrypt_enc_get_algorithms_name',
-            'mcrypt_enc_get_block_size',
-            'mcrypt_enc_get_iv_size',
-            'mcrypt_enc_get_key_size',
-            'mcrypt_enc_get_modes_name',
-            'mcrypt_enc_get_supported_key_sizes',
-            'mcrypt_enc_is_block_algorithm_mode',
-            'mcrypt_enc_is_block_algorithm',
-            'mcrypt_enc_is_block_mode',
-            'mcrypt_enc_self_test',
-            'mcrypt_encrypt',
-            'mcrypt_generic_deinit',
-            'mcrypt_generic_end',
-            'mcrypt_generic_init',
-            'mcrypt_generic',
-            'mcrypt_get_block_size',
-            'mcrypt_get_cipher_name',
-            'mcrypt_get_iv_size',
-            'mcrypt_get_key_size',
-            'mcrypt_list_algorithms',
-            'mcrypt_list_modes',
-            'mcrypt_module_close',
-            'mcrypt_module_get_algo_block_size',
-            'mcrypt_module_get_algo_key_size',
-            'mcrypt_module_get_supported_key_sizes',
-            'mcrypt_module_is_block_algorithm_mode',
-            'mcrypt_module_is_block_algorithm',
-            'mcrypt_module_is_block_mode',
-            'mcrypt_module_open',
-            'mcrypt_module_self_test',
-            'mcrypt_ofb',
-            'mdecrypt_generic'],
- 'Memcache': ['memcache_debug'],
- 'Mhash': ['mhash_count',
-           'mhash_get_block_size',
-           'mhash_get_hash_name',
-           'mhash_keygen_s2k',
-           'mhash'],
- 'Ming': ['ming_keypress',
-          'ming_setcubicthreshold',
-          'ming_setscale',
-          'ming_setswfcompression',
-          'ming_useconstants',
-          'ming_useswfversion'],
- 'Misc.': ['connection_aborted',
-           'connection_status',
-           'connection_timeout',
-           'constant',
-           'define',
-           'defined',
-           'die',
-           'eval',
-           'exit',
-           'get_browser',
-           '__halt_compiler',
-           'highlight_file',
-           'highlight_string',
-           'ignore_user_abort',
-           'pack',
-           'php_check_syntax',
-           'php_strip_whitespace',
-           'show_source',
-           'sleep',
-           'sys_getloadavg',
-           'time_nanosleep',
-           'time_sleep_until',
-           'uniqid',
-           'unpack',
-           'usleep'],
- 'Mongo': ['bson_decode', 'bson_encode'],
- 'Msession': ['msession_connect',
-              'msession_count',
-              'msession_create',
-              'msession_destroy',
-              'msession_disconnect',
-              'msession_find',
-              'msession_get_array',
-              'msession_get_data',
-              'msession_get',
-              'msession_inc',
-              'msession_list',
-              'msession_listvar',
-              'msession_lock',
-              'msession_plugin',
-              'msession_randstr',
-              'msession_set_array',
-              'msession_set_data',
-              'msession_set',
-              'msession_timeout',
-              'msession_uniq',
-              'msession_unlock'],
- 'Mssql': ['mssql_bind',
-           'mssql_close',
-           'mssql_connect',
-           'mssql_data_seek',
-           'mssql_execute',
-           'mssql_fetch_array',
-           'mssql_fetch_assoc',
-           'mssql_fetch_batch',
-           'mssql_fetch_field',
-           'mssql_fetch_object',
-           'mssql_fetch_row',
-           'mssql_field_length',
-           'mssql_field_name',
-           'mssql_field_seek',
-           'mssql_field_type',
-           'mssql_free_result',
-           'mssql_free_statement',
-           'mssql_get_last_message',
-           'mssql_guid_string',
-           'mssql_init',
-           'mssql_min_error_severity',
-           'mssql_min_message_severity',
-           'mssql_next_result',
-           'mssql_num_fields',
-           'mssql_num_rows',
-           'mssql_pconnect',
-           'mssql_query',
-           'mssql_result',
-           'mssql_rows_affected',
-           'mssql_select_db'],
- 'Multibyte String': ['mb_check_encoding',
-                      'mb_convert_case',
-                      'mb_convert_encoding',
-                      'mb_convert_kana',
-                      'mb_convert_variables',
-                      'mb_decode_mimeheader',
-                      'mb_decode_numericentity',
-                      'mb_detect_encoding',
-                      'mb_detect_order',
-                      'mb_encode_mimeheader',
-                      'mb_encode_numericentity',
-                      'mb_encoding_aliases',
-                      'mb_ereg_match',
-                      'mb_ereg_replace',
-                      'mb_ereg_search_getpos',
-                      'mb_ereg_search_getregs',
-                      'mb_ereg_search_init',
-                      'mb_ereg_search_pos',
-                      'mb_ereg_search_regs',
-                      'mb_ereg_search_setpos',
-                      'mb_ereg_search',
-                      'mb_ereg',
-                      'mb_eregi_replace',
-                      'mb_eregi',
-                      'mb_get_info',
-                      'mb_http_input',
-                      'mb_http_output',
-                      'mb_internal_encoding',
-                      'mb_language',
-                      'mb_list_encodings',
-                      'mb_output_handler',
-                      'mb_parse_str',
-                      'mb_preferred_mime_name',
-                      'mb_regex_encoding',
-                      'mb_regex_set_options',
-                      'mb_send_mail',
-                      'mb_split',
-                      'mb_strcut',
-                      'mb_strimwidth',
-                      'mb_stripos',
-                      'mb_stristr',
-                      'mb_strlen',
-                      'mb_strpos',
-                      'mb_strrchr',
-                      'mb_strrichr',
-                      'mb_strripos',
-                      'mb_strrpos',
-                      'mb_strstr',
-                      'mb_strtolower',
-                      'mb_strtoupper',
-                      'mb_strwidth',
-                      'mb_substitute_character',
-                      'mb_substr_count',
-                      'mb_substr'],
- 'MySQL': ['mysql_affected_rows',
-           'mysql_client_encoding',
-           'mysql_close',
-           'mysql_connect',
-           'mysql_create_db',
-           'mysql_data_seek',
-           'mysql_db_name',
-           'mysql_db_query',
-           'mysql_drop_db',
-           'mysql_errno',
-           'mysql_error',
-           'mysql_escape_string',
-           'mysql_fetch_array',
-           'mysql_fetch_assoc',
-           'mysql_fetch_field',
-           'mysql_fetch_lengths',
-           'mysql_fetch_object',
-           'mysql_fetch_row',
-           'mysql_field_flags',
-           'mysql_field_len',
-           'mysql_field_name',
-           'mysql_field_seek',
-           'mysql_field_table',
-           'mysql_field_type',
-           'mysql_free_result',
-           'mysql_get_client_info',
-           'mysql_get_host_info',
-           'mysql_get_proto_info',
-           'mysql_get_server_info',
-           'mysql_info',
-           'mysql_insert_id',
-           'mysql_list_dbs',
-           'mysql_list_fields',
-           'mysql_list_processes',
-           'mysql_list_tables',
-           'mysql_num_fields',
-           'mysql_num_rows',
-           'mysql_pconnect',
-           'mysql_ping',
-           'mysql_query',
-           'mysql_real_escape_string',
-           'mysql_result',
-           'mysql_select_db',
-           'mysql_set_charset',
-           'mysql_stat',
-           'mysql_tablename',
-           'mysql_thread_id',
-           'mysql_unbuffered_query'],
- 'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
- 'Ncurses': ['ncurses_addch',
-             'ncurses_addchnstr',
-             'ncurses_addchstr',
-             'ncurses_addnstr',
-             'ncurses_addstr',
-             'ncurses_assume_default_colors',
-             'ncurses_attroff',
-             'ncurses_attron',
-             'ncurses_attrset',
-             'ncurses_baudrate',
-             'ncurses_beep',
-             'ncurses_bkgd',
-             'ncurses_bkgdset',
-             'ncurses_border',
-             'ncurses_bottom_panel',
-             'ncurses_can_change_color',
-             'ncurses_cbreak',
-             'ncurses_clear',
-             'ncurses_clrtobot',
-             'ncurses_clrtoeol',
-             'ncurses_color_content',
-             'ncurses_color_set',
-             'ncurses_curs_set',
-             'ncurses_def_prog_mode',
-             'ncurses_def_shell_mode',
-             'ncurses_define_key',
-             'ncurses_del_panel',
-             'ncurses_delay_output',
-             'ncurses_delch',
-             'ncurses_deleteln',
-             'ncurses_delwin',
-             'ncurses_doupdate',
-             'ncurses_echo',
-             'ncurses_echochar',
-             'ncurses_end',
-             'ncurses_erase',
-             'ncurses_erasechar',
-             'ncurses_filter',
-             'ncurses_flash',
-             'ncurses_flushinp',
-             'ncurses_getch',
-             'ncurses_getmaxyx',
-             'ncurses_getmouse',
-             'ncurses_getyx',
-             'ncurses_halfdelay',
-             'ncurses_has_colors',
-             'ncurses_has_ic',
-             'ncurses_has_il',
-             'ncurses_has_key',
-             'ncurses_hide_panel',
-             'ncurses_hline',
-             'ncurses_inch',
-             'ncurses_init_color',
-             'ncurses_init_pair',
-             'ncurses_init',
-             'ncurses_insch',
-             'ncurses_insdelln',
-             'ncurses_insertln',
-             'ncurses_insstr',
-             'ncurses_instr',
-             'ncurses_isendwin',
-             'ncurses_keyok',
-             'ncurses_keypad',
-             'ncurses_killchar',
-             'ncurses_longname',
-             'ncurses_meta',
-             'ncurses_mouse_trafo',
-             'ncurses_mouseinterval',
-             'ncurses_mousemask',
-             'ncurses_move_panel',
-             'ncurses_move',
-             'ncurses_mvaddch',
-             'ncurses_mvaddchnstr',
-             'ncurses_mvaddchstr',
-             'ncurses_mvaddnstr',
-             'ncurses_mvaddstr',
-             'ncurses_mvcur',
-             'ncurses_mvdelch',
-             'ncurses_mvgetch',
-             'ncurses_mvhline',
-             'ncurses_mvinch',
-             'ncurses_mvvline',
-             'ncurses_mvwaddstr',
-             'ncurses_napms',
-             'ncurses_new_panel',
-             'ncurses_newpad',
-             'ncurses_newwin',
-             'ncurses_nl',
-             'ncurses_nocbreak',
-             'ncurses_noecho',
-             'ncurses_nonl',
-             'ncurses_noqiflush',
-             'ncurses_noraw',
-             'ncurses_pair_content',
-             'ncurses_panel_above',
-             'ncurses_panel_below',
-             'ncurses_panel_window',
-             'ncurses_pnoutrefresh',
-             'ncurses_prefresh',
-             'ncurses_putp',
-             'ncurses_qiflush',
-             'ncurses_raw',
-             'ncurses_refresh',
-             'ncurses_replace_panel',
-             'ncurses_reset_prog_mode',
-             'ncurses_reset_shell_mode',
-             'ncurses_resetty',
-             'ncurses_savetty',
-             'ncurses_scr_dump',
-             'ncurses_scr_init',
-             'ncurses_scr_restore',
-             'ncurses_scr_set',
-             'ncurses_scrl',
-             'ncurses_show_panel',
-             'ncurses_slk_attr',
-             'ncurses_slk_attroff',
-             'ncurses_slk_attron',
-             'ncurses_slk_attrset',
-             'ncurses_slk_clear',
-             'ncurses_slk_color',
-             'ncurses_slk_init',
-             'ncurses_slk_noutrefresh',
-             'ncurses_slk_refresh',
-             'ncurses_slk_restore',
-             'ncurses_slk_set',
-             'ncurses_slk_touch',
-             'ncurses_standend',
-             'ncurses_standout',
-             'ncurses_start_color',
-             'ncurses_termattrs',
-             'ncurses_termname',
-             'ncurses_timeout',
-             'ncurses_top_panel',
-             'ncurses_typeahead',
-             'ncurses_ungetch',
-             'ncurses_ungetmouse',
-             'ncurses_update_panels',
-             'ncurses_use_default_colors',
-             'ncurses_use_env',
-             'ncurses_use_extended_names',
-             'ncurses_vidattr',
-             'ncurses_vline',
-             'ncurses_waddch',
-             'ncurses_waddstr',
-             'ncurses_wattroff',
-             'ncurses_wattron',
-             'ncurses_wattrset',
-             'ncurses_wborder',
-             'ncurses_wclear',
-             'ncurses_wcolor_set',
-             'ncurses_werase',
-             'ncurses_wgetch',
-             'ncurses_whline',
-             'ncurses_wmouse_trafo',
-             'ncurses_wmove',
-             'ncurses_wnoutrefresh',
-             'ncurses_wrefresh',
-             'ncurses_wstandend',
-             'ncurses_wstandout',
-             'ncurses_wvline'],
- 'Network': ['checkdnsrr',
-             'closelog',
-             'define_syslog_variables',
-             'dns_check_record',
-             'dns_get_mx',
-             'dns_get_record',
-             'fsockopen',
-             'gethostbyaddr',
-             'gethostbyname',
-             'gethostbynamel'],
- 'Newt': ['newt_bell',
-          'newt_button_bar',
-          'newt_button',
-          'newt_centered_window',
-          'newt_checkbox_get_value',
-          'newt_checkbox_set_flags',
-          'newt_checkbox_set_value',
-          'newt_checkbox_tree_add_item',
-          'newt_checkbox_tree_find_item',
-          'newt_checkbox_tree_get_current',
-          'newt_checkbox_tree_get_entry_value',
-          'newt_checkbox_tree_get_multi_selection',
-          'newt_checkbox_tree_get_selection',
-          'newt_checkbox_tree_multi',
-          'newt_checkbox_tree_set_current',
-          'newt_checkbox_tree_set_entry_value',
-          'newt_checkbox_tree_set_entry',
-          'newt_checkbox_tree_set_width',
-          'newt_checkbox_tree',
-          'newt_checkbox',
-          'newt_clear_key_buffer'],
- 'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
- 'OCI8': ['oci_bind_array_by_name',
-          'oci_bind_by_name',
-          'oci_cancel',
-          'oci_close',
-          'oci_commit',
-          'oci_connect',
-          'oci_define_by_name',
-          'oci_error',
-          'oci_execute',
-          'oci_fetch_all',
-          'oci_fetch_array',
-          'oci_fetch_assoc',
-          'oci_fetch_object',
-          'oci_fetch_row',
-          'oci_fetch',
-          'oci_field_is_null',
-          'oci_field_name',
-          'oci_field_precision',
-          'oci_field_scale',
-          'oci_field_size',
-          'oci_field_type_raw',
-          'oci_field_type',
-          'oci_free_statement',
-          'oci_internal_debug',
-          'oci_lob_copy',
-          'oci_lob_is_equal',
-          'oci_new_collection',
-          'oci_new_connect',
-          'oci_new_cursor',
-          'oci_new_descriptor',
-          'oci_num_fields',
-          'oci_num_rows',
-          'oci_parse',
-          'oci_password_change',
-          'oci_pconnect',
-          'oci_result',
-          'oci_rollback',
-          'oci_server_version',
-          'oci_set_action',
-          'oci_set_client_identifier',
-          'oci_set_client_info',
-          'oci_set_edition',
-          'oci_set_module_name',
-          'oci_set_prefetch',
-          'oci_statement_type'],
- 'ODBC': ['odbc_autocommit',
-          'odbc_binmode',
-          'odbc_close_all',
-          'odbc_close',
-          'odbc_columnprivileges',
-          'odbc_columns',
-          'odbc_commit',
-          'odbc_connect',
-          'odbc_cursor',
-          'odbc_data_source',
-          'odbc_do',
-          'odbc_error',
-          'odbc_errormsg',
-          'odbc_exec',
-          'odbc_execute',
-          'odbc_fetch_array',
-          'odbc_fetch_into',
-          'odbc_fetch_object',
-          'odbc_fetch_row',
-          'odbc_field_len',
-          'odbc_field_name',
-          'odbc_field_num',
-          'odbc_field_precision',
-          'odbc_field_scale',
-          'odbc_field_type',
-          'odbc_foreignkeys',
-          'odbc_free_result',
-          'odbc_gettypeinfo',
-          'odbc_longreadlen',
-          'odbc_next_result',
-          'odbc_num_fields',
-          'odbc_num_rows',
-          'odbc_pconnect',
-          'odbc_prepare',
-          'odbc_primarykeys',
-          'odbc_procedurecolumns',
-          'odbc_procedures',
-          'odbc_result_all',
-          'odbc_result',
-          'odbc_rollback',
-          'odbc_setoption',
-          'odbc_specialcolumns',
-          'odbc_statistics',
-          'odbc_tableprivileges',
-          'odbc_tables'],
- 'Object Aggregation': ['aggregate_info',
-                        'aggregate_methods_by_list',
-                        'aggregate_methods_by_regexp'],
- 'Object overloading': ['overload'],
- 'OpenAL': ['openal_buffer_create',
-            'openal_buffer_data',
-            'openal_buffer_destroy',
-            'openal_buffer_get',
-            'openal_buffer_loadwav',
-            'openal_context_create',
-            'openal_context_current',
-            'openal_context_destroy',
-            'openal_context_process',
-            'openal_context_suspend',
-            'openal_device_close',
-            'openal_device_open',
-            'openal_listener_get',
-            'openal_listener_set',
-            'openal_source_create',
-            'openal_source_destroy',
-            'openal_source_get',
-            'openal_source_pause',
-            'openal_source_play',
-            'openal_source_rewind',
-            'openal_source_set',
-            'openal_source_stop',
-            'openal_stream'],
- 'OpenSSL': ['openssl_csr_export_to_file',
-             'openssl_csr_export',
-             'openssl_csr_get_public_key',
-             'openssl_csr_get_subject',
-             'openssl_csr_new',
-             'openssl_csr_sign',
-             'openssl_decrypt',
-             'openssl_dh_compute_key',
-             'openssl_digest',
-             'openssl_encrypt',
-             'openssl_error_string',
-             'openssl_free_key',
-             'openssl_get_cipher_methods',
-             'openssl_get_md_methods',
-             'openssl_get_privatekey',
-             'openssl_get_publickey',
-             'openssl_open',
-             'openssl_pkcs12_export_to_file',
-             'openssl_pkcs12_export',
-             'openssl_pkcs12_read',
-             'openssl_pkcs7_decrypt',
-             'openssl_pkcs7_encrypt',
-             'openssl_pkcs7_sign',
-             'openssl_pkcs7_verify',
-             'openssl_pkey_export_to_file',
-             'openssl_pkey_export',
-             'openssl_pkey_free',
-             'openssl_pkey_get_details',
-             'openssl_pkey_get_private',
-             'openssl_pkey_get_public',
-             'openssl_pkey_new',
-             'openssl_private_decrypt',
-             'openssl_private_encrypt',
-             'openssl_public_decrypt',
-             'openssl_public_encrypt',
-             'openssl_random_pseudo_bytes',
-             'openssl_seal',
-             'openssl_sign',
-             'openssl_verify',
-             'openssl_x509_check_private_key',
-             'openssl_x509_checkpurpose',
-             'openssl_x509_export_to_file',
-             'openssl_x509_export',
-             'openssl_x509_free',
-             'openssl_x509_parse',
-             'openssl_x509_read'],
- 'Output Control': ['flush',
-                    'ob_clean',
-                    'ob_end_clean',
-                    'ob_end_flush',
-                    'ob_flush',
-                    'ob_get_clean',
-                    'ob_get_contents',
-                    'ob_get_flush',
-                    'ob_get_length',
-                    'ob_get_level',
-                    'ob_get_status',
-                    'ob_gzhandler',
-                    'ob_implicit_flush',
-                    'ob_list_handlers',
-                    'ob_start',
-                    'output_add_rewrite_var',
-                    'output_reset_rewrite_vars'],
- 'Ovrimos SQL': ['ovrimos_close',
-                 'ovrimos_commit',
-                 'ovrimos_connect',
-                 'ovrimos_cursor',
-                 'ovrimos_exec',
-                 'ovrimos_execute',
-                 'ovrimos_fetch_into',
-                 'ovrimos_fetch_row',
-                 'ovrimos_field_len',
-                 'ovrimos_field_name',
-                 'ovrimos_field_num',
-                 'ovrimos_field_type',
-                 'ovrimos_free_result',
-                 'ovrimos_longreadlen',
-                 'ovrimos_num_fields',
-                 'ovrimos_num_rows',
-                 'ovrimos_prepare',
-                 'ovrimos_result_all',
-                 'ovrimos_result',
-                 'ovrimos_rollback'],
- 'PCNTL': ['pcntl_alarm',
-           'pcntl_exec',
-           'pcntl_fork',
-           'pcntl_getpriority',
-           'pcntl_setpriority',
-           'pcntl_signal_dispatch',
-           'pcntl_signal',
-           'pcntl_sigprocmask',
-           'pcntl_sigtimedwait',
-           'pcntl_sigwaitinfo',
-           'pcntl_wait',
-           'pcntl_waitpid',
-           'pcntl_wexitstatus',
-           'pcntl_wifexited',
-           'pcntl_wifsignaled',
-           'pcntl_wifstopped',
-           'pcntl_wstopsig',
-           'pcntl_wtermsig'],
- 'PCRE': ['preg_filter',
-          'preg_grep',
-          'preg_last_error',
-          'preg_match_all',
-          'preg_match',
-          'preg_quote',
-          'preg_replace_callback',
-          'preg_replace',
-          'preg_split'],
- 'PDF': ['PDF_activate_item',
-         'PDF_add_annotation',
-         'PDF_add_bookmark',
-         'PDF_add_launchlink',
-         'PDF_add_locallink',
-         'PDF_add_nameddest',
-         'PDF_add_note',
-         'PDF_add_outline',
-         'PDF_add_pdflink',
-         'PDF_add_table_cell',
-         'PDF_add_textflow',
-         'PDF_add_thumbnail',
-         'PDF_add_weblink',
-         'PDF_arc',
-         'PDF_arcn',
-         'PDF_attach_file',
-         'PDF_begin_document',
-         'PDF_begin_font',
-         'PDF_begin_glyph',
-         'PDF_begin_item',
-         'PDF_begin_layer',
-         'PDF_begin_page_ext',
-         'PDF_begin_page',
-         'PDF_begin_pattern',
-         'PDF_begin_template_ext',
-         'PDF_begin_template',
-         'PDF_circle',
-         'PDF_clip',
-         'PDF_close_image',
-         'PDF_close_pdi_page',
-         'PDF_close_pdi',
-         'PDF_close',
-         'PDF_closepath_fill_stroke',
-         'PDF_closepath_stroke',
-         'PDF_closepath',
-         'PDF_concat',
-         'PDF_continue_text',
-         'PDF_create_3dview',
-         'PDF_create_action',
-         'PDF_create_annotation',
-         'PDF_create_bookmark',
-         'PDF_create_field',
-         'PDF_create_fieldgroup',
-         'PDF_create_gstate',
-         'PDF_create_pvf',
-         'PDF_create_textflow',
-         'PDF_curveto',
-         'PDF_define_layer',
-         'PDF_delete_pvf',
-         'PDF_delete_table',
-         'PDF_delete_textflow',
-         'PDF_delete',
-         'PDF_encoding_set_char',
-         'PDF_end_document',
-         'PDF_end_font',
-         'PDF_end_glyph',
-         'PDF_end_item',
-         'PDF_end_layer',
-         'PDF_end_page_ext',
-         'PDF_end_page',
-         'PDF_end_pattern',
-         'PDF_end_template',
-         'PDF_endpath',
-         'PDF_fill_imageblock',
-         'PDF_fill_pdfblock',
-         'PDF_fill_stroke',
-         'PDF_fill_textblock',
-         'PDF_fill',
-         'PDF_findfont',
-         'PDF_fit_image',
-         'PDF_fit_pdi_page',
-         'PDF_fit_table',
-         'PDF_fit_textflow',
-         'PDF_fit_textline',
-         'PDF_get_apiname',
-         'PDF_get_buffer',
-         'PDF_get_errmsg',
-         'PDF_get_errnum',
-         'PDF_get_font',
-         'PDF_get_fontname',
-         'PDF_get_fontsize',
-         'PDF_get_image_height',
-         'PDF_get_image_width',
-         'PDF_get_majorversion',
-         'PDF_get_minorversion',
-         'PDF_get_parameter',
-         'PDF_get_pdi_parameter',
-         'PDF_get_pdi_value',
-         'PDF_get_value',
-         'PDF_info_font',
-         'PDF_info_matchbox',
-         'PDF_info_table',
-         'PDF_info_textflow',
-         'PDF_info_textline',
-         'PDF_initgraphics',
-         'PDF_lineto',
-         'PDF_load_3ddata',
-         'PDF_load_font',
-         'PDF_load_iccprofile',
-         'PDF_load_image',
-         'PDF_makespotcolor',
-         'PDF_moveto',
-         'PDF_new',
-         'PDF_open_ccitt',
-         'PDF_open_file',
-         'PDF_open_gif',
-         'PDF_open_image_file',
-         'PDF_open_image',
-         'PDF_open_jpeg',
-         'PDF_open_memory_image',
-         'PDF_open_pdi_document',
-         'PDF_open_pdi_page',
-         'PDF_open_pdi',
-         'PDF_open_tiff',
-         'PDF_pcos_get_number',
-         'PDF_pcos_get_stream',
-         'PDF_pcos_get_string',
-         'PDF_place_image',
-         'PDF_place_pdi_page',
-         'PDF_process_pdi',
-         'PDF_rect',
-         'PDF_restore',
-         'PDF_resume_page',
-         'PDF_rotate',
-         'PDF_save',
-         'PDF_scale',
-         'PDF_set_border_color',
-         'PDF_set_border_dash',
-         'PDF_set_border_style',
-         'PDF_set_char_spacing',
-         'PDF_set_duration',
-         'PDF_set_gstate',
-         'PDF_set_horiz_scaling',
-         'PDF_set_info_author',
-         'PDF_set_info_creator',
-         'PDF_set_info_keywords',
-         'PDF_set_info_subject',
-         'PDF_set_info_title',
-         'PDF_set_info',
-         'PDF_set_layer_dependency',
-         'PDF_set_leading',
-         'PDF_set_parameter',
-         'PDF_set_text_matrix',
-         'PDF_set_text_pos',
-         'PDF_set_text_rendering',
-         'PDF_set_text_rise',
-         'PDF_set_value',
-         'PDF_set_word_spacing',
-         'PDF_setcolor',
-         'PDF_setdash',
-         'PDF_setdashpattern',
-         'PDF_setflat',
-         'PDF_setfont',
-         'PDF_setgray_fill',
-         'PDF_setgray_stroke',
-         'PDF_setgray',
-         'PDF_setlinecap',
-         'PDF_setlinejoin',
-         'PDF_setlinewidth',
-         'PDF_setmatrix',
-         'PDF_setmiterlimit',
-         'PDF_setpolydash',
-         'PDF_setrgbcolor_fill',
-         'PDF_setrgbcolor_stroke',
-         'PDF_setrgbcolor',
-         'PDF_shading_pattern',
-         'PDF_shading',
-         'PDF_shfill',
-         'PDF_show_boxed',
-         'PDF_show_xy',
-         'PDF_show',
-         'PDF_skew',
-         'PDF_stringwidth',
-         'PDF_stroke',
-         'PDF_suspend_page',
-         'PDF_translate',
-         'PDF_utf16_to_utf8',
-         'PDF_utf32_to_utf16',
-         'PDF_utf8_to_utf16'],
- 'PHP Options/Info': ['assert_options',
-                      'assert',
-                      'dl',
-                      'extension_loaded',
-                      'gc_collect_cycles',
-                      'gc_disable',
-                      'gc_enable',
-                      'gc_enabled',
-                      'get_cfg_var',
-                      'get_current_user',
-                      'get_defined_constants',
-                      'get_extension_funcs',
-                      'get_include_path',
-                      'get_included_files',
-                      'get_loaded_extensions',
-                      'get_magic_quotes_gpc',
-                      'get_magic_quotes_runtime',
-                      'get_required_files',
-                      'getenv',
-                      'getlastmod',
-                      'getmygid',
-                      'getmyinode',
-                      'getmypid',
-                      'getmyuid',
-                      'getopt',
-                      'getrusage',
-                      'ini_alter',
-                      'ini_get_all',
-                      'ini_get',
-                      'ini_restore',
-                      'ini_set',
-                      'magic_quotes_runtime',
-                      'memory_get_peak_usage',
-                      'memory_get_usage',
-                      'php_ini_loaded_file',
-                      'php_ini_scanned_files',
-                      'php_logo_guid',
-                      'php_sapi_name',
-                      'php_uname',
-                      'phpcredits',
-                      'phpinfo',
-                      'phpversion',
-                      'putenv',
-                      'restore_include_path',
-                      'set_include_path',
-                      'set_magic_quotes_runtime',
-                      'set_time_limit',
-                      'sys_get_temp_dir',
-                      'version_compare',
-                      'zend_logo_guid',
-                      'zend_thread_id',
-                      'zend_version'],
- 'POSIX': ['posix_access',
-           'posix_ctermid',
-           'posix_errno',
-           'posix_get_last_error',
-           'posix_getcwd',
-           'posix_getegid',
-           'posix_geteuid',
-           'posix_getgid',
-           'posix_getgrgid',
-           'posix_getgrnam',
-           'posix_getgroups',
-           'posix_getlogin',
-           'posix_getpgid',
-           'posix_getpgrp',
-           'posix_getpid',
-           'posix_getppid',
-           'posix_getpwnam',
-           'posix_getpwuid',
-           'posix_getrlimit',
-           'posix_getsid',
-           'posix_getuid',
-           'posix_initgroups',
-           'posix_isatty',
-           'posix_kill',
-           'posix_mkfifo',
-           'posix_mknod',
-           'posix_setegid',
-           'posix_seteuid',
-           'posix_setgid',
-           'posix_setpgid',
-           'posix_setsid',
-           'posix_setuid',
-           'posix_strerror',
-           'posix_times',
-           'posix_ttyname',
-           'posix_uname'],
- 'POSIX Regex': ['ereg_replace',
-                 'ereg',
-                 'eregi_replace',
-                 'eregi',
-                 'split',
-                 'spliti',
-                 'sql_regcase'],
- 'PS': ['ps_add_bookmark',
-        'ps_add_launchlink',
-        'ps_add_locallink',
-        'ps_add_note',
-        'ps_add_pdflink',
-        'ps_add_weblink',
-        'ps_arc',
-        'ps_arcn',
-        'ps_begin_page',
-        'ps_begin_pattern',
-        'ps_begin_template',
-        'ps_circle',
-        'ps_clip',
-        'ps_close_image',
-        'ps_close',
-        'ps_closepath_stroke',
-        'ps_closepath',
-        'ps_continue_text',
-        'ps_curveto',
-        'ps_delete',
-        'ps_end_page',
-        'ps_end_pattern',
-        'ps_end_template',
-        'ps_fill_stroke',
-        'ps_fill',
-        'ps_findfont',
-        'ps_get_buffer',
-        'ps_get_parameter',
-        'ps_get_value',
-        'ps_hyphenate',
-        'ps_include_file',
-        'ps_lineto',
-        'ps_makespotcolor',
-        'ps_moveto',
-        'ps_new',
-        'ps_open_file',
-        'ps_open_image_file',
-        'ps_open_image',
-        'ps_open_memory_image',
-        'ps_place_image',
-        'ps_rect',
-        'ps_restore',
-        'ps_rotate',
-        'ps_save',
-        'ps_scale',
-        'ps_set_border_color',
-        'ps_set_border_dash',
-        'ps_set_border_style',
-        'ps_set_info',
-        'ps_set_parameter',
-        'ps_set_text_pos',
-        'ps_set_value',
-        'ps_setcolor',
-        'ps_setdash',
-        'ps_setflat',
-        'ps_setfont',
-        'ps_setgray',
-        'ps_setlinecap',
-        'ps_setlinejoin',
-        'ps_setlinewidth',
-        'ps_setmiterlimit',
-        'ps_setoverprintmode',
-        'ps_setpolydash',
-        'ps_shading_pattern',
-        'ps_shading',
-        'ps_shfill',
-        'ps_show_boxed',
-        'ps_show_xy2',
-        'ps_show_xy',
-        'ps_show2',
-        'ps_show',
-        'ps_string_geometry',
-        'ps_stringwidth',
-        'ps_stroke',
-        'ps_symbol_name',
-        'ps_symbol_width',
-        'ps_symbol',
-        'ps_translate'],
- 'Paradox': ['px_close',
-             'px_create_fp',
-             'px_date2string',
-             'px_delete_record',
-             'px_delete',
-             'px_get_field',
-             'px_get_info',
-             'px_get_parameter',
-             'px_get_record',
-             'px_get_schema',
-             'px_get_value',
-             'px_insert_record',
-             'px_new',
-             'px_numfields',
-             'px_numrecords',
-             'px_open_fp',
-             'px_put_record',
-             'px_retrieve_record',
-             'px_set_blob_file',
-             'px_set_parameter',
-             'px_set_tablename',
-             'px_set_targetencoding',
-             'px_set_value',
-             'px_timestamp2string',
-             'px_update_record'],
- 'Parsekit': ['parsekit_compile_file',
-              'parsekit_compile_string',
-              'parsekit_func_arginfo'],
- 'PostgreSQL': ['pg_affected_rows',
-                'pg_cancel_query',
-                'pg_client_encoding',
-                'pg_close',
-                'pg_connect',
-                'pg_connection_busy',
-                'pg_connection_reset',
-                'pg_connection_status',
-                'pg_convert',
-                'pg_copy_from',
-                'pg_copy_to',
-                'pg_dbname',
-                'pg_delete',
-                'pg_end_copy',
-                'pg_escape_bytea',
-                'pg_escape_string',
-                'pg_execute',
-                'pg_fetch_all_columns',
-                'pg_fetch_all',
-                'pg_fetch_array',
-                'pg_fetch_assoc',
-                'pg_fetch_object',
-                'pg_fetch_result',
-                'pg_fetch_row',
-                'pg_field_is_null',
-                'pg_field_name',
-                'pg_field_num',
-                'pg_field_prtlen',
-                'pg_field_size',
-                'pg_field_table',
-                'pg_field_type_oid',
-                'pg_field_type',
-                'pg_free_result',
-                'pg_get_notify',
-                'pg_get_pid',
-                'pg_get_result',
-                'pg_host',
-                'pg_insert',
-                'pg_last_error',
-                'pg_last_notice',
-                'pg_last_oid',
-                'pg_lo_close',
-                'pg_lo_create',
-                'pg_lo_export',
-                'pg_lo_import',
-                'pg_lo_open',
-                'pg_lo_read_all',
-                'pg_lo_read',
-                'pg_lo_seek',
-                'pg_lo_tell',
-                'pg_lo_unlink',
-                'pg_lo_write',
-                'pg_meta_data',
-                'pg_num_fields',
-                'pg_num_rows',
-                'pg_options',
-                'pg_parameter_status',
-                'pg_pconnect',
-                'pg_ping',
-                'pg_port',
-                'pg_prepare'],
- 'Printer': ['printer_abort',
-             'printer_close',
-             'printer_create_brush',
-             'printer_create_dc',
-             'printer_create_font',
-             'printer_create_pen',
-             'printer_delete_brush',
-             'printer_delete_dc',
-             'printer_delete_font',
-             'printer_delete_pen',
-             'printer_draw_bmp',
-             'printer_draw_chord',
-             'printer_draw_elipse',
-             'printer_draw_line',
-             'printer_draw_pie',
-             'printer_draw_rectangle',
-             'printer_draw_roundrect',
-             'printer_draw_text',
-             'printer_end_doc',
-             'printer_end_page',
-             'printer_get_option',
-             'printer_list',
-             'printer_logical_fontheight',
-             'printer_open',
-             'printer_select_brush',
-             'printer_select_font',
-             'printer_select_pen',
-             'printer_set_option',
-             'printer_start_doc',
-             'printer_start_page',
-             'printer_write'],
- 'Program execution': ['escapeshellarg',
-                       'escapeshellcmd',
-                       'exec',
-                       'passthru',
-                       'proc_close',
-                       'proc_get_status',
-                       'proc_nice',
-                       'proc_open',
-                       'proc_terminate',
-                       'shell_exec',
-                       'system'],
- 'Pspell': ['pspell_add_to_personal',
-            'pspell_add_to_session',
-            'pspell_check',
-            'pspell_clear_session',
-            'pspell_config_create',
-            'pspell_config_data_dir',
-            'pspell_config_dict_dir',
-            'pspell_config_ignore',
-            'pspell_config_mode',
-            'pspell_config_personal',
-            'pspell_config_repl',
-            'pspell_config_runtogether',
-            'pspell_config_save_repl'],
- 'RPM Reader': ['rpm_close',
-                'rpm_get_tag',
-                'rpm_is_valid',
-                'rpm_open',
-                'rpm_version'],
- 'RRD': ['rrd_create',
-         'rrd_error',
-         'rrd_fetch',
-         'rrd_first',
-         'rrd_graph',
-         'rrd_info',
-         'rrd_last',
-         'rrd_lastupdate',
-         'rrd_restore',
-         'rrd_tune',
-         'rrd_update',
-         'rrd_xport'],
- 'Radius': ['radius_acct_open',
-            'radius_add_server',
-            'radius_auth_open',
-            'radius_close',
-            'radius_config',
-            'radius_create_request',
-            'radius_cvt_addr',
-            'radius_cvt_int',
-            'radius_cvt_string',
-            'radius_demangle_mppe_key',
-            'radius_demangle',
-            'radius_get_attr',
-            'radius_get_vendor_attr',
-            'radius_put_addr',
-            'radius_put_attr',
-            'radius_put_int',
-            'radius_put_string',
-            'radius_put_vendor_addr',
-            'radius_put_vendor_attr',
-            'radius_put_vendor_int',
-            'radius_put_vendor_string',
-            'radius_request_authenticator',
-            'radius_send_request',
-            'radius_server_secret',
-            'radius_strerror'],
- 'Rar': ['rar_wrapper_cache_stats'],
- 'Readline': ['readline_add_history',
-              'readline_callback_handler_install',
-              'readline_callback_handler_remove',
-              'readline_callback_read_char',
-              'readline_clear_history',
-              'readline_completion_function',
-              'readline_info',
-              'readline_list_history',
-              'readline_on_new_line',
-              'readline_read_history',
-              'readline_redisplay',
-              'readline_write_history',
-              'readline'],
- 'Recode': ['recode_file', 'recode_string', 'recode'],
- 'SNMP': ['snmp_get_quick_print',
-          'snmp_get_valueretrieval',
-          'snmp_read_mib',
-          'snmp_set_enum_print',
-          'snmp_set_oid_numeric_print',
-          'snmp_set_oid_output_format',
-          'snmp_set_quick_print',
-          'snmp_set_valueretrieval',
-          'snmp2_get',
-          'snmp2_getnext',
-          'snmp2_real_walk',
-          'snmp2_set',
-          'snmp2_walk',
-          'snmp3_get',
-          'snmp3_getnext',
-          'snmp3_real_walk',
-          'snmp3_set',
-          'snmp3_walk',
-          'snmpget',
-          'snmpgetnext',
-          'snmprealwalk',
-          'snmpset',
-          'snmpwalk',
-          'snmpwalkoid'],
- 'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
- 'SPL': ['class_implements',
-         'class_parents',
-         'iterator_apply',
-         'iterator_count',
-         'iterator_to_array',
-         'spl_autoload_call',
-         'spl_autoload_extensions',
-         'spl_autoload_functions',
-         'spl_autoload_register',
-         'spl_autoload_unregister',
-         'spl_autoload',
-         'spl_classes',
-         'spl_object_hash'],
- 'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
- 'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
- 'SSH2': ['ssh2_auth_hostbased_file',
-          'ssh2_auth_none',
-          'ssh2_auth_password',
-          'ssh2_auth_pubkey_file',
-          'ssh2_connect',
-          'ssh2_exec',
-          'ssh2_fetch_stream',
-          'ssh2_fingerprint',
-          'ssh2_methods_negotiated',
-          'ssh2_publickey_add',
-          'ssh2_publickey_init',
-          'ssh2_publickey_list',
-          'ssh2_publickey_remove',
-          'ssh2_scp_recv',
-          'ssh2_scp_send',
-          'ssh2_sftp_lstat',
-          'ssh2_sftp_mkdir',
-          'ssh2_sftp_readlink',
-          'ssh2_sftp_realpath',
-          'ssh2_sftp_rename',
-          'ssh2_sftp_rmdir',
-          'ssh2_sftp_stat',
-          'ssh2_sftp_symlink',
-          'ssh2_sftp_unlink',
-          'ssh2_sftp',
-          'ssh2_shell',
-          'ssh2_tunnel'],
- 'SVN': ['svn_add',
-         'svn_auth_get_parameter',
-         'svn_auth_set_parameter',
-         'svn_blame',
-         'svn_cat',
-         'svn_checkout',
-         'svn_cleanup',
-         'svn_client_version',
-         'svn_commit',
-         'svn_delete',
-         'svn_diff',
-         'svn_export',
-         'svn_fs_abort_txn',
-         'svn_fs_apply_text',
-         'svn_fs_begin_txn2',
-         'svn_fs_change_node_prop',
-         'svn_fs_check_path',
-         'svn_fs_contents_changed',
-         'svn_fs_copy',
-         'svn_fs_delete',
-         'svn_fs_dir_entries',
-         'svn_fs_file_contents',
-         'svn_fs_file_length',
-         'svn_fs_is_dir',
-         'svn_fs_is_file',
-         'svn_fs_make_dir',
-         'svn_fs_make_file',
-         'svn_fs_node_created_rev',
-         'svn_fs_node_prop',
-         'svn_fs_props_changed',
-         'svn_fs_revision_prop',
-         'svn_fs_revision_root',
-         'svn_fs_txn_root',
-         'svn_fs_youngest_rev',
-         'svn_import',
-         'svn_log',
-         'svn_ls',
-         'svn_mkdir',
-         'svn_repos_create',
-         'svn_repos_fs_begin_txn_for_commit',
-         'svn_repos_fs_commit_txn',
-         'svn_repos_fs',
-         'svn_repos_hotcopy',
-         'svn_repos_open',
-         'svn_repos_recover',
-         'svn_revert',
-         'svn_status',
-         'svn_update'],
- 'SWF': ['swf_actiongeturl',
-         'swf_actiongotoframe',
-         'swf_actiongotolabel',
-         'swf_actionnextframe',
-         'swf_actionplay',
-         'swf_actionprevframe',
-         'swf_actionsettarget',
-         'swf_actionstop',
-         'swf_actiontogglequality',
-         'swf_actionwaitforframe',
-         'swf_addbuttonrecord',
-         'swf_addcolor',
-         'swf_closefile',
-         'swf_definebitmap',
-         'swf_definefont',
-         'swf_defineline',
-         'swf_definepoly',
-         'swf_definerect',
-         'swf_definetext',
-         'swf_endbutton',
-         'swf_enddoaction',
-         'swf_endshape',
-         'swf_endsymbol',
-         'swf_fontsize',
-         'swf_fontslant',
-         'swf_fonttracking',
-         'swf_getbitmapinfo',
-         'swf_getfontinfo',
-         'swf_getframe',
-         'swf_labelframe',
-         'swf_lookat',
-         'swf_modifyobject',
-         'swf_mulcolor',
-         'swf_nextid',
-         'swf_oncondition',
-         'swf_openfile',
-         'swf_ortho2',
-         'swf_ortho',
-         'swf_perspective',
-         'swf_placeobject',
-         'swf_polarview',
-         'swf_popmatrix',
-         'swf_posround',
-         'swf_pushmatrix',
-         'swf_removeobject',
-         'swf_rotate',
-         'swf_scale',
-         'swf_setfont',
-         'swf_setframe',
-         'swf_shapearc',
-         'swf_shapecurveto3',
-         'swf_shapecurveto',
-         'swf_shapefillbitmapclip',
-         'swf_shapefillbitmaptile',
-         'swf_shapefilloff',
-         'swf_shapefillsolid',
-         'swf_shapelinesolid',
-         'swf_shapelineto',
-         'swf_shapemoveto',
-         'swf_showframe',
-         'swf_startbutton',
-         'swf_startdoaction',
-         'swf_startshape',
-         'swf_startsymbol',
-         'swf_textwidth',
-         'swf_translate',
-         'swf_viewport'],
- 'Semaphore': ['ftok',
-               'msg_get_queue',
-               'msg_queue_exists',
-               'msg_receive',
-               'msg_remove_queue',
-               'msg_send',
-               'msg_set_queue',
-               'msg_stat_queue',
-               'sem_acquire',
-               'sem_get',
-               'sem_release',
-               'sem_remove',
-               'shm_attach',
-               'shm_detach',
-               'shm_get_var',
-               'shm_has_var',
-               'shm_put_var',
-               'shm_remove_var',
-               'shm_remove'],
- 'Session': ['session_cache_expire',
-             'session_cache_limiter',
-             'session_commit',
-             'session_decode',
-             'session_destroy',
-             'session_encode',
-             'session_get_cookie_params',
-             'session_id',
-             'session_is_registered',
-             'session_module_name',
-             'session_name',
-             'session_regenerate_id',
-             'session_register',
-             'session_save_path',
-             'session_set_cookie_params',
-             'session_set_save_handler',
-             'session_start',
-             'session_unregister',
-             'session_unset',
-             'session_write_close'],
- 'Session PgSQL': ['session_pgsql_add_error',
-                   'session_pgsql_get_error',
-                   'session_pgsql_get_field',
-                   'session_pgsql_reset',
-                   'session_pgsql_set_field',
-                   'session_pgsql_status'],
- 'Shared Memory': ['shmop_close',
-                   'shmop_delete',
-                   'shmop_open',
-                   'shmop_read',
-                   'shmop_size',
-                   'shmop_write'],
- 'SimpleXML': ['simplexml_import_dom',
-               'simplexml_load_file',
-               'simplexml_load_string'],
- 'Socket': ['socket_accept',
-            'socket_bind',
-            'socket_clear_error',
-            'socket_close',
-            'socket_connect',
-            'socket_create_listen',
-            'socket_create_pair',
-            'socket_create',
-            'socket_get_option',
-            'socket_getpeername',
-            'socket_getsockname',
-            'socket_last_error',
-            'socket_listen',
-            'socket_read',
-            'socket_recv',
-            'socket_recvfrom',
-            'socket_select',
-            'socket_send',
-            'socket_sendto',
-            'socket_set_block',
-            'socket_set_nonblock',
-            'socket_set_option',
-            'socket_shutdown',
-            'socket_strerror',
-            'socket_write'],
- 'Solr': ['solr_get_version'],
- 'Statistic': ['stats_absolute_deviation',
-               'stats_cdf_beta',
-               'stats_cdf_binomial',
-               'stats_cdf_cauchy',
-               'stats_cdf_chisquare',
-               'stats_cdf_exponential',
-               'stats_cdf_f',
-               'stats_cdf_gamma',
-               'stats_cdf_laplace',
-               'stats_cdf_logistic',
-               'stats_cdf_negative_binomial',
-               'stats_cdf_noncentral_chisquare',
-               'stats_cdf_noncentral_f',
-               'stats_cdf_poisson',
-               'stats_cdf_t',
-               'stats_cdf_uniform',
-               'stats_cdf_weibull',
-               'stats_covariance',
-               'stats_den_uniform',
-               'stats_dens_beta',
-               'stats_dens_cauchy',
-               'stats_dens_chisquare',
-               'stats_dens_exponential',
-               'stats_dens_f',
-               'stats_dens_gamma',
-               'stats_dens_laplace',
-               'stats_dens_logistic',
-               'stats_dens_negative_binomial',
-               'stats_dens_normal',
-               'stats_dens_pmf_binomial',
-               'stats_dens_pmf_hypergeometric',
-               'stats_dens_pmf_poisson',
-               'stats_dens_t',
-               'stats_dens_weibull',
-               'stats_harmonic_mean',
-               'stats_kurtosis',
-               'stats_rand_gen_beta',
-               'stats_rand_gen_chisquare',
-               'stats_rand_gen_exponential',
-               'stats_rand_gen_f',
-               'stats_rand_gen_funiform',
-               'stats_rand_gen_gamma',
-               'stats_rand_gen_ibinomial_negative',
-               'stats_rand_gen_ibinomial',
-               'stats_rand_gen_int',
-               'stats_rand_gen_ipoisson',
-               'stats_rand_gen_iuniform',
-               'stats_rand_gen_noncenral_chisquare',
-               'stats_rand_gen_noncentral_f',
-               'stats_rand_gen_noncentral_t',
-               'stats_rand_gen_normal',
-               'stats_rand_gen_t',
-               'stats_rand_get_seeds',
-               'stats_rand_phrase_to_seeds',
-               'stats_rand_ranf',
-               'stats_rand_setall',
-               'stats_skew',
-               'stats_standard_deviation',
-               'stats_stat_binomial_coef',
-               'stats_stat_correlation',
-               'stats_stat_gennch',
-               'stats_stat_independent_t',
-               'stats_stat_innerproduct',
-               'stats_stat_noncentral_t',
-               'stats_stat_paired_t',
-               'stats_stat_percentile',
-               'stats_stat_powersum',
-               'stats_variance'],
- 'Stomp': ['stomp_connect_error', 'stomp_version'],
- 'Stream': ['set_socket_blocking',
-            'stream_bucket_append',
-            'stream_bucket_make_writeable',
-            'stream_bucket_new',
-            'stream_bucket_prepend',
-            'stream_context_create',
-            'stream_context_get_default',
-            'stream_context_get_options',
-            'stream_context_get_params',
-            'stream_context_set_default',
-            'stream_context_set_option',
-            'stream_context_set_params',
-            'stream_copy_to_stream',
-            'stream_encoding',
-            'stream_filter_append',
-            'stream_filter_prepend',
-            'stream_filter_register',
-            'stream_filter_remove',
-            'stream_get_contents',
-            'stream_get_filters',
-            'stream_get_line',
-            'stream_get_meta_data',
-            'stream_get_transports',
-            'stream_get_wrappers',
-            'stream_is_local',
-            'stream_notification_callback',
-            'stream_register_wrapper',
-            'stream_resolve_include_path',
-            'stream_select'],
- 'String': ['addcslashes',
-            'addslashes',
-            'bin2hex',
-            'chop',
-            'chr',
-            'chunk_split',
-            'convert_cyr_string',
-            'convert_uudecode',
-            'convert_uuencode',
-            'count_chars',
-            'crc32',
-            'crypt',
-            'echo',
-            'explode',
-            'fprintf',
-            'get_html_translation_table',
-            'hebrev',
-            'hebrevc',
-            'html_entity_decode',
-            'htmlentities',
-            'htmlspecialchars_decode',
-            'htmlspecialchars',
-            'implode',
-            'join',
-            'lcfirst',
-            'levenshtein',
-            'localeconv',
-            'ltrim',
-            'md5_file',
-            'md5',
-            'metaphone',
-            'money_format',
-            'nl_langinfo',
-            'nl2br',
-            'number_format',
-            'ord',
-            'parse_str',
-            'print',
-            'printf',
-            'quoted_printable_decode',
-            'quoted_printable_encode',
-            'quotemeta',
-            'rtrim',
-            'setlocale',
-            'sha1_file',
-            'sha1',
-            'similar_text',
-            'soundex',
-            'sprintf',
-            'sscanf',
-            'str_getcsv',
-            'str_ireplace',
-            'str_pad',
-            'str_repeat',
-            'str_replace',
-            'str_rot13',
-            'str_shuffle',
-            'str_split',
-            'str_word_count',
-            'strcasecmp',
-            'strchr',
-            'strcmp',
-            'strcoll',
-            'strcspn',
-            'strip_tags',
-            'stripcslashes',
-            'stripos',
-            'stripslashes',
-            'stristr',
-            'strlen',
-            'strnatcasecmp',
-            'strnatcmp',
-            'strncasecmp',
-            'strncmp',
-            'strpbrk',
-            'strpos',
-            'strrchr',
-            'strrev',
-            'strripos',
-            'strrpos',
-            'strspn'],
- 'Sybase': ['sybase_affected_rows',
-            'sybase_close',
-            'sybase_connect',
-            'sybase_data_seek',
-            'sybase_deadlock_retry_count',
-            'sybase_fetch_array',
-            'sybase_fetch_assoc',
-            'sybase_fetch_field',
-            'sybase_fetch_object',
-            'sybase_fetch_row',
-            'sybase_field_seek',
-            'sybase_free_result',
-            'sybase_get_last_message',
-            'sybase_min_client_severity',
-            'sybase_min_error_severity',
-            'sybase_min_message_severity',
-            'sybase_min_server_severity',
-            'sybase_num_fields',
-            'sybase_num_rows',
-            'sybase_pconnect',
-            'sybase_query',
-            'sybase_result',
-            'sybase_select_db',
-            'sybase_set_message_handler',
-            'sybase_unbuffered_query'],
- 'TCP': ['tcpwrap_check'],
- 'Tidy': ['ob_tidyhandler',
-          'tidy_access_count',
-          'tidy_config_count',
-          'tidy_error_count',
-          'tidy_get_error_buffer',
-          'tidy_get_output',
-          'tidy_load_config',
-          'tidy_reset_config',
-          'tidy_save_config',
-          'tidy_set_encoding',
-          'tidy_setopt',
-          'tidy_warning_count'],
- 'Tokenizer': ['token_get_all', 'token_name'],
- 'URL': ['base64_decode',
-         'base64_encode',
-         'get_headers',
-         'get_meta_tags',
-         'http_build_query',
-         'parse_url',
-         'rawurldecode',
-         'rawurlencode',
-         'urldecode',
-         'urlencode'],
- 'Variable handling': ['debug_zval_dump',
-                       'doubleval',
-                       'empty',
-                       'floatval',
-                       'get_defined_vars',
-                       'get_resource_type',
-                       'gettype',
-                       'import_request_variables',
-                       'intval',
-                       'is_array',
-                       'is_bool',
-                       'is_callable',
-                       'is_double',
-                       'is_float',
-                       'is_int',
-                       'is_integer',
-                       'is_long',
-                       'is_null',
-                       'is_numeric',
-                       'is_object',
-                       'is_real',
-                       'is_resource',
-                       'is_scalar',
-                       'is_string',
-                       'isset',
-                       'print_r',
-                       'serialize',
-                       'settype',
-                       'strval',
-                       'unserialize',
-                       'unset',
-                       'var_dump',
-                       'var_export'],
- 'W32api': ['w32api_deftype',
-            'w32api_init_dtype',
-            'w32api_invoke_function',
-            'w32api_register_function',
-            'w32api_set_call_method'],
- 'WDDX': ['wddx_add_vars',
-          'wddx_deserialize',
-          'wddx_packet_end',
-          'wddx_packet_start',
-          'wddx_serialize_value',
-          'wddx_serialize_vars',
-          'wddx_unserialize'],
- 'WinCache': ['wincache_fcache_fileinfo',
-              'wincache_fcache_meminfo',
-              'wincache_lock',
-              'wincache_ocache_fileinfo',
-              'wincache_ocache_meminfo',
-              'wincache_refresh_if_changed',
-              'wincache_rplist_fileinfo',
-              'wincache_rplist_meminfo',
-              'wincache_scache_info',
-              'wincache_scache_meminfo',
-              'wincache_ucache_add',
-              'wincache_ucache_cas',
-              'wincache_ucache_clear',
-              'wincache_ucache_dec',
-              'wincache_ucache_delete',
-              'wincache_ucache_exists',
-              'wincache_ucache_get',
-              'wincache_ucache_inc',
-              'wincache_ucache_info',
-              'wincache_ucache_meminfo',
-              'wincache_ucache_set',
-              'wincache_unlock'],
- 'XML Parser': ['utf8_decode'],
- 'XML-RPC': ['xmlrpc_decode_request',
-             'xmlrpc_decode',
-             'xmlrpc_encode_request',
-             'xmlrpc_encode',
-             'xmlrpc_get_type',
-             'xmlrpc_is_fault',
-             'xmlrpc_parse_method_descriptions',
-             'xmlrpc_server_add_introspection_data',
-             'xmlrpc_server_call_method',
-             'xmlrpc_server_create',
-             'xmlrpc_server_destroy',
-             'xmlrpc_server_register_introspection_callback',
-             'xmlrpc_server_register_method',
-             'xmlrpc_set_type'],
- 'XSLT (PHP4)': ['xslt_backend_info',
-                 'xslt_backend_name',
-                 'xslt_backend_version',
-                 'xslt_create',
-                 'xslt_errno',
-                 'xslt_error',
-                 'xslt_free',
-                 'xslt_getopt',
-                 'xslt_process',
-                 'xslt_set_base',
-                 'xslt_set_encoding',
-                 'xslt_set_error_handler',
-                 'xslt_set_log',
-                 'xslt_set_object',
-                 'xslt_set_sax_handler',
-                 'xslt_set_sax_handlers',
-                 'xslt_set_scheme_handler',
-                 'xslt_set_scheme_handlers',
-                 'xslt_setopt'],
- 'YAZ': ['yaz_addinfo',
-         'yaz_ccl_conf',
-         'yaz_ccl_parse',
-         'yaz_close',
-         'yaz_connect',
-         'yaz_database',
-         'yaz_element',
-         'yaz_errno',
-         'yaz_error',
-         'yaz_es_result',
-         'yaz_es',
-         'yaz_get_option',
-         'yaz_hits',
-         'yaz_itemorder',
-         'yaz_present',
-         'yaz_range',
-         'yaz_record',
-         'yaz_scan_result',
-         'yaz_scan',
-         'yaz_schema',
-         'yaz_search',
-         'yaz_set_option',
-         'yaz_sort',
-         'yaz_syntax',
-         'yaz_wait'],
- 'YP/NIS': ['yp_all',
-            'yp_cat',
-            'yp_err_string',
-            'yp_errno',
-            'yp_first',
-            'yp_get_default_domain',
-            'yp_master',
-            'yp_match',
-            'yp_next',
-            'yp_order'],
- 'Yaml': ['yaml_emit_file',
-          'yaml_emit',
-          'yaml_parse_file',
-          'yaml_parse_url',
-          'yaml_parse'],
- 'Zip': ['zip_close',
-         'zip_entry_close',
-         'zip_entry_compressedsize',
-         'zip_entry_compressionmethod',
-         'zip_entry_filesize',
-         'zip_entry_name',
-         'zip_entry_open',
-         'zip_entry_read',
-         'zip_open',
-         'zip_read'],
- 'Zlib': ['gzclose',
-          'gzcompress',
-          'gzdecode',
-          'gzdeflate',
-          'gzencode',
-          'gzeof',
-          'gzfile',
-          'gzgetc',
-          'gzgets',
-          'gzgetss',
-          'gzinflate',
-          'gzopen',
-          'gzpassthru',
-          'gzputs',
-          'gzread',
-          'gzrewind',
-          'gzseek',
-          'gztell',
-          'gzuncompress',
-          'gzwrite',
-          'readgzfile',
-          'zlib_get_coding_type'],
- 'bcompiler': ['bcompiler_load_exe',
-               'bcompiler_load',
-               'bcompiler_parse_class',
-               'bcompiler_read',
-               'bcompiler_write_class',
-               'bcompiler_write_constant',
-               'bcompiler_write_exe_footer',
-               'bcompiler_write_file',
-               'bcompiler_write_footer',
-               'bcompiler_write_function',
-               'bcompiler_write_functions_from_file',
-               'bcompiler_write_header',
-               'bcompiler_write_included_filename'],
- 'cURL': ['curl_close',
-          'curl_copy_handle',
-          'curl_errno',
-          'curl_error',
-          'curl_exec',
-          'curl_getinfo',
-          'curl_init',
-          'curl_multi_add_handle',
-          'curl_multi_close',
-          'curl_multi_exec',
-          'curl_multi_getcontent',
-          'curl_multi_info_read',
-          'curl_multi_init',
-          'curl_multi_remove_handle',
-          'curl_multi_select',
-          'curl_setopt_array',
-          'curl_setopt',
-          'curl_version'],
- 'chdb': ['chdb_create'],
- 'dBase': ['dbase_add_record',
-           'dbase_close',
-           'dbase_create',
-           'dbase_delete_record',
-           'dbase_get_header_info',
-           'dbase_get_record_with_names',
-           'dbase_get_record',
-           'dbase_numfields',
-           'dbase_numrecords',
-           'dbase_open',
-           'dbase_pack',
-           'dbase_replace_record'],
- 'dbx': ['dbx_close',
-         'dbx_compare',
-         'dbx_connect',
-         'dbx_error',
-         'dbx_escape_string',
-         'dbx_fetch_row'],
- 'filePro': ['filepro_fieldcount',
-             'filepro_fieldname',
-             'filepro_fieldtype',
-             'filepro_fieldwidth',
-             'filepro_retrieve',
-             'filepro_rowcount',
-             'filepro'],
- 'iconv': ['iconv_get_encoding',
-           'iconv_mime_decode_headers',
-           'iconv_mime_decode',
-           'iconv_mime_encode',
-           'iconv_set_encoding',
-           'iconv_strlen',
-           'iconv_strpos',
-           'iconv_strrpos',
-           'iconv_substr',
-           'iconv',
-           'ob_iconv_handler'],
- 'inclued': ['inclued_get_data'],
- 'intl': ['intl_error_name',
-          'intl_get_error_code',
-          'intl_get_error_message',
-          'intl_is_failure'],
- 'libxml': ['libxml_clear_errors',
-            'libxml_disable_entity_loader',
-            'libxml_get_errors',
-            'libxml_get_last_error',
-            'libxml_set_streams_context',
-            'libxml_use_internal_errors'],
- 'mSQL': ['msql_affected_rows',
-          'msql_close',
-          'msql_connect',
-          'msql_create_db',
-          'msql_createdb',
-          'msql_data_seek',
-          'msql_db_query',
-          'msql_dbname',
-          'msql_drop_db',
-          'msql_error',
-          'msql_fetch_array',
-          'msql_fetch_field',
-          'msql_fetch_object',
-          'msql_fetch_row',
-          'msql_field_flags',
-          'msql_field_len',
-          'msql_field_name',
-          'msql_field_seek',
-          'msql_field_table',
-          'msql_field_type',
-          'msql_fieldflags',
-          'msql_fieldlen',
-          'msql_fieldname',
-          'msql_fieldtable',
-          'msql_fieldtype',
-          'msql_free_result',
-          'msql_list_dbs',
-          'msql_list_fields',
-          'msql_list_tables',
-          'msql_num_fields',
-          'msql_num_rows',
-          'msql_numfields',
-          'msql_numrows',
-          'msql_pconnect',
-          'msql_query',
-          'msql_regcase',
-          'msql_result',
-          'msql_select_db',
-          'msql_tablename',
-          'msql'],
- 'mnoGoSearch': ['udm_add_search_limit',
-                 'udm_alloc_agent_array',
-                 'udm_alloc_agent',
-                 'udm_api_version',
-                 'udm_cat_list',
-                 'udm_cat_path',
-                 'udm_check_charset',
-                 'udm_check_stored',
-                 'udm_clear_search_limits',
-                 'udm_close_stored',
-                 'udm_crc32',
-                 'udm_errno',
-                 'udm_error',
-                 'udm_find',
-                 'udm_free_agent',
-                 'udm_free_ispell_data',
-                 'udm_free_res',
-                 'udm_get_doc_count',
-                 'udm_get_res_field',
-                 'udm_get_res_param',
-                 'udm_hash32',
-                 'udm_load_ispell_data',
-                 'udm_open_stored',
-                 'udm_set_agent_param'],
- 'mqseries': ['mqseries_back',
-              'mqseries_begin',
-              'mqseries_close',
-              'mqseries_cmit',
-              'mqseries_conn',
-              'mqseries_connx',
-              'mqseries_disc',
-              'mqseries_get',
-              'mqseries_inq',
-              'mqseries_open',
-              'mqseries_put1',
-              'mqseries_put',
-              'mqseries_set',
-              'mqseries_strerror'],
- 'mysqlnd_qc': ['mysqlnd_qc_change_handler',
-                'mysqlnd_qc_clear_cache',
-                'mysqlnd_qc_get_cache_info',
-                'mysqlnd_qc_get_core_stats',
-                'mysqlnd_qc_get_handler',
-                'mysqlnd_qc_get_query_trace_log',
-                'mysqlnd_qc_set_user_handlers'],
- 'qtdom': ['qdom_error', 'qdom_tree'],
- 'runkit': ['runkit_class_adopt',
-            'runkit_class_emancipate',
-            'runkit_constant_add',
-            'runkit_constant_redefine',
-            'runkit_constant_remove',
-            'runkit_function_add',
-            'runkit_function_copy',
-            'runkit_function_redefine',
-            'runkit_function_remove',
-            'runkit_function_rename',
-            'runkit_import',
-            'runkit_lint_file',
-            'runkit_lint',
-            'runkit_method_add',
-            'runkit_method_copy',
-            'runkit_method_redefine',
-            'runkit_method_remove',
-            'runkit_method_rename',
-            'runkit_return_value_used',
-            'runkit_sandbox_output_handler',
-            'runkit_superglobals'],
- 'ssdeep': ['ssdeep_fuzzy_compare',
-            'ssdeep_fuzzy_hash_filename',
-            'ssdeep_fuzzy_hash'],
- 'vpopmail': ['vpopmail_add_alias_domain_ex',
-              'vpopmail_add_alias_domain',
-              'vpopmail_add_domain_ex',
-              'vpopmail_add_domain',
-              'vpopmail_add_user',
-              'vpopmail_alias_add',
-              'vpopmail_alias_del_domain',
-              'vpopmail_alias_del',
-              'vpopmail_alias_get_all',
-              'vpopmail_alias_get',
-              'vpopmail_auth_user',
-              'vpopmail_del_domain_ex',
-              'vpopmail_del_domain',
-              'vpopmail_del_user',
-              'vpopmail_error',
-              'vpopmail_passwd',
-              'vpopmail_set_user_quota'],
- 'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
- 'win32service': ['win32_continue_service',
-                  'win32_create_service',
-                  'win32_delete_service',
-                  'win32_get_last_control_message',
-                  'win32_pause_service',
-                  'win32_query_service_status',
-                  'win32_set_service_status',
-                  'win32_start_service_ctrl_dispatcher',
-                  'win32_start_service',
-                  'win32_stop_service'],
- 'xattr': ['xattr_get',
-           'xattr_list',
-           'xattr_remove',
-           'xattr_set',
-           'xattr_supported'],
- 'xdiff': ['xdiff_file_bdiff_size',
-           'xdiff_file_bdiff',
-           'xdiff_file_bpatch',
-           'xdiff_file_diff_binary',
-           'xdiff_file_diff',
-           'xdiff_file_merge3',
-           'xdiff_file_patch_binary',
-           'xdiff_file_patch',
-           'xdiff_file_rabdiff',
-           'xdiff_string_bdiff_size',
-           'xdiff_string_bdiff',
-           'xdiff_string_bpatch',
-           'xdiff_string_diff_binary',
-           'xdiff_string_diff',
-           'xdiff_string_merge3',
-           'xdiff_string_patch_binary',
-           'xdiff_string_patch',
-           'xdiff_string_rabdiff']}
-
-if __name__ == '__main__':
-    import glob
-    import os
-    import pprint
-    import re
-    import shutil
-    import tarfile
-    import urllib
-
-    PHP_MANUAL_URL     = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
-    PHP_MANUAL_DIR     = './php-chunked-xhtml/'
-    PHP_REFERENCE_GLOB = 'ref.*'
-    PHP_FUNCTION_RE    = '<a href="function\..*?\.html">(.*?)</a>'
-    PHP_MODULE_RE      = '<title>(.*?) Functions</title>'
-
-    def get_php_functions():
-        function_re = re.compile(PHP_FUNCTION_RE)
-        module_re   = re.compile(PHP_MODULE_RE)
-        modules     = {}
-
-        for file in get_php_references():
-            module = ''
-            for line in open(file):
-                if not module:
-                    search = module_re.search(line)
-                    if search:
-                        module = search.group(1)
-                        modules[module] = []
-
-                elif '<h2>Table of Contents</h2>' in line:
-                    for match in function_re.finditer(line):
-                        fn = match.group(1)
-                        if '->' not in fn and '::' not in fn:
-                            modules[module].append(fn)
-
-                    # These are dummy manual pages, not actual functions
-                    if module == 'PHP Options/Info':
-                        modules[module].remove('main')
-                    elif module == 'Filesystem':
-                        modules[module].remove('delete')
-
-                    if not modules[module]:
-                        del modules[module]
-
-                    break
-        return modules
-
-    def get_php_references():
-        download = urllib.urlretrieve(PHP_MANUAL_URL)
-        tar = tarfile.open(download[0])
-        tar.extractall()
-        tar.close()
-        for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
-            yield file
-        os.remove(download[0])
-
-    def regenerate(filename, modules):
-        f = open(filename)
-        try:
-            content = f.read()
-        finally:
-            f.close()
-
-        header = content[:content.find('MODULES = {')]
-        footer = content[content.find("if __name__ == '__main__':"):]
-
-        f = open(filename, 'w')
-        f.write(header)
-        f.write('MODULES = %s\n\n' % pprint.pformat(modules))
-        f.write(footer)
-        f.close()
-
-    def run():
-        print '>> Downloading Function Index'
-        modules = get_php_functions()
-        total = sum(len(v) for v in modules.itervalues())
-        print '%d functions found' % total
-        regenerate(__file__, modules)
-        shutil.rmtree(PHP_MANUAL_DIR)
-
-    run()
diff --git a/python/ext-libs/pygments/lexers/_postgres_builtins.py b/python/ext-libs/pygments/lexers/_postgres_builtins.py
deleted file mode 100644
index b232213..0000000
--- a/python/ext-libs/pygments/lexers/_postgres_builtins.py
+++ /dev/null
@@ -1,233 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._postgres_builtins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Self-updating data files for PostgreSQL lexer.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-import urllib
-
-# One man's constant is another man's variable.
-SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
-KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
-DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
-
-def update_myself():
-    data_file = list(fetch(DATATYPES_URL))
-    datatypes = parse_datatypes(data_file)
-    pseudos = parse_pseudos(data_file)
-
-    keywords = parse_keywords(fetch(KEYWORDS_URL))
-    update_consts(__file__, 'DATATYPES', datatypes)
-    update_consts(__file__, 'PSEUDO_TYPES', pseudos)
-    update_consts(__file__, 'KEYWORDS', keywords)
-
-def parse_keywords(f):
-    kw = []
-    for m in re.finditer(
-            r'\s*<entry><token>([^<]+)</token></entry>\s*'
-            r'<entry>([^<]+)</entry>', f.read()):
-        kw.append(m.group(1))
-
-    if not kw:
-        raise ValueError('no keyword found')
-
-    kw.sort()
-    return kw
-
-def parse_datatypes(f):
-    dt = set()
-    for line in f:
-        if '<sect1' in line:
-            break
-        if '<entry><type>' not in line:
-            continue
-
-        # Parse a string such as
-        # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
-        # into types "time" and "without time zone"
-
-        # remove all the tags
-        line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
-        line = re.sub("<[^>]+>", "", line)
-
-        # Drop the parts containing braces
-        for tmp in [t for tmp in line.split('[')
-                    for t in tmp.split(']') if "(" not in t]:
-            for t in tmp.split(','):
-                t = t.strip()
-                if not t: continue
-                dt.add(" ".join(t.split()))
-
-    dt = list(dt)
-    dt.sort()
-    return dt
-
-def parse_pseudos(f):
-    dt = []
-    re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
-    re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
-    re_end = re.compile(r'\s*</table>')
-
-    f = iter(f)
-    for line in f:
-        if re_start.match(line) is not None:
-            break
-    else:
-        raise ValueError('pseudo datatypes table not found')
-
-    for line in f:
-        m = re_entry.match(line)
-        if m is not None:
-            dt.append(m.group(1))
-
-        if re_end.match(line) is not None:
-            break
-    else:
-        raise ValueError('end of pseudo datatypes table not found')
-
-    if not dt:
-        raise ValueError('pseudo datatypes not found')
-
-    return dt
-
-def fetch(url):
-    return urllib.urlopen(url)
-
-def update_consts(filename, constname, content):
-    f = open(filename)
-    lines = f.readlines()
-    f.close()
-
-    # Line to start/end inserting
-    re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
-    re_end = re.compile(r'^\s*\]\s*$')
-    start = [ n for n, l in enumerate(lines) if re_start.match(l) ]
-    if not start:
-        raise ValueError("couldn't find line containing '%s = ['" % constname)
-    if len(start) > 1:
-        raise ValueError("too many lines containing '%s = ['" % constname)
-    start = start[0] + 1
-
-    end = [ n for n, l in enumerate(lines) if n >= start and re_end.match(l) ]
-    if not end:
-        raise ValueError("couldn't find line containing ']' after %s " % constname)
-    end = end[0]
-
-    # Pack the new content in lines not too long
-    content = [repr(item) for item in content ]
-    new_lines = [[]]
-    for item in content:
-        if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
-            new_lines.append([])
-        new_lines[-1].append(item)
-
-    lines[start:end] = [ "    %s,\n" % ", ".join(items) for items in new_lines ]
-
-    f = open(filename, 'w')
-    f.write(''.join(lines))
-    f.close()
-
-
-# Autogenerated: please edit them if you like wasting your time.
-
-KEYWORDS = [
-    'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
-    'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
-    'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
-    'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
-    'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
-    'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
-    'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
-    'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
-    'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
-    'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
-    'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
-    'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
-    'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
-    'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
-    'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
-    'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
-    'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
-    'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
-    'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
-    'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
-    'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
-    'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
-    'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
-    'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
-    'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
-    'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
-    'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
-    'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
-    'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
-    'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
-    'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
-    'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
-    'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
-    'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
-    'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
-    'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
-    'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
-    'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
-    'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
-    'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
-    'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
-    'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
-    'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
-    'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
-    'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
-    'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
-    'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
-    'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
-    'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
-    'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
-    'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
-    'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
-    'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
-    'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
-    'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
-    'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
-    'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
-    'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
-    'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
-    'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
-    'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
-    'YES', 'ZONE',
-    ]
-
-DATATYPES = [
-    'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
-    'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
-    'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
-    'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
-    'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
-    'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
-    'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
-    'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
-    'with time zone', 'without time zone', 'xml',
-    ]
-
-PSEUDO_TYPES = [
-    'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
-    'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
-    'trigger', 'void', 'opaque',
-    ]
-
-# Remove 'trigger' from types
-PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))
-
-PLPGSQL_KEYWORDS = [
-    'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
-    'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
-    'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
-    ]
-
-if __name__ == '__main__':
-    update_myself()
-
diff --git a/python/ext-libs/pygments/lexers/_robotframeworklexer.py b/python/ext-libs/pygments/lexers/_robotframeworklexer.py
deleted file mode 100644
index 0192d28..0000000
--- a/python/ext-libs/pygments/lexers/_robotframeworklexer.py
+++ /dev/null
@@ -1,557 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._robotframeworklexer
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Robot Framework.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-#  Copyright 2012 Nokia Siemens Networks Oyj
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token
-
-
-HEADING = Token.Generic.Heading
-SETTING = Token.Keyword.Namespace
-IMPORT = Token.Name.Namespace
-TC_KW_NAME = Token.Generic.Subheading
-KEYWORD = Token.Name.Function
-ARGUMENT = Token.String
-VARIABLE = Token.Name.Variable
-COMMENT = Token.Comment
-SEPARATOR = Token.Punctuation
-SYNTAX = Token.Punctuation
-GHERKIN = Token.Generic.Emph
-ERROR = Token.Error
-
-
-def normalize(string, remove=''):
-    string = string.lower()
-    for char in remove + ' ':
-        if char in string:
-            string = string.replace(char, '')
-    return string
-
-
-class RobotFrameworkLexer(Lexer):
-    """
-    For `Robot Framework <http://robotframework.org>`_ test data.
-
-    Supports both space and pipe separated plain text formats.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'RobotFramework'
-    aliases = ['RobotFramework', 'robotframework']
-    filenames = ['*.txt', '*.robot']
-    mimetypes = ['text/x-robotframework']
-
-    def __init__(self, **options):
-        options['tabsize'] = 2
-        options['encoding'] = 'UTF-8'
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        row_tokenizer = RowTokenizer()
-        var_tokenizer = VariableTokenizer()
-        index = 0
-        for row in text.splitlines():
-            for value, token in row_tokenizer.tokenize(row):
-                for value, token in var_tokenizer.tokenize(value, token):
-                    if value:
-                        yield index, token, unicode(value)
-                        index += len(value)
-
-
-class VariableTokenizer(object):
-
-    def tokenize(self, string, token):
-        var = VariableSplitter(string, identifiers='$@%')
-        if var.start < 0 or token in (COMMENT, ERROR):
-            yield string, token
-            return
-        for value, token in self._tokenize(var, string, token):
-            if value:
-                yield value, token
-
-    def _tokenize(self, var, string, orig_token):
-        before = string[:var.start]
-        yield before, orig_token
-        yield var.identifier + '{', SYNTAX
-        for value, token in self.tokenize(var.base, VARIABLE):
-            yield value, token
-        yield '}', SYNTAX
-        if var.index:
-            yield '[', SYNTAX
-            for value, token in self.tokenize(var.index, VARIABLE):
-                yield value, token
-            yield ']', SYNTAX
-        for value, token in self.tokenize(string[var.end:], orig_token):
-            yield value, token
-
-
-class RowTokenizer(object):
-
-    def __init__(self):
-        self._table = UnknownTable()
-        self._splitter = RowSplitter()
-        testcases = TestCaseTable()
-        settings = SettingTable(testcases.set_default_template)
-        variables = VariableTable()
-        keywords = KeywordTable()
-        self._tables = {'settings': settings, 'setting': settings,
-                        'metadata': settings,
-                        'variables': variables, 'variable': variables,
-                        'testcases': testcases, 'testcase': testcases,
-                        'keywords': keywords, 'keyword': keywords,
-                        'userkeywords': keywords, 'userkeyword': keywords}
-
-    def tokenize(self, row):
-        commented = False
-        heading = False
-        for index, value in enumerate(self._splitter.split(row)):
-            # First value, and every second after that, is a separator.
-            index, separator = divmod(index-1, 2)
-            if value.startswith('#'):
-                commented = True
-            elif index == 0 and value.startswith('*'):
-                self._table = self._start_table(value)
-                heading = True
-            for value, token in self._tokenize(value, index, commented,
-                                               separator, heading):
-                yield value, token
-        self._table.end_row()
-
-    def _start_table(self, header):
-        name = normalize(header, remove='*')
-        return self._tables.get(name, UnknownTable())
-
-    def _tokenize(self, value, index, commented, separator, heading):
-        if commented:
-            yield value, COMMENT
-        elif separator:
-            yield value, SEPARATOR
-        elif heading:
-            yield value, HEADING
-        else:
-            for value, token in self._table.tokenize(value, index):
-                yield value, token
-
-
-class RowSplitter(object):
-    _space_splitter = re.compile('( {2,})')
-    _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))')
-
-    def split(self, row):
-        splitter = (row.startswith('| ') and self._split_from_pipes
-                    or self._split_from_spaces)
-        for value in splitter(row.rstrip()):
-            yield value
-        yield '\n'
-
-    def _split_from_spaces(self, row):
-        yield ''  # Start with (pseudo)separator similarly as with pipes
-        for value in self._space_splitter.split(row):
-            yield value
-
-    def _split_from_pipes(self, row):
-        _, separator, rest = self._pipe_splitter.split(row, 1)
-        yield separator
-        while self._pipe_splitter.search(rest):
-            cell, separator, rest = self._pipe_splitter.split(rest, 1)
-            yield cell
-            yield separator
-        yield rest
-
-
-class Tokenizer(object):
-    _tokens = None
-
-    def __init__(self):
-        self._index = 0
-
-    def tokenize(self, value):
-        values_and_tokens = self._tokenize(value, self._index)
-        self._index += 1
-        if isinstance(values_and_tokens, type(Token)):
-            values_and_tokens = [(value, values_and_tokens)]
-        return values_and_tokens
-
-    def _tokenize(self, value, index):
-        index = min(index, len(self._tokens) - 1)
-        return self._tokens[index]
-
-    def _is_assign(self, value):
-        if value.endswith('='):
-            value = value[:-1].strip()
-        var = VariableSplitter(value, identifiers='$@')
-        return var.start == 0 and var.end == len(value)
-
-
-class Comment(Tokenizer):
-    _tokens = (COMMENT,)
-
-
-class Setting(Tokenizer):
-    _tokens = (SETTING, ARGUMENT)
-    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
-                         'suitepostcondition', 'testsetup', 'testprecondition',
-                         'testteardown', 'testpostcondition', 'testtemplate')
-    _import_settings = ('library', 'resource', 'variables')
-    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
-                       'testtimeout')
-    _custom_tokenizer = None
-
-    def __init__(self, template_setter=None):
-        Tokenizer.__init__(self)
-        self._template_setter = template_setter
-
-    def _tokenize(self, value, index):
-        if index == 1 and self._template_setter:
-            self._template_setter(value)
-        if index == 0:
-            normalized = normalize(value)
-            if normalized in self._keyword_settings:
-                self._custom_tokenizer = KeywordCall(support_assign=False)
-            elif normalized in self._import_settings:
-                self._custom_tokenizer = ImportSetting()
-            elif normalized not in self._other_settings:
-                return ERROR
-        elif self._custom_tokenizer:
-            return self._custom_tokenizer.tokenize(value)
-        return Tokenizer._tokenize(self, value, index)
-
-
-class ImportSetting(Tokenizer):
-    _tokens = (IMPORT, ARGUMENT)
-
-
-class TestCaseSetting(Setting):
-    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
-                         'template')
-    _import_settings = ()
-    _other_settings = ('documentation', 'tags', 'timeout')
-
-    def _tokenize(self, value, index):
-        if index == 0:
-            type = Setting._tokenize(self, value[1:-1], index)
-            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
-        return Setting._tokenize(self, value, index)
-
-
-class KeywordSetting(TestCaseSetting):
-    _keyword_settings = ('teardown',)
-    _other_settings = ('documentation', 'arguments', 'return', 'timeout')
-
-
-class Variable(Tokenizer):
-    _tokens = (SYNTAX, ARGUMENT)
-
-    def _tokenize(self, value, index):
-        if index == 0 and not self._is_assign(value):
-            return ERROR
-        return Tokenizer._tokenize(self, value, index)
-
-
-class KeywordCall(Tokenizer):
-    _tokens = (KEYWORD, ARGUMENT)
-
-    def __init__(self, support_assign=True):
-        Tokenizer.__init__(self)
-        self._keyword_found = not support_assign
-        self._assigns = 0
-
-    def _tokenize(self, value, index):
-        if not self._keyword_found and self._is_assign(value):
-            self._assigns += 1
-            return SYNTAX  # VariableTokenizer tokenizes this later.
-        if self._keyword_found:
-            return Tokenizer._tokenize(self, value, index - self._assigns)
-        self._keyword_found = True
-        return GherkinTokenizer().tokenize(value, KEYWORD)
-
-
-class GherkinTokenizer(object):
-    _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
-
-    def tokenize(self, value, token):
-        match = self._gherkin_prefix.match(value)
-        if not match:
-            return [(value, token)]
-        end = match.end()
-        return [(value[:end], GHERKIN), (value[end:], token)]
-
-
-class TemplatedKeywordCall(Tokenizer):
-    _tokens = (ARGUMENT,)
-
-
-class ForLoop(Tokenizer):
-
-    def __init__(self):
-        Tokenizer.__init__(self)
-        self._in_arguments = False
-
-    def _tokenize(self, value, index):
-        token = self._in_arguments and ARGUMENT or SYNTAX
-        if value.upper() in ('IN', 'IN RANGE'):
-            self._in_arguments = True
-        return token
-
-
-class _Table(object):
-    _tokenizer_class = None
-
-    def __init__(self, prev_tokenizer=None):
-        self._tokenizer = self._tokenizer_class()
-        self._prev_tokenizer = prev_tokenizer
-        self._prev_values_on_row = []
-
-    def tokenize(self, value, index):
-        if self._continues(value, index):
-            self._tokenizer = self._prev_tokenizer
-            yield value, SYNTAX
-        else:
-            for value_and_token in self._tokenize(value, index):
-                yield value_and_token
-        self._prev_values_on_row.append(value)
-
-    def _continues(self, value, index):
-        return value == '...' and all(self._is_empty(t)
-                                      for t in self._prev_values_on_row)
-
-    def _is_empty(self, value):
-        return value in ('', '\\')
-
-    def _tokenize(self, value, index):
-        return self._tokenizer.tokenize(value)
-
-    def end_row(self):
-        self.__init__(prev_tokenizer=self._tokenizer)
-
-
-class UnknownTable(_Table):
-    _tokenizer_class = Comment
-
-    def _continues(self, value, index):
-        return False
-
-
-class VariableTable(_Table):
-    _tokenizer_class = Variable
-
-
-class SettingTable(_Table):
-    _tokenizer_class = Setting
-
-    def __init__(self, template_setter, prev_tokenizer=None):
-        _Table.__init__(self, prev_tokenizer)
-        self._template_setter = template_setter
-
-    def _tokenize(self, value, index):
-        if index == 0 and normalize(value) == 'testtemplate':
-            self._tokenizer = Setting(self._template_setter)
-        return _Table._tokenize(self, value, index)
-
-    def end_row(self):
-        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
-
-
-class TestCaseTable(_Table):
-    _setting_class = TestCaseSetting
-    _test_template = None
-    _default_template = None
-
-    @property
-    def _tokenizer_class(self):
-        if self._test_template or (self._default_template and
-                                   self._test_template is not False):
-            return TemplatedKeywordCall
-        return KeywordCall
-
-    def _continues(self, value, index):
-        return index > 0 and _Table._continues(self, value, index)
-
-    def _tokenize(self, value, index):
-        if index == 0:
-            if value:
-                self._test_template = None
-            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
-        if index == 1 and self._is_setting(value):
-            if self._is_template(value):
-                self._test_template = False
-                self._tokenizer = self._setting_class(self.set_test_template)
-            else:
-                self._tokenizer = self._setting_class()
-        if index == 1 and self._is_for_loop(value):
-            self._tokenizer = ForLoop()
-        if index == 1 and self._is_empty(value):
-            return [(value, SYNTAX)]
-        return _Table._tokenize(self, value, index)
-
-    def _is_setting(self, value):
-        return value.startswith('[') and value.endswith(']')
-
-    def _is_template(self, value):
-        return normalize(value) == '[template]'
-
-    def _is_for_loop(self, value):
-        return value.startswith(':') and normalize(value, remove=':') == 'for'
-
-    def set_test_template(self, template):
-        self._test_template = self._is_template_set(template)
-
-    def set_default_template(self, template):
-        self._default_template = self._is_template_set(template)
-
-    def _is_template_set(self, template):
-        return normalize(template) not in ('', '\\', 'none', '${empty}')
-
-
-class KeywordTable(TestCaseTable):
-    _tokenizer_class = KeywordCall
-    _setting_class = KeywordSetting
-
-    def _is_template(self, value):
-        return False
-
-
-# Following code copied directly from Robot Framework 2.7.5.
-
-class VariableSplitter:
-
-    def __init__(self, string, identifiers):
-        self.identifier = None
-        self.base = None
-        self.index = None
-        self.start = -1
-        self.end = -1
-        self._identifiers = identifiers
-        self._may_have_internal_variables = False
-        try:
-            self._split(string)
-        except ValueError:
-            pass
-        else:
-            self._finalize()
-
-    def get_replaced_base(self, variables):
-        if self._may_have_internal_variables:
-            return variables.replace_string(self.base)
-        return self.base
-
-    def _finalize(self):
-        self.identifier = self._variable_chars[0]
-        self.base = ''.join(self._variable_chars[2:-1])
-        self.end = self.start + len(self._variable_chars)
-        if self._has_list_variable_index():
-            self.index = ''.join(self._list_variable_index_chars[1:-1])
-            self.end += len(self._list_variable_index_chars)
-
-    def _has_list_variable_index(self):
-        return self._list_variable_index_chars\
-        and self._list_variable_index_chars[-1] == ']'
-
-    def _split(self, string):
-        start_index, max_index = self._find_variable(string)
-        self.start = start_index
-        self._open_curly = 1
-        self._state = self._variable_state
-        self._variable_chars = [string[start_index], '{']
-        self._list_variable_index_chars = []
-        self._string = string
-        start_index += 2
-        for index, char in enumerate(string[start_index:]):
-            index += start_index  # Giving start to enumerate only in Py 2.6+
-            try:
-                self._state(char, index)
-            except StopIteration:
-                return
-            if index  == max_index and not self._scanning_list_variable_index():
-                return
-
-    def _scanning_list_variable_index(self):
-        return self._state in [self._waiting_list_variable_index_state,
-                               self._list_variable_index_state]
-
-    def _find_variable(self, string):
-        max_end_index = string.rfind('}')
-        if max_end_index == -1:
-            raise ValueError('No variable end found')
-        if self._is_escaped(string, max_end_index):
-            return self._find_variable(string[:max_end_index])
-        start_index = self._find_start_index(string, 1, max_end_index)
-        if start_index == -1:
-            raise ValueError('No variable start found')
-        return start_index, max_end_index
-
-    def _find_start_index(self, string, start, end):
-        index = string.find('{', start, end) - 1
-        if index < 0:
-            return -1
-        if self._start_index_is_ok(string, index):
-            return index
-        return self._find_start_index(string, index+2, end)
-
-    def _start_index_is_ok(self, string, index):
-        return string[index] in self._identifiers\
-        and not self._is_escaped(string, index)
-
-    def _is_escaped(self, string, index):
-        escaped = False
-        while index > 0 and string[index-1] == '\\':
-            index -= 1
-            escaped = not escaped
-        return escaped
-
-    def _variable_state(self, char, index):
-        self._variable_chars.append(char)
-        if char == '}' and not self._is_escaped(self._string, index):
-            self._open_curly -= 1
-            if self._open_curly == 0:
-                if not self._is_list_variable():
-                    raise StopIteration
-                self._state = self._waiting_list_variable_index_state
-        elif char in self._identifiers:
-            self._state = self._internal_variable_start_state
-
-    def _is_list_variable(self):
-        return self._variable_chars[0] == '@'
-
-    def _internal_variable_start_state(self, char, index):
-        self._state = self._variable_state
-        if char == '{':
-            self._variable_chars.append(char)
-            self._open_curly += 1
-            self._may_have_internal_variables = True
-        else:
-            self._variable_state(char, index)
-
-    def _waiting_list_variable_index_state(self, char, index):
-        if char != '[':
-            raise StopIteration
-        self._list_variable_index_chars.append(char)
-        self._state = self._list_variable_index_state
-
-    def _list_variable_index_state(self, char, index):
-        self._list_variable_index_chars.append(char)
-        if char == ']':
-            raise StopIteration
diff --git a/python/ext-libs/pygments/lexers/_scilab_builtins.py b/python/ext-libs/pygments/lexers/_scilab_builtins.py
deleted file mode 100644
index ed0dc81..0000000
--- a/python/ext-libs/pygments/lexers/_scilab_builtins.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._scilab_builtins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Builtin list for the ScilabLexer.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-# These lists are generated automatically.
-# Run the following in a Scilab script:
-#
-# varType=["functions", "commands", "macros", "variables" ];
-# fd = mopen('list.txt','wt');
-#
-# for j=1:size(varType,"*")
-#     myStr="";
-#     a=completion("",varType(j));
-#     myStr=varType(j)+"_kw = [";
-#     for i=1:size(a,"*")
-#         myStr = myStr + """" + a(i) + """";
-#         if size(a,"*") <> i then
-#            myStr = myStr + ","; end
-#         end
-#     myStr = myStr + "]";
-#     mputl(myStr,fd);
-# end
-# mclose(fd);
-#
-# Then replace "$" by "\\$" manually.
-
-functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XML [...]
-
-commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"]
-
-macros_kw = ["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_ [...]
-
-builtin_consts = ["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","int [...]
diff --git a/python/ext-libs/pygments/lexers/_sourcemodbuiltins.py b/python/ext-libs/pygments/lexers/_sourcemodbuiltins.py
deleted file mode 100644
index 0f6b477..0000000
--- a/python/ext-libs/pygments/lexers/_sourcemodbuiltins.py
+++ /dev/null
@@ -1,1072 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._sourcemodbuiltins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file contains the names of SourceMod functions.
-    It is able to re-generate itself.
-
-    Do not edit the FUNCTIONS list by hand.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-FUNCTIONS = ['TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'FindTopMenuCategory',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_GetPlayerResourceData',
- 'TF2_SetPlayerResourceData',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetResourceEntity',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GuessSDKVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogMessageEx',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall']
-
-if __name__ == '__main__':
-    import pprint
-    import re
-    import sys
-    import urllib
-
-    # urllib ends up wanting to import a module called 'math' -- if
-    # pygments/lexers is in the path, this ends badly.
-    for i in range(len(sys.path)-1, -1, -1):
-        if sys.path[i].endswith('/lexers'):
-            del sys.path[i]
-
-    def get_version():
-        f = urllib.urlopen('http://docs.sourcemod.net/api/index.php')
-        r = re.compile(r'SourceMod v\.<b>([\d\.]+)</td>')
-        for line in f:
-            m = r.search(line)
-            if m is not None:
-                return m.groups()[0]
-
-    def get_sm_functions():
-        f = urllib.urlopen('http://docs.sourcemod.net/api/SMfuncs.js')
-        r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
-        functions = []
-        for line in f:
-            m = r.match(line)
-            if m is not None:
-                functions.append(m.groups()[0])
-        return functions
-
-    def regenerate(filename, natives):
-        f = open(filename)
-        try:
-            content = f.read()
-        finally:
-            f.close()
-
-        header = content[:content.find('FUNCTIONS = [')]
-        footer = content[content.find("if __name__ == '__main__':"):]
-
-
-        f = open(filename, 'w')
-        f.write(header)
-        f.write('FUNCTIONS = %s\n\n' % pprint.pformat(natives))
-        f.write(footer)
-        f.close()
-
-    def run():
-        version = get_version()
-        print '> Downloading function index for SourceMod %s' % version
-        functions = get_sm_functions()
-        print '> %d functions found:' % len(functions)
-
-        functionlist = []
-        for full_function_name in functions:
-            print '>> %s' % full_function_name
-            functionlist.append(full_function_name)
-
-        regenerate(__file__, functionlist)
-
-
-    run()
diff --git a/python/ext-libs/pygments/lexers/_stan_builtins.py b/python/ext-libs/pygments/lexers/_stan_builtins.py
deleted file mode 100644
index 69d8ce7..0000000
--- a/python/ext-libs/pygments/lexers/_stan_builtins.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers._stan_builtins
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file contains the names of functions for Stan used by
-    ``pygments.lexers.math.StanLexer.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-CONSTANTS=[   'e',
-    'epsilon',
-    'log10',
-    'log2',
-    'negative_epsilon',
-    'negative_infinity',
-    'not_a_number',
-    'pi',
-    'positive_infinity',
-    'sqrt2']
-
-FUNCTIONS=[   'Phi',
-    'abs',
-    'acos',
-    'acosh',
-    'asin',
-    'asinh',
-    'atan',
-    'atan2',
-    'atanh',
-    'bernoulli_log',
-    'beta_binomial_log',
-    'beta_log',
-    'binary_log_loss',
-    'binomial_coefficient_log',
-    'categorical_log',
-    'cauchy_log',
-    'cbrt',
-    'ceil',
-    'chi_square_log',
-    'cholesky_decompose',
-    'col',
-    'cols',
-    'cos',
-    'cosh',
-    'determinant',
-    'diag_matrix',
-    'diagonal',
-    'dirichlet_log',
-    'dot_product',
-    'dot_self',
-    'double_exponential_log',
-    'eigenvalues',
-    'eigenvalues_sym',
-    'erf',
-    'erfc',
-    'exp',
-    'exp2',
-    'expm1',
-    'exponential_cdf',
-    'exponential_log',
-    'fabs',
-    'fdim',
-    'floor',
-    'fma',
-    'fmax',
-    'fmin',
-    'fmod',
-    'gamma_log',
-    'hypergeometric_log',
-    'hypot',
-    'if_else',
-    'int_step',
-    'inv_chi_square_log',
-    'inv_cloglog',
-    'inv_gamma_log',
-    'inv_logit',
-    'inv_wishart_log',
-    'inverse',
-    'lbeta',
-    'lgamma',
-    'lkj_corr_cholesky_log',
-    'lkj_corr_log',
-    'lkj_cov_log',
-    'lmgamma',
-    'log',
-    'log10',
-    'log1m',
-    'log1p',
-    'log1p_exp',
-    'log2',
-    'log_sum_exp',
-    'logistic_log',
-    'logit',
-    'lognormal_cdf',
-    'lognormal_log',
-    'max',
-    'mean',
-    'min',
-    'multi_normal_cholesky_log',
-    'multi_normal_log',
-    'multi_student_t_log',
-    'multinomial_log',
-    'multiply_log',
-    'multiply_lower_tri_self_transpose',
-    'neg_binomial_log',
-    'normal_cdf',
-    'normal_log',
-    'ordered_logistic_log',
-    'pareto_log',
-    'poisson_log',
-    'pow',
-    'prod',
-    'round',
-    'row',
-    'rows',
-    'scaled_inv_chi_square_log',
-    'sd',
-    'sin',
-    'singular_values',
-    'sinh',
-    'softmax',
-    'sqrt',
-    'square',
-    'step',
-    'student_t_log',
-    'sum',
-    'tan',
-    'tanh',
-    'tgamma',
-    'trace',
-    'trunc',
-    'uniform_log',
-    'variance',
-    'weibull_cdf',
-    'weibull_log',
-    'wishart_log']
-
-DISTRIBUTIONS=[   'bernoulli',
-    'beta',
-    'beta_binomial',
-    'categorical',
-    'cauchy',
-    'chi_square',
-    'dirichlet',
-    'double_exponential',
-    'exponential',
-    'gamma',
-    'hypergeometric',
-    'inv_chi_square',
-    'inv_gamma',
-    'inv_wishart',
-    'lkj_corr',
-    'lkj_corr_cholesky',
-    'lkj_cov',
-    'logistic',
-    'lognormal',
-    'multi_normal',
-    'multi_normal_cholesky',
-    'multi_student_t',
-    'multinomial',
-    'neg_binomial',
-    'normal',
-    'ordered_logistic',
-    'pareto',
-    'poisson',
-    'scaled_inv_chi_square',
-    'student_t',
-    'uniform',
-    'weibull',
-    'wishart']
-
diff --git a/python/ext-libs/pygments/lexers/_vimbuiltins.py b/python/ext-libs/pygments/lexers/_vimbuiltins.py
deleted file mode 100644
index 9fc1b15..0000000
--- a/python/ext-libs/pygments/lexers/_vimbuiltins.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
-    return [('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufW [...]
-def _getcommand():
-    return [('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),( [...]
-def _getoption():
-    return [('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa',' [...]
-
-option = _getoption()
-command = _getcommand()
-auto = _getauto()
diff --git a/python/ext-libs/pygments/lexers/agile.py b/python/ext-libs/pygments/lexers/agile.py
deleted file mode 100644
index 8bcb1d4..0000000
--- a/python/ext-libs/pygments/lexers/agile.py
+++ /dev/null
@@ -1,1917 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.agile
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for agile languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \
-     LexerContext, include, combined, do_insertions, bygroups, using
-from pygments.token import Error, Text, Other, \
-     Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-from pygments import unistring as uni
-
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
-           'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
-           'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
-           'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
-           'FancyLexer', 'DgLexer']
-
-# b/w compatibility
-from pygments.lexers.functional import SchemeLexer
-from pygments.lexers.jvm import IokeLexer, ClojureLexer
-
-line_re  = re.compile('.*?\n')
-
-
-class PythonLexer(RegexLexer):
-    """
-    For `Python <http://www.python.org>`_ source code.
-    """
-
-    name = 'Python'
-    aliases = ['python', 'py', 'sage']
-    filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
-    mimetypes = ['text/x-python', 'application/x-python']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
-            (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-            (r'[^\S\n]+', Text),
-            (r'#.*$', Comment),
-            (r'[]{}:(),;[]', Punctuation),
-            (r'\\\n', Text),
-            (r'\\', Text),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
-            include('keywords'),
-            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
-            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'fromimport'),
-            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'import'),
-            include('builtins'),
-            include('backtick'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
-            ('[uU]?"""', String, combined('stringescape', 'tdqs')),
-            ("[uU]?'''", String, combined('stringescape', 'tsqs')),
-            ('[uU]?"', String, combined('stringescape', 'dqs')),
-            ("[uU]?'", String, combined('stringescape', 'sqs')),
-            include('name'),
-            include('numbers'),
-        ],
-        'keywords': [
-            (r'(assert|break|continue|del|elif|else|except|exec|'
-             r'finally|for|global|if|lambda|pass|print|raise|'
-             r'return|try|while|yield(\s+from)?|as|with)\b', Keyword),
-        ],
-        'builtins': [
-            (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
-             r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
-             r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
-             r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
-             r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
-             r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
-             r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
-             r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
-             r'vars|xrange|zip)\b', Name.Builtin),
-            (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
-             r')\b', Name.Builtin.Pseudo),
-            (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
-             r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
-             r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
-             r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
-             r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
-             r'NotImplemented|NotImplementedError|OSError|OverflowError|'
-             r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
-             r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
-             r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
-             r'TypeError|UnboundLocalError|UnicodeDecodeError|'
-             r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
-             r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
-             r'WindowsError|ZeroDivisionError)\b', Name.Exception),
-        ],
-        'numbers': [
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
-            (r'0[0-7]+j?', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+j?', Number.Integer)
-        ],
-        'backtick': [
-            ('`.*?`', String.Backtick),
-        ],
-        'name': [
-            (r'@[a-zA-Z0-9_.]+', Name.Decorator),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'funcname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
-        ],
-        'classname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'(?:[ \t]|\\\n)+', Text),
-            (r'as\b', Keyword.Namespace),
-            (r',', Operator),
-            (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
-            (r'', Text, '#pop') # all else: go back
-        ],
-        'fromimport': [
-            (r'(?:[ \t]|\\\n)+', Text),
-            (r'import\b', Keyword.Namespace, '#pop'),
-            # if None occurs here, it's "raise x from None", since None can
-            # never be a module name
-            (r'None\b', Name.Builtin.Pseudo, '#pop'),
-            # sadly, in "raise x from y" y will be highlighted as namespace too
-            (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
-            # anything else here also means "raise x from y" and is therefore
-            # not an error
-            (r'', Text, '#pop'),
-        ],
-        'stringescape': [
-            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
-             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-        'strings': [
-            (r'%(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
-            (r'[^\\\'"%\n]+', String),
-            # quotes, percents and backslashes must be parsed one at a time
-            (r'[\'"\\]', String),
-            # unhandled string formatting sign
-            (r'%', String)
-            # newlines are an error (use "nl" state)
-        ],
-        'nl': [
-            (r'\n', String)
-        ],
-        'dqs': [
-            (r'"', String, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
-            include('strings')
-        ],
-        'sqs': [
-            (r"'", String, '#pop'),
-            (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
-            include('strings')
-        ],
-        'tdqs': [
-            (r'"""', String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-        'tsqs': [
-            (r"'''", String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-    }
-
-    def analyse_text(text):
-        return shebang_matches(text, r'pythonw?(2(\.\d)?)?')
-
-
-class Python3Lexer(RegexLexer):
-    """
-    For `Python <http://www.python.org>`_ source code (version 3.0).
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'Python 3'
-    aliases = ['python3', 'py3']
-    filenames = []  # Nothing until Python 3 gets widespread
-    mimetypes = ['text/x-python3', 'application/x-python3']
-
-    flags = re.MULTILINE | re.UNICODE
-
-    uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
-    tokens = PythonLexer.tokens.copy()
-    tokens['keywords'] = [
-        (r'(assert|break|continue|del|elif|else|except|'
-         r'finally|for|global|if|lambda|pass|raise|nonlocal|'
-         r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b',
-         Keyword),
-    ]
-    tokens['builtins'] = [
-        (r'(?<!\.)(__import__|abs|all|any|bin|bool|bytearray|bytes|'
-         r'chr|classmethod|cmp|compile|complex|delattr|dict|dir|'
-         r'divmod|enumerate|eval|filter|float|format|frozenset|getattr|'
-         r'globals|hasattr|hash|hex|id|input|int|isinstance|issubclass|'
-         r'iter|len|list|locals|map|max|memoryview|min|next|object|oct|'
-         r'open|ord|pow|print|property|range|repr|reversed|round|'
-         r'set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|'
-         r'vars|zip)\b', Name.Builtin),
-        (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
-        (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
-         r'BaseException|BufferError|BytesWarning|DeprecationWarning|'
-         r'EOFError|EnvironmentError|Exception|FloatingPointError|'
-         r'FutureWarning|GeneratorExit|IOError|ImportError|'
-         r'ImportWarning|IndentationError|IndexError|KeyError|'
-         r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
-         r'NotImplementedError|OSError|OverflowError|'
-         r'PendingDeprecationWarning|ReferenceError|'
-         r'RuntimeError|RuntimeWarning|StopIteration|'
-         r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
-         r'TypeError|UnboundLocalError|UnicodeDecodeError|'
-         r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
-         r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
-         r'WindowsError|ZeroDivisionError)\b', Name.Exception),
-    ]
-    tokens['numbers'] = [
-        (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-        (r'0[oO][0-7]+', Number.Oct),
-        (r'0[bB][01]+', Number.Bin),
-        (r'0[xX][a-fA-F0-9]+', Number.Hex),
-        (r'\d+', Number.Integer)
-    ]
-    tokens['backtick'] = []
-    tokens['name'] = [
-        (r'@[a-zA-Z0-9_]+', Name.Decorator),
-        (uni_name, Name),
-    ]
-    tokens['funcname'] = [
-        (uni_name, Name.Function, '#pop')
-    ]
-    tokens['classname'] = [
-        (uni_name, Name.Class, '#pop')
-    ]
-    tokens['import'] = [
-        (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
-        (r'\.', Name.Namespace),
-        (uni_name, Name.Namespace),
-        (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
-        (r'', Text, '#pop') # all else: go back
-    ]
-    tokens['fromimport'] = [
-        (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
-        (r'\.', Name.Namespace),
-        (uni_name, Name.Namespace),
-        (r'', Text, '#pop'),
-    ]
-    # don't highlight "%s" substitutions
-    tokens['strings'] = [
-        (r'[^\\\'"%\n]+', String),
-        # quotes, percents and backslashes must be parsed one at a time
-        (r'[\'"\\]', String),
-        # unhandled string formatting sign
-        (r'%', String)
-        # newlines are an error (use "nl" state)
-    ]
-
-    def analyse_text(text):
-        return shebang_matches(text, r'pythonw?3(\.\d)?')
-
-
-class PythonConsoleLexer(Lexer):
-    """
-    For Python console output or doctests, such as:
-
-    .. sourcecode:: pycon
-
-        >>> a = 'foo'
-        >>> print a
-        foo
-        >>> 1 / 0
-        Traceback (most recent call last):
-          File "<stdin>", line 1, in <module>
-        ZeroDivisionError: integer division or modulo by zero
-
-    Additional options:
-
-    `python3`
-        Use Python 3 lexer for code.  Default is ``False``.
-        *New in Pygments 1.0.*
-    """
-    name = 'Python console session'
-    aliases = ['pycon']
-    mimetypes = ['text/x-python-doctest']
-
-    def __init__(self, **options):
-        self.python3 = get_bool_opt(options, 'python3', False)
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        if self.python3:
-            pylexer = Python3Lexer(**self.options)
-            tblexer = Python3TracebackLexer(**self.options)
-        else:
-            pylexer = PythonLexer(**self.options)
-            tblexer = PythonTracebackLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        curtb = ''
-        tbindex = 0
-        tb = 0
-        for match in line_re.finditer(text):
-            line = match.group()
-            if line.startswith(u'>>> ') or line.startswith(u'... '):
-                tb = 0
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:4])]))
-                curcode += line[4:]
-            elif line.rstrip() == u'...' and not tb:
-                # only a new >>> prompt can end an exception block
-                # otherwise an ellipsis in place of the traceback frames
-                # will be mishandled
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, u'...')]))
-                curcode += line[3:]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                    pylexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-                if (line.startswith(u'Traceback (most recent call last):') or
-                    re.match(ur'  File "[^"]+", line \d+\n$', line)):
-                    tb = 1
-                    curtb = line
-                    tbindex = match.start()
-                elif line == 'KeyboardInterrupt\n':
-                    yield match.start(), Name.Class, line
-                elif tb:
-                    curtb += line
-                    if not (line.startswith(' ') or line.strip() == u'...'):
-                        tb = 0
-                        for i, t, v in tblexer.get_tokens_unprocessed(curtb):
-                            yield tbindex+i, t, v
-                else:
-                    yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      pylexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class PythonTracebackLexer(RegexLexer):
-    """
-    For Python tracebacks.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'Python Traceback'
-    aliases = ['pytb']
-    filenames = ['*.pytb']
-    mimetypes = ['text/x-python-traceback']
-
-    tokens = {
-        'root': [
-            (r'^Traceback \(most recent call last\):\n',
-             Generic.Traceback, 'intb'),
-            # SyntaxError starts with this.
-            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
-            (r'^.*\n', Other),
-        ],
-        'intb': [
-            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
-            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text)),
-            (r'^(    )(.+)(\n)',
-             bygroups(Text, using(PythonLexer), Text)),
-            (r'^([ \t]*)(\.\.\.)(\n)',
-             bygroups(Text, Comment, Text)), # for doctests...
-            (r'^([^:]+)(: )(.+)(\n)',
-             bygroups(Generic.Error, Text, Name, Text), '#pop'),
-            (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Generic.Error, Text), '#pop')
-        ],
-    }
-
-
-class Python3TracebackLexer(RegexLexer):
-    """
-    For Python 3.0 tracebacks, with support for chained exceptions.
-
-    *New in Pygments 1.0.*
-    """
-
-    name = 'Python 3.0 Traceback'
-    aliases = ['py3tb']
-    filenames = ['*.py3tb']
-    mimetypes = ['text/x-python3-traceback']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
-            (r'^During handling of the above exception, another '
-             r'exception occurred:\n\n', Generic.Traceback),
-            (r'^The above exception was the direct cause of the '
-             r'following exception:\n\n', Generic.Traceback),
-        ],
-        'intb': [
-            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
-            (r'^(    )(.+)(\n)',
-             bygroups(Text, using(Python3Lexer), Text)),
-            (r'^([ \t]*)(\.\.\.)(\n)',
-             bygroups(Text, Comment, Text)), # for doctests...
-            (r'^([^:]+)(: )(.+)(\n)',
-             bygroups(Generic.Error, Text, Name, Text), '#pop'),
-            (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Generic.Error, Text), '#pop')
-        ],
-    }
-
-
-class RubyLexer(ExtendedRegexLexer):
-    """
-    For `Ruby <http://www.ruby-lang.org>`_ source code.
-    """
-
-    name = 'Ruby'
-    aliases = ['rb', 'ruby', 'duby']
-    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
-                 '*.rbx', '*.duby']
-    mimetypes = ['text/x-ruby', 'application/x-ruby']
-
-    flags = re.DOTALL | re.MULTILINE
-
-    def heredoc_callback(self, match, ctx):
-        # okay, this is the hardest part of parsing Ruby...
-        # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
-        start = match.start(1)
-        yield start, Operator, match.group(1)        # <<-?
-        yield match.start(2), String.Heredoc, match.group(2)  # quote ", ', `
-        yield match.start(3), Name.Constant, match.group(3)   # heredoc name
-        yield match.start(4), String.Heredoc, match.group(4)  # quote again
-
-        heredocstack = ctx.__dict__.setdefault('heredocstack', [])
-        outermost = not bool(heredocstack)
-        heredocstack.append((match.group(1) == '<<-', match.group(3)))
-
-        ctx.pos = match.start(5)
-        ctx.end = match.end(5)
-        # this may find other heredocs
-        for i, t, v in self.get_tokens_unprocessed(context=ctx):
-            yield i, t, v
-        ctx.pos = match.end()
-
-        if outermost:
-            # this is the outer heredoc again, now we can process them all
-            for tolerant, hdname in heredocstack:
-                lines = []
-                for match in line_re.finditer(ctx.text, ctx.pos):
-                    if tolerant:
-                        check = match.group().strip()
-                    else:
-                        check = match.group().rstrip()
-                    if check == hdname:
-                        for amatch in lines:
-                            yield amatch.start(), String.Heredoc, amatch.group()
-                        yield match.start(), Name.Constant, match.group()
-                        ctx.pos = match.end()
-                        break
-                    else:
-                        lines.append(match)
-                else:
-                    # end of heredoc not found -- error!
-                    for amatch in lines:
-                        yield amatch.start(), Error, amatch.group()
-            ctx.end = len(ctx.text)
-            del heredocstack[:]
-
-
-    def gen_rubystrings_rules():
-        def intp_regex_callback(self, match, ctx):
-            yield match.start(1), String.Regex, match.group(1)  # begin
-            nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
-            for i, t, v in self.get_tokens_unprocessed(context=nctx):
-                yield match.start(3)+i, t, v
-            yield match.start(4), String.Regex, match.group(4)  # end[mixounse]*
-            ctx.pos = match.end()
-
-        def intp_string_callback(self, match, ctx):
-            yield match.start(1), String.Other, match.group(1)
-            nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
-            for i, t, v in self.get_tokens_unprocessed(context=nctx):
-                yield match.start(3)+i, t, v
-            yield match.start(4), String.Other, match.group(4)  # end
-            ctx.pos = match.end()
-
-        states = {}
-        states['strings'] = [
-            # easy ones
-            (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
-             r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol),
-            (r":'(\\\\|\\'|[^'])*'", String.Symbol),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r':"', String.Symbol, 'simple-sym'),
-            (r'([a-zA-Z_][a-zA-Z0-9]*)(:)',
-             bygroups(String.Symbol, Punctuation)),  # Since Ruby 1.9
-            (r'"', String.Double, 'simple-string'),
-            (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
-        ]
-
-        # double-quoted string and symbol
-        for name, ttype, end in ('string', String.Double, '"'), \
-                                ('sym', String.Symbol, '"'), \
-                                ('backtick', String.Backtick, '`'):
-            states['simple-'+name] = [
-                include('string-intp-escaped'),
-                (r'[^\\%s#]+' % end, ttype),
-                (r'[\\#]', ttype),
-                (end, ttype, '#pop'),
-            ]
-
-        # braced quoted strings
-        for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
-                                    ('\\[', '\\]', 'sb'), \
-                                    ('\\(', '\\)', 'pa'), \
-                                    ('<', '>', 'ab'):
-            states[name+'-intp-string'] = [
-                (r'\\[\\' + lbrace + rbrace + ']', String.Other),
-                (r'(?<!\\)' + lbrace, String.Other, '#push'),
-                (r'(?<!\\)' + rbrace, String.Other, '#pop'),
-                include('string-intp-escaped'),
-                (r'[\\#' + lbrace + rbrace + ']', String.Other),
-                (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
-            ]
-            states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
-                                      name+'-intp-string'))
-            states[name+'-string'] = [
-                (r'\\[\\' + lbrace + rbrace + ']', String.Other),
-                (r'(?<!\\)' + lbrace, String.Other, '#push'),
-                (r'(?<!\\)' + rbrace, String.Other, '#pop'),
-                (r'[\\#' + lbrace + rbrace + ']', String.Other),
-                (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
-            ]
-            states['strings'].append((r'%[qsw]' + lbrace, String.Other,
-                                      name+'-string'))
-            states[name+'-regex'] = [
-                (r'\\[\\' + lbrace + rbrace + ']', String.Regex),
-                (r'(?<!\\)' + lbrace, String.Regex, '#push'),
-                (r'(?<!\\)' + rbrace + '[mixounse]*', String.Regex, '#pop'),
-                include('string-intp'),
-                (r'[\\#' + lbrace + rbrace + ']', String.Regex),
-                (r'[^\\#' + lbrace + rbrace + ']+', String.Regex),
-            ]
-            states['strings'].append((r'%r' + lbrace, String.Regex,
-                                      name+'-regex'))
-
-        # these must come after %<brace>!
-        states['strings'] += [
-            # %r regex
-            (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
-             intp_regex_callback),
-            # regular fancy strings with qsw
-            (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other),
-            (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)',
-             intp_string_callback),
-            # special forms of fancy strings after operators or
-            # in method calls with braces
-            (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
-             bygroups(Text, String.Other, None)),
-            # and because of fixed width lookbehinds the whole thing a
-            # second time for line startings...
-            (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
-             bygroups(Text, String.Other, None)),
-            # all regular fancy strings without qsw
-            (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
-             intp_string_callback),
-        ]
-
-        return states
-
-    tokens = {
-        'root': [
-            (r'#.*?$', Comment.Single),
-            (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
-            # keywords
-            (r'(BEGIN|END|alias|begin|break|case|defined\?|'
-             r'do|else|elsif|end|ensure|for|if|in|next|redo|'
-             r'rescue|raise|retry|return|super|then|undef|unless|until|when|'
-             r'while|yield)\b', Keyword),
-            # start of function, class and module names
-            (r'(module)(\s+)([a-zA-Z_][a-zA-Z0-9_]*'
-             r'(?:::[a-zA-Z_][a-zA-Z0-9_]*)*)',
-             bygroups(Keyword, Text, Name.Namespace)),
-            (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
-            (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            # special methods
-            (r'(initialize|new|loop|include|extend|raise|attr_reader|'
-             r'attr_writer|attr_accessor|attr|catch|throw|private|'
-             r'module_function|public|protected|true|false|nil)\b',
-             Keyword.Pseudo),
-            (r'(not|and|or)\b', Operator.Word),
-            (r'(autoload|block_given|const_defined|eql|equal|frozen|include|'
-             r'instance_of|is_a|iterator|kind_of|method_defined|nil|'
-             r'private_method_defined|protected_method_defined|'
-             r'public_method_defined|respond_to|tainted)\?', Name.Builtin),
-            (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
-            (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|'
-             r'ancestors|at_exit|autoload|binding|callcc|caller|'
-             r'catch|chomp|chop|class_eval|class_variables|'
-             r'clone|const_defined\?|const_get|const_missing|const_set|'
-             r'constants|display|dup|eval|exec|exit|extend|fail|fork|'
-             r'format|freeze|getc|gets|global_variables|gsub|'
-             r'hash|id|included_modules|inspect|instance_eval|'
-             r'instance_method|instance_methods|'
-             r'instance_variable_get|instance_variable_set|instance_variables|'
-             r'lambda|load|local_variables|loop|'
-             r'method|method_missing|methods|module_eval|name|'
-             r'object_id|open|p|print|printf|private_class_method|'
-             r'private_instance_methods|'
-             r'private_methods|proc|protected_instance_methods|'
-             r'protected_methods|public_class_method|'
-             r'public_instance_methods|public_methods|'
-             r'putc|puts|raise|rand|readline|readlines|require|'
-             r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
-             r'split|sprintf|srand|sub|syscall|system|taint|'
-             r'test|throw|to_a|to_s|trace_var|trap|untaint|untrace_var|'
-             r'warn)\b', Name.Builtin),
-            (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
-            # normal heredocs
-            (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
-             heredoc_callback),
-            # empty string heredocs
-            (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
-            (r'__END__', Comment.Preproc, 'end-part'),
-            # multiline regex (after keywords or assignments)
-            (r'(?:^|(?<=[=<>~!:])|'
-                 r'(?<=(?:\s|;)when\s)|'
-                 r'(?<=(?:\s|;)or\s)|'
-                 r'(?<=(?:\s|;)and\s)|'
-                 r'(?<=(?:\s|;|\.)index\s)|'
-                 r'(?<=(?:\s|;|\.)scan\s)|'
-                 r'(?<=(?:\s|;|\.)sub\s)|'
-                 r'(?<=(?:\s|;|\.)sub!\s)|'
-                 r'(?<=(?:\s|;|\.)gsub\s)|'
-                 r'(?<=(?:\s|;|\.)gsub!\s)|'
-                 r'(?<=(?:\s|;|\.)match\s)|'
-                 r'(?<=(?:\s|;)if\s)|'
-                 r'(?<=(?:\s|;)elsif\s)|'
-                 r'(?<=^when\s)|'
-                 r'(?<=^index\s)|'
-                 r'(?<=^scan\s)|'
-                 r'(?<=^sub\s)|'
-                 r'(?<=^gsub\s)|'
-                 r'(?<=^sub!\s)|'
-                 r'(?<=^gsub!\s)|'
-                 r'(?<=^match\s)|'
-                 r'(?<=^if\s)|'
-                 r'(?<=^elsif\s)'
-             r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
-            # multiline regex (in method calls or subscripts)
-            (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
-            # multiline regex (this time the funny no whitespace rule)
-            (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
-             'multiline-regex'),
-            # lex numbers and ignore following regular expressions which
-            # are division operators in fact (grrrr. i hate that. any
-            # better ideas?)
-            # since pygments 0.7 we also eat a "?" operator after numbers
-            # so that the char operator does not work. Chars are not allowed
-            # there so that you can use the ternary operator.
-            # stupid example:
-            #   x>=0?n[x]:""
-            (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
-             bygroups(Number.Oct, Text, Operator)),
-            (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
-             bygroups(Number.Hex, Text, Operator)),
-            (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
-             bygroups(Number.Bin, Text, Operator)),
-            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
-             bygroups(Number.Integer, Text, Operator)),
-            # Names
-            (r'@@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Class),
-            (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Instance),
-            (r'\$[a-zA-Z0-9_]+', Name.Variable.Global),
-            (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global),
-            (r'\$-[0adFiIlpvw]', Name.Variable.Global),
-            (r'::', Operator),
-            include('strings'),
-            # chars
-            (r'\?(\\[MC]-)*' # modifiers
-             r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
-             r'(?!\w)',
-             String.Char),
-            (r'[A-Z][a-zA-Z0-9_]+', Name.Constant),
-            # this is needed because ruby attributes can look
-            # like keywords (class) or like this: ` ?!?
-            (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])',
-             bygroups(Operator, Name)),
-            (r'[a-zA-Z_]\w*[\!\?]?', Name),
-            (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
-             r'!~|&&?|\|\||\.{1,3})', Operator),
-            (r'[-+/*%=<>&!^|~]=?', Operator),
-            (r'[(){};,/?:\\]', Punctuation),
-            (r'\s+', Text)
-        ],
-        'funcname': [
-            (r'\(', Punctuation, 'defexpr'),
-            (r'(?:([a-zA-Z_][a-zA-Z0-9_]*)(\.))?'
-             r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
-             r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
-             bygroups(Name.Class, Operator, Name.Function), '#pop'),
-            (r'', Text, '#pop')
-        ],
-        'classname': [
-            (r'\(', Punctuation, 'defexpr'),
-            (r'<<', Operator, '#pop'),
-            (r'[A-Z_]\w*', Name.Class, '#pop'),
-            (r'', Text, '#pop')
-        ],
-        'defexpr': [
-            (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
-            (r'\(', Operator, '#push'),
-            include('root')
-        ],
-        'in-intp': [
-            ('}', String.Interpol, '#pop'),
-            include('root'),
-        ],
-        'string-intp': [
-            (r'#{', String.Interpol, 'in-intp'),
-            (r'#@@?[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol),
-            (r'#\$[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol)
-        ],
-        'string-intp-escaped': [
-            include('string-intp'),
-            (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
-             String.Escape)
-        ],
-        'interpolated-regex': [
-            include('string-intp'),
-            (r'[\\#]', String.Regex),
-            (r'[^\\#]+', String.Regex),
-        ],
-        'interpolated-string': [
-            include('string-intp'),
-            (r'[\\#]', String.Other),
-            (r'[^\\#]+', String.Other),
-        ],
-        'multiline-regex': [
-            include('string-intp'),
-            (r'\\\\', String.Regex),
-            (r'\\/', String.Regex),
-            (r'[\\#]', String.Regex),
-            (r'[^\\/#]+', String.Regex),
-            (r'/[mixounse]*', String.Regex, '#pop'),
-        ],
-        'end-part': [
-            (r'.+', Comment.Preproc, '#pop')
-        ]
-    }
-    tokens.update(gen_rubystrings_rules())
-
-    def analyse_text(text):
-        return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
-    """
-    For Ruby interactive console (**irb**) output like:
-
-    .. sourcecode:: rbcon
-
-        irb(main):001:0> a = 1
-        => 1
-        irb(main):002:0> puts a
-        1
-        => nil
-    """
-    name = 'Ruby irb session'
-    aliases = ['rbcon', 'irb']
-    mimetypes = ['text/x-ruby-shellsession']
-
-    _prompt_re = re.compile('irb\([a-zA-Z_][a-zA-Z0-9_]*\):\d{3}:\d+[>*"\'] '
-                            '|>> |\?> ')
-
-    def get_tokens_unprocessed(self, text):
-        rblexer = RubyLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(text):
-            line = match.group()
-            m = self._prompt_re.match(line)
-            if m is not None:
-                end = m.end()
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:end])]))
-                curcode += line[end:]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                    rblexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-                yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      rblexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class PerlLexer(RegexLexer):
-    """
-    For `Perl <http://www.perl.org>`_ source code.
-    """
-
-    name = 'Perl'
-    aliases = ['perl', 'pl']
-    filenames = ['*.pl', '*.pm']
-    mimetypes = ['text/x-perl', 'application/x-perl']
-
-    flags = re.DOTALL | re.MULTILINE
-    # TODO: give this to a perl guy who knows how to parse perl...
-    tokens = {
-        'balanced-regex': [
-            (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
-            (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
-            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
-            (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'),
-            (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
-            (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
-            (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'),
-            (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'),
-            (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'),
-            (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'),
-        ],
-        'root': [
-            (r'\#.*?$', Comment.Single),
-            (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
-            (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
-             r'next|our|redo|reset|then|unless|until|while|use|'
-             r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword),
-            (r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
-             bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
-            (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
-            # common delimiters
-            (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
-                String.Regex),
-            (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
-            (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
-            (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
-                String.Regex),
-            (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
-                String.Regex),
-            # balanced delimiters
-            (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'),
-            (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
-            (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
-                'balanced-regex'),
-            (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex,
-                'balanced-regex'),
-
-            (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
-            (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
-            (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
-                String.Regex),
-            (r'\s+', Text),
-            (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|'
-             r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|'
-             r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|'
-             r'dump|each|endgrent|endhostent|endnetent|endprotoent|'
-             r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|'
-             r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|'
-             r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|'
-             r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|'
-             r'getppid|getpriority|getprotobyname|getprotobynumber|'
-             r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|'
-             r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|'
-             r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|'
-             r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|'
-             r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|'
-             r'opendir|ord|our|pack|package|pipe|pop|pos|printf|'
-             r'prototype|push|quotemeta|rand|read|readdir|'
-             r'readline|readlink|readpipe|recv|redo|ref|rename|require|'
-             r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|'
-             r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|'
-             r'setpgrp|setpriority|setprotoent|setpwent|setservent|'
-             r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|'
-             r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|'
-             r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|'
-             r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|'
-             r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|'
-             r'utime|values|vec|wait|waitpid|wantarray|warn|write'
-             r')\b', Name.Builtin),
-            (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
-            (r'<<([\'"]?)([a-zA-Z_][a-zA-Z0-9_]*)\1;?\n.*?\n\2\n', String),
-            (r'__END__', Comment.Preproc, 'end-part'),
-            (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
-            (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
-            (r'[$@%#]+', Name.Variable, 'varname'),
-            (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
-            (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
-            (r'0b[01]+(_[01]+)*', Number.Bin),
-            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
-             Number.Float),
-            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
-            (r'\d+(_\d+)*', Number.Integer),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
-            (r'<([^\s>]+)>', String.Regex),
-            (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
-            (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
-            (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
-            (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
-            (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other),
-            (r'package\s+', Keyword, 'modulename'),
-            (r'sub\s+', Keyword, 'funcname'),
-            (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
-             r'!~|&&?|\|\||\.{1,3})', Operator),
-            (r'[-+/*%=<>&^|!\\~]=?', Operator),
-            (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage
-                                                      # of punctuation in Perl!
-            (r'(?=\w)', Name, 'name'),
-        ],
-        'format': [
-            (r'\.\n', String.Interpol, '#pop'),
-            (r'[^\n]*\n', String.Interpol),
-        ],
-        'varname': [
-            (r'\s+', Text),
-            (r'\{', Punctuation, '#pop'), # hash syntax?
-            (r'\)|,', Punctuation, '#pop'), # argument specifier
-            (r'[a-zA-Z0-9_]+::', Name.Namespace),
-            (r'[a-zA-Z0-9_:]+', Name.Variable, '#pop'),
-        ],
-        'name': [
-            (r'[a-zA-Z0-9_]+::', Name.Namespace),
-            (r'[a-zA-Z0-9_:]+', Name, '#pop'),
-            (r'[A-Z_]+(?=[^a-zA-Z0-9_])', Name.Constant, '#pop'),
-            (r'(?=[^a-zA-Z0-9_])', Text, '#pop'),
-        ],
-        'modulename': [
-            (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
-        ],
-        'funcname': [
-            (r'[a-zA-Z_]\w*[\!\?]?', Name.Function),
-            (r'\s+', Text),
-            # argument declaration
-            (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
-            (r'.*?{', Punctuation, '#pop'),
-            (r';', Punctuation, '#pop'),
-        ],
-        'cb-string': [
-            (r'\\[\{\}\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\{', String.Other, 'cb-string'),
-            (r'\}', String.Other, '#pop'),
-            (r'[^\{\}\\]+', String.Other)
-        ],
-        'rb-string': [
-            (r'\\[\(\)\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\(', String.Other, 'rb-string'),
-            (r'\)', String.Other, '#pop'),
-            (r'[^\(\)]+', String.Other)
-        ],
-        'sb-string': [
-            (r'\\[\[\]\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\[', String.Other, 'sb-string'),
-            (r'\]', String.Other, '#pop'),
-            (r'[^\[\]]+', String.Other)
-        ],
-        'lt-string': [
-            (r'\\[\<\>\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\<', String.Other, 'lt-string'),
-            (r'\>', String.Other, '#pop'),
-            (r'[^\<\>]+', String.Other)
-        ],
-        'end-part': [
-            (r'.+', Comment.Preproc, '#pop')
-        ]
-    }
-
-    def analyse_text(text):
-        if shebang_matches(text, r'perl'):
-            return True
-        if 'my $' in text:
-            return 0.9
-        return 0.1 # who knows, might still be perl!
-
-
-class LuaLexer(RegexLexer):
-    """
-    For `Lua <http://www.lua.org>`_ source code.
-
-    Additional options accepted:
-
-    `func_name_highlighting`
-        If given and ``True``, highlight builtin function names
-        (default: ``True``).
-    `disabled_modules`
-        If given, must be a list of module names whose function names
-        should not be highlighted. By default all modules are highlighted.
-
-        To get a list of allowed modules have a look into the
-        `_luabuiltins` module:
-
-        .. sourcecode:: pycon
-
-            >>> from pygments.lexers._luabuiltins import MODULES
-            >>> MODULES.keys()
-            ['string', 'coroutine', 'modules', 'io', 'basic', ...]
-    """
-
-    name = 'Lua'
-    aliases = ['lua']
-    filenames = ['*.lua', '*.wlua']
-    mimetypes = ['text/x-lua', 'application/x-lua']
-
-    tokens = {
-        'root': [
-            # lua allows a file to start with a shebang
-            (r'#!(.*?)$', Comment.Preproc),
-            (r'', Text, 'base'),
-        ],
-        'base': [
-            (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
-            ('--.*$', Comment.Single),
-
-            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
-            (r'(?i)\d+e[+-]?\d+', Number.Float),
-            ('(?i)0x[0-9a-f]*', Number.Hex),
-            (r'\d+', Number.Integer),
-
-            (r'\n', Text),
-            (r'[^\S\n]', Text),
-            # multiline strings
-            (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
-            (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
-            (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
-            (r'(and|or|not)\b', Operator.Word),
-
-            ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
-             r'while)\b', Keyword),
-            (r'(local)\b', Keyword.Declaration),
-            (r'(true|false|nil)\b', Keyword.Constant),
-
-            (r'(function)\b', Keyword, 'funcname'),
-
-            (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
-
-            ("'", String.Single, combined('stringescape', 'sqs')),
-            ('"', String.Double, combined('stringescape', 'dqs'))
-        ],
-
-        'funcname': [
-            (r'\s+', Text),
-            ('(?:([A-Za-z_][A-Za-z0-9_]*)(\.))?([A-Za-z_][A-Za-z0-9_]*)',
-             bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
-            # inline function
-            ('\(', Punctuation, '#pop'),
-        ],
-
-        # if I understand correctly, every character is valid in a lua string,
-        # so this state is only for later corrections
-        'string': [
-            ('.', String)
-        ],
-
-        'stringescape': [
-            (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
-        ],
-
-        'sqs': [
-            ("'", String, '#pop'),
-            include('string')
-        ],
-
-        'dqs': [
-            ('"', String, '#pop'),
-            include('string')
-        ]
-    }
-
-    def __init__(self, **options):
-        self.func_name_highlighting = get_bool_opt(
-            options, 'func_name_highlighting', True)
-        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
-        self._functions = set()
-        if self.func_name_highlighting:
-            from pygments.lexers._luabuiltins import MODULES
-            for mod, func in MODULES.iteritems():
-                if mod not in self.disabled_modules:
-                    self._functions.update(func)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name:
-                if value in self._functions:
-                    yield index, Name.Builtin, value
-                    continue
-                elif '.' in value:
-                    a, b = value.split('.')
-                    yield index, Name, a
-                    yield index + len(a), Punctuation, u'.'
-                    yield index + len(a) + 1, Name, b
-                    continue
-            yield index, token, value
-
-
-class MoonScriptLexer(LuaLexer):
-    """
-    For `MoonScript <http://moonscript.org.org>`_ source code.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = "MoonScript"
-    aliases = ["moon", "moonscript"]
-    filenames = ["*.moon"]
-    mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
-    tokens = {
-        'root': [
-            (r'#!(.*?)$', Comment.Preproc),
-            (r'', Text, 'base'),
-        ],
-        'base': [
-            ('--.*$', Comment.Single),
-            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
-            (r'(?i)\d+e[+-]?\d+', Number.Float),
-            (r'(?i)0x[0-9a-f]*', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'\n', Text),
-            (r'[^\S\n]+', Text),
-            (r'(?s)\[(=*)\[.*?\]\1\]', String),
-            (r'(->|=>)', Name.Function),
-            (r':[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
-            (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
-            (r'[;,]', Punctuation),
-            (r'[\[\]\{\}\(\)]', Keyword.Type),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Variable),
-            (r"(class|extends|if|then|super|do|with|import|export|"
-             r"while|elseif|return|for|in|from|when|using|else|"
-             r"and|or|not|switch|break)\b", Keyword),
-            (r'(true|false|nil)\b', Keyword.Constant),
-            (r'(and|or|not)\b', Operator.Word),
-            (r'(self)\b', Name.Builtin.Pseudo),
-            (r'@@?([a-zA-Z_][a-zA-Z0-9_]*)?', Name.Variable.Class),
-            (r'[A-Z]\w*', Name.Class), # proper name
-            (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
-            ("'", String.Single, combined('stringescape', 'sqs')),
-            ('"', String.Double, combined('stringescape', 'dqs'))
-        ],
-        'stringescape': [
-            (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
-        ],
-        'sqs': [
-            ("'", String.Single, '#pop'),
-            (".", String)
-        ],
-        'dqs': [
-            ('"', String.Double, '#pop'),
-            (".", String)
-        ]
-    }
-
-    def get_tokens_unprocessed(self, text):
-        # set . as Operator instead of Punctuation
-        for index, token, value in \
-            LuaLexer.get_tokens_unprocessed(self, text):
-            if token == Punctuation and value == ".":
-                token = Operator
-            yield index, token, value
-
-
-class CrocLexer(RegexLexer):
-    """
-    For `Croc <http://jfbillingsley.com/croc>`_ source.
-    """
-    name = 'Croc'
-    filenames = ['*.croc']
-    aliases = ['croc']
-    mimetypes = ['text/x-crocsrc']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            # Comments
-            (r'//(.*?)\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'nestedcomment'),
-            # Keywords
-            (r'(as|assert|break|case|catch|class|continue|default'
-             r'|do|else|finally|for|foreach|function|global|namespace'
-             r'|if|import|in|is|local|module|return|scope|super|switch'
-             r'|this|throw|try|vararg|while|with|yield)\b', Keyword),
-            (r'(false|true|null)\b', Keyword.Constant),
-            # FloatLiteral
-            (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
-             Number.Float),
-            # IntegerLiteral
-            # -- Binary
-            (r'0[bB][01][01_]*', Number),
-            # -- Hexadecimal
-            (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
-            # -- Decimal
-            (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
-            # CharacterLiteral
-            (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
-             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
-             String.Char
-            ),
-            # StringLiteral
-            # -- WysiwygString
-            (r'@"(""|[^"])*"', String),
-            (r'@`(``|[^`])*`', String),
-            (r"@'(''|[^'])*'", String),
-            # -- DoubleQuotedString
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # Tokens
-            (
-             r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
-             r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
-             r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation
-            ),
-            # Identifier
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'nestedcomment': [
-            (r'[^*/]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline),
-        ],
-    }
-
-
-class MiniDLexer(CrocLexer):
-    """
-    For MiniD source. MiniD is now known as Croc.
-    """
-    name = 'MiniD'
-    filenames = ['*.md']
-    aliases = ['minid']
-    mimetypes = ['text/x-minidsrc']
-
-
-class IoLexer(RegexLexer):
-    """
-    For `Io <http://iolanguage.com/>`_ (a small, prototype-based
-    programming language) source.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Io'
-    filenames = ['*.io']
-    aliases = ['io']
-    mimetypes = ['text/x-iosrc']
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            # Comments
-            (r'//(.*?)\n', Comment.Single),
-            (r'#(.*?)\n', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'/\+', Comment.Multiline, 'nestedcomment'),
-            # DoubleQuotedString
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # Operators
-            (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
-             Operator),
-            # keywords
-            (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
-             Keyword),
-            # constants
-            (r'(nil|false|true)\b', Name.Constant),
-            # names
-            (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
-             Name.Builtin),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            # numbers
-            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+', Number.Integer)
-        ],
-        'nestedcomment': [
-            (r'[^+/]+', Comment.Multiline),
-            (r'/\+', Comment.Multiline, '#push'),
-            (r'\+/', Comment.Multiline, '#pop'),
-            (r'[+/]', Comment.Multiline),
-        ]
-    }
-
-
-class TclLexer(RegexLexer):
-    """
-    For Tcl source code.
-
-    *New in Pygments 0.10.*
-    """
-
-    keyword_cmds_re = (
-        r'\b(after|apply|array|break|catch|continue|elseif|else|error|'
-        r'eval|expr|for|foreach|global|if|namespace|proc|rename|return|'
-        r'set|switch|then|trace|unset|update|uplevel|upvar|variable|'
-        r'vwait|while)\b'
-        )
-
-    builtin_cmds_re = (
-        r'\b(append|bgerror|binary|cd|chan|clock|close|concat|dde|dict|'
-        r'encoding|eof|exec|exit|fblocked|fconfigure|fcopy|file|'
-        r'fileevent|flush|format|gets|glob|history|http|incr|info|interp|'
-        r'join|lappend|lassign|lindex|linsert|list|llength|load|loadTk|'
-        r'lrange|lrepeat|lreplace|lreverse|lsearch|lset|lsort|mathfunc|'
-        r'mathop|memory|msgcat|open|package|pid|pkg::create|pkg_mkIndex|'
-        r'platform|platform::shell|puts|pwd|re_syntax|read|refchan|'
-        r'regexp|registry|regsub|scan|seek|socket|source|split|string|'
-        r'subst|tell|time|tm|unknown|unload)\b'
-        )
-
-    name = 'Tcl'
-    aliases = ['tcl']
-    filenames = ['*.tcl']
-    mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
-    def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
-        return [
-            (keyword_cmds_re, Keyword, 'params' + context),
-            (builtin_cmds_re, Name.Builtin, 'params' + context),
-            (r'([\w\.\-]+)', Name.Variable, 'params' + context),
-            (r'#', Comment, 'comment'),
-        ]
-
-    tokens = {
-        'root': [
-            include('command'),
-            include('basic'),
-            include('data'),
-            (r'}', Keyword),  # HACK: somehow we miscounted our braces
-        ],
-        'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
-        'command-in-brace': _gen_command_rules(keyword_cmds_re,
-                                               builtin_cmds_re,
-                                               "-in-brace"),
-        'command-in-bracket': _gen_command_rules(keyword_cmds_re,
-                                                 builtin_cmds_re,
-                                                 "-in-bracket"),
-        'command-in-paren': _gen_command_rules(keyword_cmds_re,
-                                               builtin_cmds_re,
-                                               "-in-paren"),
-        'basic': [
-            (r'\(', Keyword, 'paren'),
-            (r'\[', Keyword, 'bracket'),
-            (r'\{', Keyword, 'brace'),
-            (r'"', String.Double, 'string'),
-            (r'(eq|ne|in|ni)\b', Operator.Word),
-            (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
-        ],
-        'data': [
-            (r'\s+', Text),
-            (r'0x[a-fA-F0-9]+', Number.Hex),
-            (r'0[0-7]+', Number.Oct),
-            (r'\d+\.\d+', Number.Float),
-            (r'\d+', Number.Integer),
-            (r'\$([\w\.\-\:]+)', Name.Variable),
-            (r'([\w\.\-\:]+)', Text),
-        ],
-        'params': [
-            (r';', Keyword, '#pop'),
-            (r'\n', Text, '#pop'),
-            (r'(else|elseif|then)\b', Keyword),
-            include('basic'),
-            include('data'),
-        ],
-        'params-in-brace': [
-            (r'}', Keyword, ('#pop', '#pop')),
-            include('params')
-        ],
-        'params-in-paren': [
-            (r'\)', Keyword, ('#pop', '#pop')),
-            include('params')
-        ],
-        'params-in-bracket': [
-            (r'\]', Keyword, ('#pop', '#pop')),
-            include('params')
-        ],
-        'string': [
-            (r'\[', String.Double, 'string-square'),
-            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
-            (r'"', String.Double, '#pop')
-        ],
-        'string-square': [
-            (r'\[', String.Double, 'string-square'),
-            (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
-            (r'\]', String.Double, '#pop')
-        ],
-        'brace': [
-            (r'}', Keyword, '#pop'),
-            include('command-in-brace'),
-            include('basic'),
-            include('data'),
-        ],
-        'paren': [
-            (r'\)', Keyword, '#pop'),
-            include('command-in-paren'),
-            include('basic'),
-            include('data'),
-        ],
-        'bracket': [
-            (r'\]', Keyword, '#pop'),
-            include('command-in-bracket'),
-            include('basic'),
-            include('data'),
-        ],
-        'comment': [
-            (r'.*[^\\]\n', Comment, '#pop'),
-            (r'.*\\\n', Comment),
-        ],
-    }
-
-    def analyse_text(text):
-        return shebang_matches(text, r'(tcl)')
-
-
-class FactorLexer(RegexLexer):
-    """
-    Lexer for the `Factor <http://factorcode.org>`_ language.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'Factor'
-    aliases = ['factor']
-    filenames = ['*.factor']
-    mimetypes = ['text/x-factor']
-
-    flags = re.MULTILINE | re.UNICODE
-
-    builtin_kernel = (
-        r'(?:or|2bi|2tri|while|wrapper|nip|4dip|wrapper\\?|bi\\*|'
-        r'callstack>array|both\\?|hashcode|die|dupd|callstack|'
-        r'callstack\\?|3dup|tri@|pick|curry|build|\\?execute|3bi|'
-        r'prepose|>boolean|\\?if|clone|eq\\?|tri\\*|\\?|=|swapd|'
-        r'2over|2keep|3keep|clear|2dup|when|not|tuple\\?|dup|2bi\\*|'
-        r'2tri\\*|call|tri-curry|object|bi@|do|unless\\*|if\\*|loop|'
-        r'bi-curry\\*|drop|when\\*|assert=|retainstack|assert\\?|-rot|'
-        r'execute|2bi@|2tri@|boa|with|either\\?|3drop|bi|curry\\?|'
-        r'datastack|until|3dip|over|3curry|tri-curry\\*|tri-curry@|swap|'
-        r'and|2nip|throw|bi-curry|\\(clone\\)|hashcode\\*|compose|2dip|if|3tri|'
-        r'unless|compose\\?|tuple|keep|2curry|equal\\?|assert|tri|2drop|'
-        r'most|<wrapper>|boolean\\?|identity-hashcode|identity-tuple\\?|'
-        r'null|new|dip|bi-curry@|rot|xor|identity-tuple|boolean)\s'
-        )
-
-    builtin_assocs = (
-        r'(?:\\?at|assoc\\?|assoc-clone-like|assoc=|delete-at\\*|'
-        r'assoc-partition|extract-keys|new-assoc|value\\?|assoc-size|'
-        r'map>assoc|push-at|assoc-like|key\\?|assoc-intersect|'
-        r'assoc-refine|update|assoc-union|assoc-combine|at\\*|'
-        r'assoc-empty\\?|at\\+|set-at|assoc-all\\?|assoc-subset\\?|'
-        r'assoc-hashcode|change-at|assoc-each|assoc-diff|zip|values|'
-        r'value-at|rename-at|inc-at|enum\\?|at|cache|assoc>map|<enum>|'
-        r'assoc|assoc-map|enum|value-at\\*|assoc-map-as|>alist|'
-        r'assoc-filter-as|clear-assoc|assoc-stack|maybe-set-at|'
-        r'substitute|assoc-filter|2cache|delete-at|assoc-find|keys|'
-        r'assoc-any\\?|unzip)\s'
-        )
-
-    builtin_combinators = (
-        r'(?:case|execute-effect|no-cond|no-case\\?|3cleave>quot|2cleave|'
-        r'cond>quot|wrong-values\\?|no-cond\\?|cleave>quot|no-case|'
-        r'case>quot|3cleave|wrong-values|to-fixed-point|alist>quot|'
-        r'case-find|cond|cleave|call-effect|2cleave>quot|recursive-hashcode|'
-        r'linear-case-quot|spread|spread>quot)\s'
-        )
-
-    builtin_math = (
-        r'(?:number=|if-zero|next-power-of-2|each-integer|\\?1\\+|'
-        r'fp-special\\?|imaginary-part|unless-zero|float>bits|number\\?|'
-        r'fp-infinity\\?|bignum\\?|fp-snan\\?|denominator|fp-bitwise=|\\*|'
-        r'\\+|power-of-2\\?|-|u>=|/|>=|bitand|log2-expects-positive|<|'
-        r'log2|>|integer\\?|number|bits>double|2/|zero\\?|(find-integer)|'
-        r'bits>float|float\\?|shift|ratio\\?|even\\?|ratio|fp-sign|bitnot|'
-        r'>fixnum|complex\\?|/i|/f|byte-array>bignum|when-zero|sgn|>bignum|'
-        r'next-float|u<|u>|mod|recip|rational|find-last-integer|>float|'
-        r'(all-integers\\?)|2^|times|integer|fixnum\\?|neg|fixnum|sq|'
-        r'bignum|(each-integer)|bit\\?|fp-qnan\\?|find-integer|complex|'
-        r'<fp-nan>|real|double>bits|bitor|rem|fp-nan-payload|all-integers\\?|'
-        r'real-part|log2-expects-positive\\?|prev-float|align|unordered\\?|'
-        r'float|fp-nan\\?|abs|bitxor|u<=|odd\\?|<=|/mod|rational\\?|>integer|'
-        r'real\\?|numerator)\s'
-        )
-
-    builtin_sequences = (
-        r'(?:member-eq\\?|append|assert-sequence=|find-last-from|trim-head-slice|'
-        r'clone-like|3sequence|assert-sequence\\?|map-as|last-index-from|'
-        r'reversed|index-from|cut\\*|pad-tail|remove-eq!|concat-as|'
-        r'but-last|snip|trim-tail|nths|nth|2selector|sequence|slice\\?|'
-        r'<slice>|partition|remove-nth|tail-slice|empty\\?|tail\\*|'
-        r'if-empty|find-from|virtual-sequence\\?|member\\?|set-length|'
-        r'drop-prefix|unclip|unclip-last-slice|iota|map-sum|'
-        r'bounds-error\\?|sequence-hashcode-step|selector-for|'
-        r'accumulate-as|map|start|midpoint@|\\(accumulate\\)|rest-slice|'
-        r'prepend|fourth|sift|accumulate!|new-sequence|follow|map!|'
-        r'like|first4|1sequence|reverse|slice|unless-empty|padding|'
-        r'virtual@|repetition\\?|set-last|index|4sequence|max-length|'
-        r'set-second|immutable-sequence|first2|first3|replicate-as|'
-        r'reduce-index|unclip-slice|supremum|suffix!|insert-nth|'
-        r'trim-tail-slice|tail|3append|short|count|suffix|concat|'
-        r'flip|filter|sum|immutable\\?|reverse!|2sequence|map-integers|'
-        r'delete-all|start\\*|indices|snip-slice|check-slice|sequence\\?|'
-        r'head|map-find|filter!|append-as|reduce|sequence=|halves|'
-        r'collapse-slice|interleave|2map|filter-as|binary-reduce|'
-        r'slice-error\\?|product|bounds-check\\?|bounds-check|harvest|'
-        r'immutable|virtual-exemplar|find|produce|remove|pad-head|last|'
-        r'replicate|set-fourth|remove-eq|shorten|reversed\\?|'
-        r'map-find-last|3map-as|2unclip-slice|shorter\\?|3map|find-last|'
-        r'head-slice|pop\\*|2map-as|tail-slice\\*|but-last-slice|'
-        r'2map-reduce|iota\\?|collector-for|accumulate|each|selector|'
-        r'append!|new-resizable|cut-slice|each-index|head-slice\\*|'
-        r'2reverse-each|sequence-hashcode|pop|set-nth|\\?nth|'
-        r'<flat-slice>|second|join|when-empty|collector|'
-        r'immutable-sequence\\?|<reversed>|all\\?|3append-as|'
-        r'virtual-sequence|subseq\\?|remove-nth!|push-either|new-like|'
-        r'length|last-index|push-if|2all\\?|lengthen|assert-sequence|'
-        r'copy|map-reduce|move|third|first|3each|tail\\?|set-first|'
-        r'prefix|bounds-error|any\\?|<repetition>|trim-slice|exchange|'
-        r'surround|2reduce|cut|change-nth|min-length|set-third|produce-as|'
-        r'push-all|head\\?|delete-slice|rest|sum-lengths|2each|head\\*|'
-        r'infimum|remove!|glue|slice-error|subseq|trim|replace-slice|'
-        r'push|repetition|map-index|trim-head|unclip-last|mismatch)\s'
-        )
-
-    builtin_namespaces = (
-        r'(?:global|\\+@|change|set-namestack|change-global|init-namespaces|'
-        r'on|off|set-global|namespace|set|with-scope|bind|with-variable|'
-        r'inc|dec|counter|initialize|namestack|get|get-global|make-assoc)\s'
-        )
-
-    builtin_arrays = (
-        r'(?:<array>|2array|3array|pair|>array|1array|4array|pair\\?|'
-        r'array|resize-array|array\\?)\s'
-        )
-
-    builtin_io = (
-        r'(?:\\+character\\+|bad-seek-type\\?|readln|each-morsel|stream-seek|'
-        r'read|print|with-output-stream|contents|write1|stream-write1|'
-        r'stream-copy|stream-element-type|with-input-stream|'
-        r'stream-print|stream-read|stream-contents|stream-tell|'
-        r'tell-output|bl|seek-output|bad-seek-type|nl|stream-nl|write|'
-        r'flush|stream-lines|\\+byte\\+|stream-flush|read1|'
-        r'seek-absolute\\?|stream-read1|lines|stream-readln|'
-        r'stream-read-until|each-line|seek-end|with-output-stream\\*|'
-        r'seek-absolute|with-streams|seek-input|seek-relative\\?|'
-        r'input-stream|stream-write|read-partial|seek-end\\?|'
-        r'seek-relative|error-stream|read-until|with-input-stream\\*|'
-        r'with-streams\\*|tell-input|each-block|output-stream|'
-        r'stream-read-partial|each-stream-block|each-stream-line)\s'
-        )
-
-    builtin_strings = (
-        r'(?:resize-string|>string|<string>|1string|string|string\\?)\s'
-        )
-
-    builtin_vectors = (
-        r'(?:vector\\?|<vector>|\\?push|vector|>vector|1vector)\s'
-        )
-
-    builtin_continuations = (
-        r'(?:with-return|restarts|return-continuation|with-datastack|'
-        r'recover|rethrow-restarts|<restart>|ifcc|set-catchstack|'
-        r'>continuation<|cleanup|ignore-errors|restart\\?|'
-        r'compute-restarts|attempt-all-error|error-thread|continue|'
-        r'<continuation>|attempt-all-error\\?|condition\\?|'
-        r'<condition>|throw-restarts|error|catchstack|continue-with|'
-        r'thread-error-hook|continuation|rethrow|callcc1|'
-        r'error-continuation|callcc0|attempt-all|condition|'
-        r'continuation\\?|restart|return)\s'
-        )
-
-    tokens = {
-        'root': [
-            # TODO: (( inputs -- outputs ))
-            # TODO: << ... >>
-
-            # defining words
-            (r'(\s*)(:|::|MACRO:|MEMO:)(\s+)(\S+)',
-             bygroups(Text, Keyword, Text, Name.Function)),
-            (r'(\s*)(M:)(\s+)(\S+)(\s+)(\S+)',
-             bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
-            (r'(\s*)(GENERIC:)(\s+)(\S+)',
-             bygroups(Text, Keyword, Text, Name.Function)),
-            (r'(\s*)(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
-             bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
-            (r'(\()(\s+)', bygroups(Name.Function, Text), 'stackeffect'),
-            (r'\;\s', Keyword),
-
-            # imports and namespaces
-            (r'(USING:)((?:\s|\\\s)+)',
-             bygroups(Keyword.Namespace, Text), 'import'),
-            (r'(USE:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'(UNUSE:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'(QUALIFIED:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'(QUALIFIED-WITH:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+)(=>)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
-            (r'(IN:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'(?:ALIAS|DEFER|FORGET|POSTPONE):', Keyword.Namespace),
-
-            # tuples and classes
-            (r'(TUPLE:)(\s+)(\S+)(\s+<\s+)(\S+)',
-             bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
-            (r'(TUPLE:)(\s+)(\S+)',
-             bygroups(Keyword, Text, Name.Class), 'slots'),
-            (r'(UNION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
-            (r'(INTERSECTION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
-            (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
-            (r'(C:)(\s+)(\S+)(\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
-            (r'INSTANCE:', Keyword),
-            (r'SLOT:', Keyword),
-            (r'MIXIN:', Keyword),
-            (r'(?:SINGLETON|SINGLETONS):', Keyword),
-
-            # other syntax
-            (r'CONSTANT:', Keyword),
-            (r'(?:SYMBOL|SYMBOLS):', Keyword),
-            (r'ERROR:', Keyword),
-            (r'SYNTAX:', Keyword),
-            (r'(HELP:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
-            (r'(MAIN:)(\s+)(\S+)',
-             bygroups(Keyword.Namespace, Text, Name.Function)),
-            (r'(?:ALIEN|TYPEDEF|FUNCTION|STRUCT):', Keyword),
-
-            # vocab.private
-            # TODO: words inside vocab.private should have red names?
-            (r'(?:<PRIVATE|PRIVATE>)', Keyword.Namespace),
-
-            # strings
-            (r'"""\s+(?:.|\n)*?\s+"""', String),
-            (r'"(?:\\\\|\\"|[^"])*"', String),
-            (r'CHAR:\s+(\\[\\abfnrstv]*|\S)\s', String.Char),
-
-            # comments
-            (r'\!\s+.*$', Comment),
-            (r'#\!\s+.*$', Comment),
-
-            # boolean constants
-            (r'(t|f)\s', Name.Constant),
-
-            # numbers
-            (r'-?\d+\.\d+\s', Number.Float),
-            (r'-?\d+\s', Number.Integer),
-            (r'HEX:\s+[a-fA-F\d]+\s', Number.Hex),
-            (r'BIN:\s+[01]+\s', Number.Integer),
-            (r'OCT:\s+[0-7]+\s', Number.Oct),
-
-            # operators
-            (r'[-+/*=<>^]\s', Operator),
-
-            # keywords
-            (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
-             Keyword),
-
-            # builtins
-            (builtin_kernel, Name.Builtin),
-            (builtin_assocs, Name.Builtin),
-            (builtin_combinators, Name.Builtin),
-            (builtin_math, Name.Builtin),
-            (builtin_sequences, Name.Builtin),
-            (builtin_namespaces, Name.Builtin),
-            (builtin_arrays, Name.Builtin),
-            (builtin_io, Name.Builtin),
-            (builtin_strings, Name.Builtin),
-            (builtin_vectors, Name.Builtin),
-            (builtin_continuations, Name.Builtin),
-
-            # whitespaces - usually not relevant
-            (r'\s+', Text),
-
-            # everything else is text
-            (r'\S+', Text),
-        ],
-
-        'stackeffect': [
-            (r'\s*\(', Name.Function, 'stackeffect'),
-            (r'\)', Name.Function, '#pop'),
-            (r'\-\-', Name.Function),
-            (r'\s+', Text),
-            (r'\S+', Name.Variable),
-        ],
-
-        'slots': [
-            (r'\s+', Text),
-            (r';\s', Keyword, '#pop'),
-            (r'\S+', Name.Variable),
-        ],
-
-        'import': [
-            (r';', Keyword, '#pop'),
-            (r'\S+', Name.Namespace),
-            (r'\s+', Text),
-        ],
-    }
-
-
-class FancyLexer(RegexLexer):
-    """
-    Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
-
-    Fancy is a self-hosted, pure object-oriented, dynamic,
-    class-based, concurrent general-purpose programming language
-    running on Rubinius, the Ruby VM.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'Fancy'
-    filenames = ['*.fy', '*.fancypack']
-    aliases = ['fancy', 'fy']
-    mimetypes = ['text/x-fancysrc']
-
-    tokens = {
-        # copied from PerlLexer:
-        'balanced-regex': [
-            (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
-            (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
-            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
-            (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
-            (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
-            (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
-            (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
-            (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
-            (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
-            (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
-        ],
-        'root': [
-            (r'\s+', Text),
-
-            # balanced delimiters (copied from PerlLexer):
-            (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
-            (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
-            (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
-            (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
-            (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
-            (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
-
-            # Comments
-            (r'#(.*?)\n', Comment.Single),
-            # Symbols
-            (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol),
-            # Multi-line DoubleQuotedString
-            (r'"""(\\\\|\\"|[^"])*"""', String),
-            # DoubleQuotedString
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # keywords
-            (r'(def|class|try|catch|finally|retry|return|return_local|match|'
-             r'case|->|=>)\b', Keyword),
-            # constants
-            (r'(self|super|nil|false|true)\b', Name.Constant),
-            (r'[(){};,/?\|:\\]', Punctuation),
-            # names
-            (r'(Object|Array|Hash|Directory|File|Class|String|Number|'
-             r'Enumerable|FancyEnumerable|Block|TrueClass|NilClass|'
-             r'FalseClass|Tuple|Symbol|Stack|Set|FancySpec|Method|Package|'
-             r'Range)\b', Name.Builtin),
-            # functions
-            (r'[a-zA-Z]([a-zA-Z0-9_]|[-+?!=*/^><%])*:', Name.Function),
-            # operators, must be below functions
-            (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator),
-            ('[A-Z][a-zA-Z0-9_]*', Name.Constant),
-            ('@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Instance),
-            ('@@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Class),
-            ('@@?', Operator),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            # numbers - / checks are necessary to avoid mismarking regexes,
-            # see comment in RubyLexer
-            (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
-             bygroups(Number.Oct, Text, Operator)),
-            (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
-             bygroups(Number.Hex, Text, Operator)),
-            (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
-             bygroups(Number.Bin, Text, Operator)),
-            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
-             bygroups(Number.Integer, Text, Operator)),
-            (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+', Number.Integer)
-        ]
-    }
-
-
-class DgLexer(RegexLexer):
-    """
-    Lexer for `dg <http://pyos.github.com/dg>`_,
-    a functional and object-oriented programming language
-    running on the CPython 3 VM.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'dg'
-    aliases = ['dg']
-    filenames = ['*.dg']
-    mimetypes = ['text/x-dg']
-
-    tokens = {
-        'root': [
-            # Whitespace:
-            (r'\s+', Text),
-            (r'#.*?$', Comment.Single),
-            # Lexemes:
-            #  Numbers
-            (r'0[bB][01]+', Number.Bin),
-            (r'0[oO][0-7]+', Number.Oct),
-            (r'0[xX][\da-fA-F]+', Number.Hex),
-            (r'[+-]?\d+\.\d+([eE][+-]?\d+)?[jJ]?', Number.Float),
-            (r'[+-]?\d+[eE][+-]?\d+[jJ]?', Number.Float),
-            (r'[+-]?\d+[jJ]?', Number.Integer),
-            #  Character/String Literals
-            (r"[br]*'''", String, combined('stringescape', 'tsqs', 'string')),
-            (r'[br]*"""', String, combined('stringescape', 'tdqs', 'string')),
-            (r"[br]*'", String, combined('stringescape', 'sqs', 'string')),
-            (r'[br]*"', String, combined('stringescape', 'dqs', 'string')),
-            #  Operators
-            (r"`\w+'*`", Operator), # Infix links
-            #   Reserved infix links
-            (r'\b(or|and|if|else|where|is|in)\b', Operator.Word),
-            (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
-            #  Identifiers
-            #   Python 3 types
-            (r"(?<!\.)(bool|bytearray|bytes|classmethod|complex|dict'?|"
-             r"float|frozenset|int|list'?|memoryview|object|property|range|"
-             r"set'?|slice|staticmethod|str|super|tuple'?|type)"
-             r"(?!['\w])", Name.Builtin),
-            #   Python 3 builtins + some more
-            (r'(?<!\.)(__import__|abs|all|any|bin|bind|chr|cmp|compile|complex|'
-             r'delattr|dir|divmod|drop|dropwhile|enumerate|eval|filter|flip|'
-             r'foldl1?|format|fst|getattr|globals|hasattr|hash|head|hex|id|'
-             r'init|input|isinstance|issubclass|iter|iterate|last|len|locals|'
-             r'map|max|min|next|oct|open|ord|pow|print|repr|reversed|round|'
-             r'setattr|scanl1?|snd|sorted|sum|tail|take|takewhile|vars|zip)'
-             r"(?!['\w])", Name.Builtin),
-            (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
-             Name.Builtin.Pseudo),
-            (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
-             Name.Exception),
-            (r"(?<!\.)(KeyboardInterrupt|SystemExit|StopIteration|"
-             r"GeneratorExit)(?!['\w])", Name.Exception),
-            #   Compiler-defined identifiers
-            (r"(?<![\.\w])(import|inherit|for|while|switch|not|raise|unsafe|"
-             r"yield|with)(?!['\w])", Keyword.Reserved),
-            #   Other links
-            (r"[A-Z_']+\b", Name),
-            (r"[A-Z][\w']*\b", Keyword.Type),
-            (r"\w+'*", Name),
-            #  Blocks
-            (r'[()]', Punctuation),
-        ],
-        'stringescape': [
-            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
-             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-        'string': [
-            (r'%(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
-            (r'[^\\\'"%\n]+', String),
-            # quotes, percents and backslashes must be parsed one at a time
-            (r'[\'"\\]', String),
-            # unhandled string formatting sign
-            (r'%', String),
-            (r'\n', String)
-        ],
-        'dqs': [
-            (r'"', String, '#pop')
-        ],
-        'sqs': [
-            (r"'", String, '#pop')
-        ],
-        'tdqs': [
-            (r'"""', String, '#pop')
-        ],
-        'tsqs': [
-            (r"'''", String, '#pop')
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/asm.py b/python/ext-libs/pygments/lexers/asm.py
deleted file mode 100644
index 7ff64bc..0000000
--- a/python/ext-libs/pygments/lexers/asm.py
+++ /dev/null
@@ -1,398 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.asm
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for assembly languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
-from pygments.lexers.compiled import DLexer, CppLexer, CLexer
-from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
-     Other, Keyword, Operator
-
-__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
-           'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'Ca65Lexer']
-
-
-class GasLexer(RegexLexer):
-    """
-    For Gas (AT&T) assembly code.
-    """
-    name = 'GAS'
-    aliases = ['gas']
-    filenames = ['*.s', '*.S']
-    mimetypes = ['text/x-gas']
-
-    #: optional Comment or Whitespace
-    string = r'"(\\"|[^"])*"'
-    char = r'[a-zA-Z$._0-9 at -]'
-    identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
-    number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            (identifier + ':', Name.Label),
-            (r'\.' + identifier, Name.Attribute, 'directive-args'),
-            (r'lock|rep(n?z)?|data\d+', Name.Attribute),
-            (identifier, Name.Function, 'instruction-args'),
-            (r'[\r\n]+', Text)
-        ],
-        'directive-args': [
-            (identifier, Name.Constant),
-            (string, String),
-            ('@' + identifier, Name.Attribute),
-            (number, Number.Integer),
-            (r'[\r\n]+', Text, '#pop'),
-
-            (r'#.*?$', Comment, '#pop'),
-
-            include('punctuation'),
-            include('whitespace')
-        ],
-        'instruction-args': [
-            # For objdump-disassembled code, shouldn't occur in
-            # actual assembler input
-            ('([a-z0-9]+)( )(<)('+identifier+')(>)',
-                bygroups(Number.Hex, Text, Punctuation, Name.Constant,
-                         Punctuation)),
-            ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
-                bygroups(Number.Hex, Text, Punctuation, Name.Constant,
-                         Punctuation, Number.Integer, Punctuation)),
-
-            # Address constants
-            (identifier, Name.Constant),
-            (number, Number.Integer),
-            # Registers
-            ('%' + identifier, Name.Variable),
-            # Numeric constants
-            ('$'+number, Number.Integer),
-            (r"$'(.|\\')'", String.Char),
-            (r'[\r\n]+', Text, '#pop'),
-            (r'#.*?$', Comment, '#pop'),
-            include('punctuation'),
-            include('whitespace')
-        ],
-        'whitespace': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'#.*?\n', Comment)
-        ],
-        'punctuation': [
-            (r'[-*,.():]+', Punctuation)
-        ]
-    }
-
-    def analyse_text(text):
-        if re.match(r'^\.(text|data|section)', text, re.M):
-            return True
-        elif re.match(r'^\.\w+', text, re.M):
-            return 0.1
-
-
-class ObjdumpLexer(RegexLexer):
-    """
-    For the output of 'objdump -dr'
-    """
-    name = 'objdump'
-    aliases = ['objdump']
-    filenames = ['*.objdump']
-    mimetypes = ['text/x-objdump']
-
-    hex = r'[0-9A-Za-z]'
-
-    tokens = {
-        'root': [
-            # File name & format:
-            ('(.*?)(:)( +file format )(.*?)$',
-                bygroups(Name.Label, Punctuation, Text, String)),
-            # Section header
-            ('(Disassembly of section )(.*?)(:)$',
-                bygroups(Text, Name.Label, Punctuation)),
-            # Function labels
-            # (With offset)
-            ('('+hex+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
-                bygroups(Number.Hex, Text, Punctuation, Name.Function,
-                         Punctuation, Number.Hex, Punctuation)),
-            # (Without offset)
-            ('('+hex+'+)( )(<)(.*?)(>:)$',
-                bygroups(Number.Hex, Text, Punctuation, Name.Function,
-                         Punctuation)),
-            # Code line with disassembled instructions
-            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *\t)([a-zA-Z].*?)$',
-                bygroups(Text, Name.Label, Text, Number.Hex, Text,
-                         using(GasLexer))),
-            # Code line with ascii
-            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *)(.*?)$',
-                bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
-            # Continued code line, only raw opcodes without disassembled
-            # instruction
-            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)$',
-                bygroups(Text, Name.Label, Text, Number.Hex)),
-            # Skipped a few bytes
-            (r'\t\.\.\.$', Text),
-            # Relocation line
-            # (With offset)
-            (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x' + hex + '+)$',
-                bygroups(Text, Name.Label, Text, Name.Property, Text,
-                         Name.Constant, Punctuation, Number.Hex)),
-            # (Without offset)
-            (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)$',
-                bygroups(Text, Name.Label, Text, Name.Property, Text,
-                         Name.Constant)),
-            (r'[^\n]+\n', Other)
-        ]
-    }
-
-
-class DObjdumpLexer(DelegatingLexer):
-    """
-    For the output of 'objdump -Sr on compiled D files'
-    """
-    name = 'd-objdump'
-    aliases = ['d-objdump']
-    filenames = ['*.d-objdump']
-    mimetypes = ['text/x-d-objdump']
-
-    def __init__(self, **options):
-        super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options)
-
-
-class CppObjdumpLexer(DelegatingLexer):
-    """
-    For the output of 'objdump -Sr on compiled C++ files'
-    """
-    name = 'cpp-objdump'
-    aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
-    filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
-    mimetypes = ['text/x-cpp-objdump']
-
-    def __init__(self, **options):
-        super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options)
-
-
-class CObjdumpLexer(DelegatingLexer):
-    """
-    For the output of 'objdump -Sr on compiled C files'
-    """
-    name = 'c-objdump'
-    aliases = ['c-objdump']
-    filenames = ['*.c-objdump']
-    mimetypes = ['text/x-c-objdump']
-
-    def __init__(self, **options):
-        super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
-
-
-class LlvmLexer(RegexLexer):
-    """
-    For LLVM assembly code.
-    """
-    name = 'LLVM'
-    aliases = ['llvm']
-    filenames = ['*.ll']
-    mimetypes = ['text/x-llvm']
-
-    #: optional Comment or Whitespace
-    string = r'"[^"]*?"'
-    identifier = r'([-a-zA-Z$._][-a-zA-Z$._0-9]*|' + string + ')'
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-
-            # Before keywords, because keywords are valid label names :(...
-            (identifier + '\s*:', Name.Label),
-
-            include('keyword'),
-
-            (r'%' + identifier, Name.Variable),#Name.Identifier.Local),
-            (r'@' + identifier, Name.Variable.Global),#Name.Identifier.Global),
-            (r'%\d+', Name.Variable.Anonymous),#Name.Identifier.Anonymous),
-            (r'@\d+', Name.Variable.Global),#Name.Identifier.Anonymous),
-            (r'!' + identifier, Name.Variable),
-            (r'!\d+', Name.Variable.Anonymous),
-            (r'c?' + string, String),
-
-            (r'0[xX][a-fA-F0-9]+', Number),
-            (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
-            (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
-        ],
-        'whitespace': [
-            (r'(\n|\s)+', Text),
-            (r';.*?\n', Comment)
-        ],
-        'keyword': [
-            # Regular keywords
-            (r'(begin|end'
-             r'|true|false'
-             r'|declare|define'
-             r'|global|constant'
-
-             r'|private|linker_private|internal|available_externally|linkonce'
-             r'|linkonce_odr|weak|weak_odr|appending|dllimport|dllexport'
-             r'|common|default|hidden|protected|extern_weak|external'
-             r'|thread_local|zeroinitializer|undef|null|to|tail|target|triple'
-             r'|datalayout|volatile|nuw|nsw|nnan|ninf|nsz|arcp|fast|exact|inbounds'
-             r'|align|addrspace|section|alias|module|asm|sideeffect|gc|dbg'
-
-             r'|ccc|fastcc|coldcc|x86_stdcallcc|x86_fastcallcc|arm_apcscc'
-             r'|arm_aapcscc|arm_aapcs_vfpcc'
-
-             r'|cc|c'
-
-             r'|signext|zeroext|inreg|sret|nounwind|noreturn|noalias|nocapture'
-             r'|byval|nest|readnone|readonly'
-
-             r'|inlinehint|noinline|alwaysinline|optsize|ssp|sspreq|noredzone'
-             r'|noimplicitfloat|naked'
-
-             r'|type|opaque'
-
-             r'|eq|ne|slt|sgt|sle'
-             r'|sge|ult|ugt|ule|uge'
-             r'|oeq|one|olt|ogt|ole'
-             r'|oge|ord|uno|ueq|une'
-             r'|x'
-
-             # instructions
-             r'|add|fadd|sub|fsub|mul|fmul|udiv|sdiv|fdiv|urem|srem|frem|shl'
-             r'|lshr|ashr|and|or|xor|icmp|fcmp'
-
-             r'|phi|call|trunc|zext|sext|fptrunc|fpext|uitofp|sitofp|fptoui'
-             r'fptosi|inttoptr|ptrtoint|bitcast|select|va_arg|ret|br|switch'
-             r'|invoke|unwind|unreachable'
-
-             r'|malloc|alloca|free|load|store|getelementptr'
-
-             r'|extractelement|insertelement|shufflevector|getresult'
-             r'|extractvalue|insertvalue'
-
-             r')\b', Keyword),
-
-            # Types
-            (r'void|float|double|x86_fp80|fp128|ppc_fp128|label|metadata',
-             Keyword.Type),
-
-            # Integer types
-            (r'i[1-9]\d*', Keyword)
-        ]
-    }
-
-
-class NasmLexer(RegexLexer):
-    """
-    For Nasm (Intel) assembly code.
-    """
-    name = 'NASM'
-    aliases = ['nasm']
-    filenames = ['*.asm', '*.ASM']
-    mimetypes = ['text/x-nasm']
-
-    identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?#@~]*'
-    hexn = r'(?:0[xX][0-9a-fA-F]+|$0[0-9a-fA-F]*|[0-9]+[0-9a-fA-F]*h)'
-    octn = r'[0-7]+q'
-    binn = r'[01]+b'
-    decn = r'[0-9]+'
-    floatn = decn + r'\.e?' + decn
-    string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
-    declkw = r'(?:res|d)[bwdqt]|times'
-    register = (r'r[0-9][0-5]?[bwd]|'
-                r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
-                r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
-    wordop = r'seg|wrt|strict'
-    type = r'byte|[dq]?word'
-    directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
-                  r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
-                  r'EXPORT|LIBRARY|MODULE')
-
-    flags = re.IGNORECASE | re.MULTILINE
-    tokens = {
-        'root': [
-            include('whitespace'),
-            (r'^\s*%', Comment.Preproc, 'preproc'),
-            (identifier + ':', Name.Label),
-            (r'(%s)(\s+)(equ)' % identifier,
-                bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
-                'instruction-args'),
-            (directives, Keyword, 'instruction-args'),
-            (declkw, Keyword.Declaration, 'instruction-args'),
-            (identifier, Name.Function, 'instruction-args'),
-            (r'[\r\n]+', Text)
-        ],
-        'instruction-args': [
-            (string, String),
-            (hexn, Number.Hex),
-            (octn, Number.Oct),
-            (binn, Number),
-            (floatn, Number.Float),
-            (decn, Number.Integer),
-            include('punctuation'),
-            (register, Name.Builtin),
-            (identifier, Name.Variable),
-            (r'[\r\n]+', Text, '#pop'),
-            include('whitespace')
-        ],
-        'preproc': [
-            (r'[^;\n]+', Comment.Preproc),
-            (r';.*?\n', Comment.Single, '#pop'),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'whitespace': [
-            (r'\n', Text),
-            (r'[ \t]+', Text),
-            (r';.*', Comment.Single)
-        ],
-        'punctuation': [
-            (r'[,():\[\]]+', Punctuation),
-            (r'[&|^<>+*/%~-]+', Operator),
-            (r'[$]+', Keyword.Constant),
-            (wordop, Operator.Word),
-            (type, Keyword.Type)
-        ],
-    }
-
-
-class Ca65Lexer(RegexLexer):
-    """
-    For ca65 assembler sources.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'ca65'
-    aliases = ['ca65']
-    filenames = ['*.s']
-
-    flags = re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r';.*', Comment.Single),
-            (r'\s+', Text),
-            (r'[a-z_.@$][\w.@$]*:', Name.Label),
-            (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
-             r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
-             r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
-             r'|bit)\b', Keyword),
-            (r'\.[a-z0-9_]+', Keyword.Pseudo),
-            (r'[-+~*/^&|!<>=]', Operator),
-            (r'"[^"\n]*.', String),
-            (r"'[^'\n]*.", String.Char),
-            (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
-            (r'\d+|%[01]+', Number.Integer),
-            (r'[#,.:()=]', Punctuation),
-            (r'[a-z_.@$][\w.@$]*', Name),
-        ]
-    }
-
-    def analyse_text(self, text):
-        # comments in GAS start with "#"
-        if re.match(r'^\s*;', text, re.MULTILINE):
-            return 0.9
diff --git a/python/ext-libs/pygments/lexers/compiled.py b/python/ext-libs/pygments/lexers/compiled.py
deleted file mode 100644
index 7513a4e..0000000
--- a/python/ext-libs/pygments/lexers/compiled.py
+++ /dev/null
@@ -1,3496 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.compiled
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for compiled languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-from string import Template
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
-     this, combined, inherit, do_insertions
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-     Number, Punctuation, Error, Literal, Generic
-from pygments.scanner import Scanner
-
-# backwards compatibility
-from pygments.lexers.functional import OcamlLexer
-from pygments.lexers.jvm import JavaLexer, ScalaLexer
-
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer', 'DylanLexer',
-           'ObjectiveCLexer', 'ObjectiveCppLexer', 'FortranLexer', 'GLShaderLexer',
-           'PrologLexer', 'CythonLexer', 'ValaLexer', 'OocLexer', 'GoLexer',
-           'FelixLexer', 'AdaLexer', 'Modula2Lexer', 'BlitzMaxLexer',
-           'NimrodLexer', 'FantomLexer', 'RustLexer', 'CudaLexer', 'MonkeyLexer',
-           'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer',
-           'CobolFreeformatLexer', 'LogosLexer']
-
-
-class CFamilyLexer(RegexLexer):
-    """
-    For C family source code.  This is used as a base class to avoid repetitious
-    definitions.
-    """
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-    #: only one /* */ style comment
-    _ws1 = r':\s*/[*].*?[*]/\s*'
-
-    tokens = {
-        'whitespace': [
-            # preprocessor directives: without whitespace
-            ('^#if\s+0', Comment.Preproc, 'if0'),
-            ('^#', Comment.Preproc, 'macro'),
-            # or with whitespace
-            ('^(' + _ws1 + r')(#if\s+0)',
-             bygroups(using(this), Comment.Preproc), 'if0'),
-            ('^(' + _ws1 + ')(#)',
-             bygroups(using(this), Comment.Preproc), 'macro'),
-            (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))',
-             bygroups(Text, Name.Label)),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-        ],
-        'statements': [
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'0[0-7]+[LlUu]*', Number.Oct),
-            (r'\d+[LlUu]*', Number.Integer),
-            (r'\*/', Error),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.]', Punctuation),
-            (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
-            (r'(auto|break|case|const|continue|default|do|else|enum|extern|'
-             r'for|goto|if|register|restricted|return|sizeof|static|struct|'
-             r'switch|typedef|union|volatile|while)\b', Keyword),
-            (r'(bool|int|long|float|short|double|char|unsigned|signed|void|'
-             r'[a-z_][a-z0-9_]*_t)\b',
-             Keyword.Type),
-            (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
-            # Vector intrinsics
-            (r'(__(m128i|m128d|m128|m64))\b', Keyword.Reserved),
-            # Microsoft-isms
-            (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
-             r'declspec|finally|int64|try|leave|wchar_t|w64|unaligned|'
-             r'raise|noop|identifier|forceinline|assume)\b', Keyword.Reserved),
-            (r'(true|false|NULL)\b', Name.Builtin),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'root': [
-            include('whitespace'),
-            # functions
-            (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))'    # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
-             r'(\s*\([^;]*?\))'                      # signature
-             r'(' + _ws + r')?({)',
-             bygroups(using(this), Name.Function, using(this), using(this),
-                      Punctuation),
-             'function'),
-            # function declarations
-            (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))'    # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
-             r'(\s*\([^;]*?\))'                      # signature
-             r'(' + _ws + r')?(;)',
-             bygroups(using(this), Name.Function, using(this), using(this),
-                      Punctuation)),
-            ('', Text, 'statement'),
-        ],
-        'statement' : [
-            include('whitespace'),
-            include('statements'),
-            ('[{}]', Punctuation),
-            (';', Punctuation, '#pop'),
-        ],
-        'function': [
-            include('whitespace'),
-            include('statements'),
-            (';', Punctuation),
-            ('{', Punctuation, '#push'),
-            ('}', Punctuation, '#pop'),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
-             r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
-            (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
-            (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
-            (r'.*?\n', Comment),
-        ]
-    }
-
-    stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
-                    'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
-                    'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
-                    'wctrans_t', 'wint_t', 'wctype_t']
-    c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
-                 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
-                 'int_least16_t', 'int_least32_t', 'int_least64_t',
-                 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
-                 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
-                 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
-                 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t',
-                 'uintmax_t']
-
-    def __init__(self, **options):
-        self.stdlibhighlighting = get_bool_opt(options,
-                'stdlibhighlighting', True)
-        self.c99highlighting = get_bool_opt(options,
-                'c99highlighting', True)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name:
-                if self.stdlibhighlighting and value in self.stdlib_types:
-                    token = Keyword.Type
-                elif self.c99highlighting and value in self.c99_types:
-                    token = Keyword.Type
-            yield index, token, value
-
-
-class CLexer(CFamilyLexer):
-    """
-    For C source code with preprocessor directives.
-    """
-    name = 'C'
-    aliases = ['c']
-    filenames = ['*.c', '*.h', '*.idc']
-    mimetypes = ['text/x-chdr', 'text/x-csrc']
-    priority = 0.1
-
-    def analyse_text(text):
-        return 0.1
-
-
-class CppLexer(CFamilyLexer):
-    """
-    For C++ source code with preprocessor directives.
-    """
-    name = 'C++'
-    aliases = ['cpp', 'c++']
-    filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
-                 '*.cc', '*.hh', '*.cxx', '*.hxx',
-                 '*.C', '*.H', '*.cp', '*.CPP']
-    mimetypes = ['text/x-c++hdr', 'text/x-c++src']
-    priority = 0.1
-
-    tokens = {
-        'statements': [
-            (r'(asm|catch|const_cast|delete|dynamic_cast|explicit|'
-             r'export|friend|mutable|namespace|new|operator|'
-             r'private|protected|public|reinterpret_cast|'
-             r'restrict|static_cast|template|this|throw|throws|'
-             r'typeid|typename|using|virtual)\b', Keyword),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            inherit,
-         ],
-        'root': [
-            inherit,
-            # C++ Microsoft-isms
-            (r'__(virtual_inheritance|uuidof|super|single_inheritance|'
-             r'multiple_inheritance|interface|event)\b', Keyword.Reserved),
-            # Offload C++ extensions, http://offload.codeplay.com/
-            (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
-        ],
-        'classname': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
-            # template specification
-            (r'\s*(?=>)', Text, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        return 0.1
-
-
-class ECLexer(CLexer):
-    """
-    For eC source code with preprocessor directives.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'eC'
-    aliases = ['ec']
-    filenames = ['*.ec', '*.eh']
-    mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
-    tokens = {
-        'statements': [
-            (r'(virtual|class|private|public|property|import|delete|new|new0|'
-             r'renew|renew0|define|get|set|remote|dllexport|dllimport|stdcall|'
-             r'subclass|__on_register_module|namespace|using|typed_object|'
-             r'any_object|incref|register|watch|stopwatching|firewatchers|'
-             r'watchable|class_designer|class_fixed|class_no_expansion|isset|'
-             r'class_default_property|property_category|class_data|'
-             r'class_property|virtual|thisclass|'
-             r'dbtable|dbindex|database_open|dbfield)\b', Keyword),
-            (r'(uint|uint16|uint32|uint64|bool|byte|unichar|int64)\b',
-             Keyword.Type),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(null|value|this)\b', Name.Builtin),
-            inherit,
-        ],
-        'classname': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
-            # template specification
-            (r'\s*(?=>)', Text, '#pop'),
-        ],
-    }
-
-
-class DLexer(RegexLexer):
-    """
-    For D source.
-
-    *New in Pygments 1.2.*
-    """
-    name = 'D'
-    filenames = ['*.d', '*.di']
-    aliases = ['d']
-    mimetypes = ['text/x-dsrc']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            #(r'\\\n', Text), # line continuations
-            # Comments
-            (r'//(.*?)\n', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'/\+', Comment.Multiline, 'nested_comment'),
-            # Keywords
-            (r'(abstract|alias|align|asm|assert|auto|body|break|case|cast'
-             r'|catch|class|const|continue|debug|default|delegate|delete'
-             r'|deprecated|do|else|enum|export|extern|finally|final'
-             r'|foreach_reverse|foreach|for|function|goto|if|import|inout'
-             r'|interface|invariant|in|is|lazy|mixin|module|new|nothrow|out'
-             r'|override|package|pragma|private|protected|public|pure|ref|return'
-             r'|scope|static|struct|super|switch|synchronized|template|this'
-             r'|throw|try|typedef|typeid|typeof|union|unittest|version|volatile'
-             r'|while|with|__traits)\b', Keyword
-            ),
-            (r'(bool|byte|cdouble|cent|cfloat|char|creal|dchar|double|float'
-             r'|idouble|ifloat|int|ireal|long|real|short|ubyte|ucent|uint|ulong'
-             r'|ushort|void|wchar)\b', Keyword.Type
-            ),
-            (r'(false|true|null)\b', Keyword.Constant),
-            (r'macro\b', Keyword.Reserved),
-            (r'(string|wstring|dstring)\b', Name.Builtin),
-            # FloatLiteral
-            # -- HexFloat
-            (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
-             r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
-            # -- DecimalFloat
-            (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
-             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
-            (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
-            # IntegerLiteral
-            # -- Binary
-            (r'0[Bb][01_]+', Number),
-            # -- Octal
-            (r'0[0-7_]+', Number.Oct),
-            # -- Hexadecimal
-            (r'0[xX][0-9a-fA-F_]+', Number.Hex),
-            # -- Decimal
-            (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
-            # CharacterLiteral
-            (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
-             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
-             String.Char
-            ),
-            # StringLiteral
-            # -- WysiwygString
-            (r'r"[^"]*"[cwd]?', String),
-            # -- AlternateWysiwygString
-            (r'`[^`]*`[cwd]?', String),
-            # -- DoubleQuotedString
-            (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
-            # -- EscapeSequence
-            (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
-             r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
-             String
-            ),
-            # -- HexString
-            (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
-            # -- DelimitedString
-            (r'q"\[', String, 'delimited_bracket'),
-            (r'q"\(', String, 'delimited_parenthesis'),
-            (r'q"<', String, 'delimited_angle'),
-            (r'q"{', String, 'delimited_curly'),
-            (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
-            (r'q"(.).*?\1"', String),
-            # -- TokenString
-            (r'q{', String, 'token_string'),
-            # Tokens
-            (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
-             r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
-             r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation
-            ),
-            # Identifier
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'nested_comment': [
-            (r'[^+/]+', Comment.Multiline),
-            (r'/\+', Comment.Multiline, '#push'),
-            (r'\+/', Comment.Multiline, '#pop'),
-            (r'[+/]', Comment.Multiline),
-        ],
-        'token_string': [
-            (r'{', Punctuation, 'token_string_nest'),
-            (r'}', String, '#pop'),
-            include('root'),
-        ],
-        'token_string_nest': [
-            (r'{', Punctuation, '#push'),
-            (r'}', Punctuation, '#pop'),
-            include('root'),
-        ],
-        'delimited_bracket': [
-            (r'[^\[\]]+', String),
-            (r'\[', String, 'delimited_inside_bracket'),
-            (r'\]"', String, '#pop'),
-        ],
-        'delimited_inside_bracket': [
-            (r'[^\[\]]+', String),
-            (r'\[', String, '#push'),
-            (r'\]', String, '#pop'),
-        ],
-        'delimited_parenthesis': [
-            (r'[^\(\)]+', String),
-            (r'\(', String, 'delimited_inside_parenthesis'),
-            (r'\)"', String, '#pop'),
-        ],
-        'delimited_inside_parenthesis': [
-            (r'[^\(\)]+', String),
-            (r'\(', String, '#push'),
-            (r'\)', String, '#pop'),
-        ],
-        'delimited_angle': [
-            (r'[^<>]+', String),
-            (r'<', String, 'delimited_inside_angle'),
-            (r'>"', String, '#pop'),
-        ],
-        'delimited_inside_angle': [
-            (r'[^<>]+', String),
-            (r'<', String, '#push'),
-            (r'>', String, '#pop'),
-        ],
-        'delimited_curly': [
-            (r'[^{}]+', String),
-            (r'{', String, 'delimited_inside_curly'),
-            (r'}"', String, '#pop'),
-        ],
-        'delimited_inside_curly': [
-            (r'[^{}]+', String),
-            (r'{', String, '#push'),
-            (r'}', String, '#pop'),
-        ],
-    }
-
-
class DelphiLexer(Lexer):
    """
    For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
    Turbo Pascal and Free Pascal source code.

    Additional options accepted:

    `turbopascal`
        Highlight Turbo Pascal specific keywords (default: ``True``).
    `delphi`
        Highlight Borland Delphi specific keywords (default: ``True``).
    `freepascal`
        Highlight Free Pascal specific keywords (default: ``True``).
    `units`
        A list of units that should be considered builtin, supported are
        ``System``, ``SysUtils``, ``Classes`` and ``Math``.
        Default is to consider all of them builtin.
    """
    name = 'Delphi'
    aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
    filenames = ['*.pas']
    mimetypes = ['text/x-pascal']

    # Keyword vocabularies for the three supported dialects.  Which of them
    # actually end up in ``self.keywords`` is decided per instance in
    # ``__init__`` according to the options documented above.
    TURBO_PASCAL_KEYWORDS = [
        'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
        'const', 'constructor', 'continue', 'destructor', 'div', 'do',
        'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
        'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
        'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
        'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
        'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
        'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
    ]

    DELPHI_KEYWORDS = [
        'as', 'class', 'except', 'exports', 'finalization', 'finally',
        'initialization', 'is', 'library', 'on', 'property', 'raise',
        'threadvar', 'try'
    ]

    FREE_PASCAL_KEYWORDS = [
        'dispose', 'exit', 'false', 'new', 'true'
    ]

    # Keywords that end a routine/property header context in
    # ``get_tokens_unprocessed`` once the parentheses are balanced.
    BLOCK_KEYWORDS = set([
        'begin', 'class', 'const', 'constructor', 'destructor', 'end',
        'finalization', 'function', 'implementation', 'initialization',
        'label', 'library', 'operator', 'procedure', 'program', 'property',
        'record', 'threadvar', 'type', 'unit', 'uses', 'var'
    ])

    # Calling-convention / dispatch modifiers highlighted as pseudo
    # keywords while inside a routine header.
    FUNCTION_MODIFIERS = set([
        'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
        'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
        'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
        'override', 'assembler'
    ])

    # XXX: those aren't global. but currently we know no way for defining
    #      them just for the type context.
    DIRECTIVES = set([
        'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
        'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
        'published', 'public'
    ])

    # Predefined type names, highlighted as Keyword.Type (compared in
    # lowercase; Pascal is case-insensitive).
    BUILTIN_TYPES = set([
        'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
        'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
        'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
        'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
        'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
        'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
        'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
        'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
        'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
        'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
        'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
        'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
        'widechar', 'widestring', 'word', 'wordbool'
    ])

    # Routines provided by the standard units.  Only the units named in
    # the ``units`` option contribute to ``self.builtins`` (see __init__).
    BUILTIN_UNITS = {
        'System': [
            'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
            'append', 'arctan', 'assert', 'assigned', 'assignfile',
            'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
            'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
            'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
            'dispose', 'doubletocomp', 'endthread', 'enummodules',
            'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
            'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
            'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
            'findresourcehinstance', 'flush', 'frac', 'freemem',
            'get8087cw', 'getdir', 'getlasterror', 'getmem',
            'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
            'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
            'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
            'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
            'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
            'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
            'randomize', 'read', 'readln', 'reallocmem',
            'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
            'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
            'set8087cw', 'setlength', 'setlinebreakstyle',
            'setmemorymanager', 'setstring', 'settextbuf',
            'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
            'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
            'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
            'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
            'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
            'utf8tounicode', 'val', 'vararrayredim', 'varclear',
            'widecharlentostring', 'widecharlentostrvar',
            'widechartostring', 'widechartostrvar',
            'widestringtoucs4string', 'write', 'writeln'
        ],
        'SysUtils': [
            'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
            'allocmem', 'ansicomparefilename', 'ansicomparestr',
            'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
            'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
            'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
            'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
            'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
            'ansistrscan', 'ansistrupper', 'ansiuppercase',
            'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
            'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
            'callterminateprocs', 'changefileext', 'charlength',
            'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
            'comparetext', 'createdir', 'createguid', 'currentyear',
            'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
            'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
            'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
            'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
            'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
            'exceptionerrormessage', 'excludetrailingbackslash',
            'excludetrailingpathdelimiter', 'expandfilename',
            'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
            'extractfiledrive', 'extractfileext', 'extractfilename',
            'extractfilepath', 'extractrelativepath', 'extractshortpathname',
            'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
            'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
            'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
            'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
            'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
            'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
            'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
            'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
            'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
            'getenvironmentvariable', 'getfileversion', 'getformatsettings',
            'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
            'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
            'includetrailingbackslash', 'includetrailingpathdelimiter',
            'incmonth', 'initializepackage', 'interlockeddecrement',
            'interlockedexchange', 'interlockedexchangeadd',
            'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
            'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
            'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
            'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
            'outofmemoryerror', 'quotedstr', 'raiselastoserror',
            'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
            'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
            'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
            'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
            'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
            'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
            'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
            'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
            'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
            'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
            'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
            'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
            'strtotimedef', 'strupper', 'supports', 'syserrormessage',
            'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
            'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
            'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
            'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
            'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
            'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
            'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
            'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
            'wraptext'
        ],
        'Classes': [
            'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
            'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
            'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
            'groupdescendantswith', 'hextobin', 'identtoint',
            'initinheritedcomponent', 'inttoident', 'invalidpoint',
            'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
            'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
            'pointsequal', 'readcomponentres', 'readcomponentresex',
            'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
            'registerclasses', 'registercomponents', 'registerintegerconsts',
            'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
            'teststreamformat', 'unregisterclass', 'unregisterclasses',
            'unregisterintegerconsts', 'unregistermoduleclasses',
            'writecomponentresfile'
        ],
        'Math': [
            'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
            'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
            'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
            'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
            'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
            'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
            'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
            'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
            'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
            'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
            'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
            'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
            'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
            'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
            'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
            'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
            'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
            'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
            'tan', 'tanh', 'totalvariance', 'variance'
        ]
    }

    # x86 register mnemonics recognised inside ``asm`` blocks.
    ASM_REGISTERS = set([
        'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
        'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
        'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
        'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
        'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
        'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
        'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
        'xmm6', 'xmm7'
    ])

    # x86 instruction mnemonics recognised inside ``asm`` blocks.
    ASM_INSTRUCTIONS = set([
        'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
        'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
        'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
        'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
        'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
        'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
        'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
        'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
        'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
        'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
        'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
        'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
        'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
        'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
        'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
        'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
        'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
        'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
        'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
        'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
        'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
        'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
        'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
        'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
        'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
        'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
        'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
        'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
        'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
        'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
        'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
        'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
        'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
        'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
        'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
        'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
        'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
        'xlatb', 'xor'
    ])

    def __init__(self, **options):
        # Assemble the per-instance keyword and builtin sets from the
        # dialect options documented in the class docstring.
        Lexer.__init__(self, **options)
        self.keywords = set()
        if get_bool_opt(options, 'turbopascal', True):
            self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
        if get_bool_opt(options, 'delphi', True):
            self.keywords.update(self.DELPHI_KEYWORDS)
        if get_bool_opt(options, 'freepascal', True):
            self.keywords.update(self.FREE_PASCAL_KEYWORDS)
        self.builtins = set()
        for unit in get_list_opt(options, 'units', self.BUILTIN_UNITS.keys()):
            self.builtins.update(self.BUILTIN_UNITS[unit])

    def get_tokens_unprocessed(self, text):
        """Tokenize *text* with a hand-written scanner.

        A small state stack ('initial', 'string', 'asm') drives the scan;
        several booleans track routine/property header contexts so that
        routine names, property names and header modifiers can be
        highlighted specially.  Yields ``(pos, token, value)`` tuples.
        """
        scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
        stack = ['initial']
        in_function_block = False       # inside a routine header, until a BLOCK_KEYWORD
        in_property_block = False       # inside a property declaration
        was_dot = False                 # previous significant token was '.'
        next_token_is_function = False  # next identifier is a routine name
        next_token_is_property = False  # next identifier is a property name
        collect_labels = False          # after 'label'/'goto': names are labels
        block_labels = set()            # labels declared in the current block
        brace_balance = [0, 0]          # open '(' and '[' counts inside a header

        while not scanner.eos:
            token = Error

            if stack[-1] == 'initial':
                if scanner.scan(r'\s+'):
                    token = Text
                elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
                    # NOTE(review): the matched text includes the opening
                    # '{' / '(*', so this startswith('$') check appears to
                    # never fire and compiler directives like {$I ...} get
                    # tagged Comment.Multiline — verify intent.
                    if scanner.match.startswith('$'):
                        token = Comment.Preproc
                    else:
                        token = Comment.Multiline
                elif scanner.scan(r'//.*?$'):
                    token = Comment.Single
                elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
                    token = Operator
                    # stop label highlighting on next ";"
                    if collect_labels and scanner.match == ';':
                        collect_labels = False
                elif scanner.scan(r'[\(\)\[\]]+'):
                    token = Punctuation
                    # abort function naming ``foo = Function(...)``
                    next_token_is_function = False
                    # if we are in a function block we count the open
                    # braces because otherwise it's impossible to
                    # determine the end of the modifier context
                    if in_function_block or in_property_block:
                        if scanner.match == '(':
                            brace_balance[0] += 1
                        elif scanner.match == ')':
                            brace_balance[0] -= 1
                        elif scanner.match == '[':
                            brace_balance[1] += 1
                        elif scanner.match == ']':
                            brace_balance[1] -= 1
                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
                    lowercase_name = scanner.match.lower()
                    if lowercase_name == 'result':
                        token = Name.Builtin.Pseudo
                    elif lowercase_name in self.keywords:
                        token = Keyword
                        # if we are in a special block and a
                        # block ending keyword occurs (and the parenthesis
                        # is balanced) we end the current block context
                        if (in_function_block or in_property_block) and \
                           lowercase_name in self.BLOCK_KEYWORDS and \
                           brace_balance[0] <= 0 and \
                           brace_balance[1] <= 0:
                            in_function_block = False
                            in_property_block = False
                            brace_balance = [0, 0]
                            block_labels = set()
                        if lowercase_name in ('label', 'goto'):
                            collect_labels = True
                        elif lowercase_name == 'asm':
                            stack.append('asm')
                        elif lowercase_name == 'property':
                            in_property_block = True
                            next_token_is_property = True
                        elif lowercase_name in ('procedure', 'operator',
                                                'function', 'constructor',
                                                'destructor'):
                            in_function_block = True
                            next_token_is_function = True
                    # we are in a function block and the current name
                    # is in the set of registered modifiers. highlight
                    # it as pseudo keyword
                    elif in_function_block and \
                         lowercase_name in self.FUNCTION_MODIFIERS:
                        token = Keyword.Pseudo
                    # if we are in a property highlight some more
                    # modifiers
                    elif in_property_block and \
                         lowercase_name in ('read', 'write'):
                        token = Keyword.Pseudo
                        next_token_is_function = True
                    # if the last iteration set next_token_is_function
                    # to true we now want this name highlighted as
                    # function. so do that and reset the state
                    elif next_token_is_function:
                        # Look if the next token is a dot. If yes it's
                        # not a function, but a class name and the
                        # part after the dot a function name
                        if scanner.test(r'\s*\.\s*'):
                            token = Name.Class
                        # it's not a dot, our job is done
                        else:
                            token = Name.Function
                            next_token_is_function = False
                    # same for properties
                    elif next_token_is_property:
                        token = Name.Property
                        next_token_is_property = False
                    # Highlight this token as label and add it
                    # to the list of known labels
                    elif collect_labels:
                        token = Name.Label
                        block_labels.add(scanner.match.lower())
                    # name is in list of known labels
                    elif lowercase_name in block_labels:
                        token = Name.Label
                    elif lowercase_name in self.BUILTIN_TYPES:
                        token = Keyword.Type
                    elif lowercase_name in self.DIRECTIVES:
                        token = Keyword.Pseudo
                    # builtins are just builtins if the token
                    # before isn't a dot
                    elif not was_dot and lowercase_name in self.builtins:
                        token = Name.Builtin
                    else:
                        token = Name
                elif scanner.scan(r"'"):
                    token = String
                    stack.append('string')
                elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
                    token = String.Char
                elif scanner.scan(r'\$[0-9A-Fa-f]+'):
                    token = Number.Hex
                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
                    token = Number.Integer
                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
                    token = Number.Float
                else:
                    # if the stack depth is deeper than once, pop
                    if len(stack) > 1:
                        stack.pop()
                    scanner.get_char()

            elif stack[-1] == 'string':
                # inside a '...' string literal; '' is the escaped quote
                if scanner.scan(r"''"):
                    token = String.Escape
                elif scanner.scan(r"'"):
                    token = String
                    stack.pop()
                elif scanner.scan(r"[^']*"):
                    token = String
                else:
                    scanner.get_char()
                    stack.pop()

            elif stack[-1] == 'asm':
                # inside an ``asm ... end`` block: x86 mnemonics/registers
                if scanner.scan(r'\s+'):
                    token = Text
                elif scanner.scan(r'end'):
                    token = Keyword
                    stack.pop()
                elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
                    # NOTE(review): same apparently-dead '$' check as in
                    # the 'initial' state above — verify intent.
                    if scanner.match.startswith('$'):
                        token = Comment.Preproc
                    else:
                        token = Comment.Multiline
                elif scanner.scan(r'//.*?$'):
                    token = Comment.Single
                elif scanner.scan(r"'"):
                    token = String
                    stack.append('string')
                elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
                    token = Name.Label
                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
                    lowercase_name = scanner.match.lower()
                    if lowercase_name in self.ASM_INSTRUCTIONS:
                        token = Keyword
                    elif lowercase_name in self.ASM_REGISTERS:
                        token = Name.Builtin
                    else:
                        token = Name
                elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
                    token = Operator
                elif scanner.scan(r'[\(\)\[\]]+'):
                    token = Punctuation
                elif scanner.scan(r'\$[0-9A-Fa-f]+'):
                    token = Number.Hex
                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
                    token = Number.Integer
                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
                    token = Number.Float
                else:
                    scanner.get_char()
                    stack.pop()

            # remember whether the last significant (non-whitespace) token
            # was a dot — used above to suppress builtin highlighting after
            # a member access like ``Foo.copy``
            if scanner.match.strip():
                was_dot = scanner.match == '.'
            yield scanner.start_pos, token, scanner.match or ''
-
-
class DylanLexer(RegexLexer):
    """
    For the `Dylan <http://www.opendylan.org/>`_ language.

    *New in Pygments 0.7.*
    """

    name = 'Dylan'
    aliases = ['dylan']
    filenames = ['*.dylan', '*.dyl', '*.intr']
    mimetypes = ['text/x-dylan']

    flags = re.IGNORECASE

    # The four word classes below are matched as plain Name by the regex
    # table and then re-flagged (case-insensitively) in
    # ``get_tokens_unprocessed``.
    builtins = set([
        'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
        'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
        'each-subclass', 'exception', 'exclude', 'function', 'generic',
        'handler', 'inherited', 'inline', 'inline-only', 'instance',
        'interface', 'import', 'keyword', 'library', 'macro', 'method',
        'module', 'open', 'primary', 'required', 'sealed', 'sideways',
        'singleton', 'slot', 'thread', 'variable', 'virtual'])

    keywords = set([
        'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
        'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
        'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
        'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
        'while'])

    # Dylan operators are legal name characters, so they lex as Name first
    # and are re-flagged as Operator afterwards.
    operators = set([
        '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
        '>', '>=', '&', '|'])

    functions = set([
        'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
        'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
        'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
        'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
        'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
        'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
        'condition-format-arguments', 'condition-format-string', 'conjoin',
        'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
        'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
        'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
        'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
        'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
        'function-arguments', 'function-return-values',
        'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
        'generic-function-methods', 'head', 'head-setter', 'identity',
        'initialize', 'instance?', 'integral?', 'intersection',
        'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
        'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
        'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
        'min', 'modulo', 'negative', 'negative?', 'next-method',
        'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
        'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
        'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
        'remove-duplicates', 'remove-duplicates!', 'remove-key!',
        'remove-method', 'replace-elements!', 'replace-subsequence!',
        'restart-query', 'return-allowed?', 'return-description',
        'return-query', 'reverse', 'reverse!', 'round', 'round/',
        'row-major-index', 'second', 'second-setter', 'shallow-copy',
        'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
        'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
        'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
        'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
        'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
        'vector', 'zero?'])

    # Dylan identifiers: optional leading backslash, then letters/digits
    # plus the punctuation characters Dylan allows inside names.
    valid_name = '\\\\?[a-zA-Z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+'

    def get_tokens_unprocessed(self, text):
        # Post-process the regex lexer's stream: re-flag Name tokens that
        # (lowercased) belong to one of the word classes defined above.
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                lowercase_value = value.lower()
                if lowercase_value in self.builtins:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.keywords:
                    yield index, Keyword, value
                    continue
                if lowercase_value in self.functions:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.operators:
                    yield index, Operator, value
                    continue
            yield index, token, value

    tokens = {
        # 'root' consumes the interchange-format header lines, then hands
        # off to 'code' via an empty match once no header rule applies.
        'root': [
            # Whitespace
            (r'\s+', Text),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # lid header
            (r'([A-Za-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
                bygroups(Name.Attribute, Operator, Text, String)),

            ('', Text, 'code') # no header match, switch to code
        ],
        'code': [
            # Whitespace
            (r'\s+', Text),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # multi-line comment
            (r'/\*', Comment.Multiline, 'comment'),

            # strings and characters
            (r'"', String, 'string'),
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),

            # binary integer
            (r'#[bB][01]+', Number),

            # octal integer
            (r'#[oO][0-7]+', Number.Oct),

            # floating point
            (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),

            # decimal integer
            (r'[-+]?\d+', Number.Integer),

            # hex integer
            (r'#[xX][0-9a-fA-F]+', Number.Hex),

            # Macro parameters
            (r'(\?' + valid_name + ')(:)'
             r'(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'\?' + valid_name, Name.Tag),

            # Punctuation
            (r'(=>|::|#\(|#\[|##|\?|\?\?|\?=|[(){}\[\],\.;])', Punctuation),

            # Most operators are picked up as names and then re-flagged.
            # This one isn't valid in a name though, so we pick it up now.
            (r':=', Operator),

            # Pick up #t / #f before we match other stuff with #.
            (r'#[tf]', Literal),

            # #"foo" style keywords
            (r'#"', String.Symbol, 'keyword'),

            # #rest, #key, #all-keys, etc.
            (r'#[a-zA-Z0-9-]+', Keyword),

            # required-init-keyword: style keywords.
            (valid_name + ':', Keyword),

            # class names
            (r'<' + valid_name + '>', Name.Class),

            # define variable forms.
            (r'\*' + valid_name + '\*', Name.Variable.Global),

            # define constant forms.
            (r'\$' + valid_name, Name.Constant),

            # everything else. We re-flag some of these in the method above.
            (valid_name, Name),
        ],
        'comment': [
            # /* ... */ comments nest in Dylan, hence the #push/#pop pair
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'keyword': [
            (r'"', String.Symbol, '#pop'),
            (r'[^\\"]+', String.Symbol), # all other characters
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String), # all other characters
            (r'\\\n', String), # line continuation
            (r'\\', String), # stray backslash
        ]
    }
-
-
-class DylanLidLexer(RegexLexer):
-    """
-    For Dylan LID (Library Interchange Definition) files.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'DylanLID'
-    aliases = ['dylan-lid', 'lid']
-    filenames = ['*.lid', '*.hdp']
-    mimetypes = ['text/x-dylan-lid']
-
-    flags = re.IGNORECASE
-
-    tokens = {
-        'root': [
-            # Whitespace
-            (r'\s+', Text),
-
-            # single line comment
-            (r'//.*?\n', Comment.Single),
-
-            # lid header
-            (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
-             bygroups(Name.Attribute, Operator, Text, String)),
-        ]
-    }
-
-
-class DylanConsoleLexer(Lexer):
-    """
-    For Dylan interactive console output like:
-
-    .. sourcecode:: dylan-console
-
-        ? let a = 1;
-        => 1
-        ? a
-        => 1
-
-    This is based on a copy of the RubyConsoleLexer.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Dylan session'
-    aliases = ['dylan-console', 'dylan-repl']
-    filenames = ['*.dylan-console']
-    mimetypes = ['text/x-dylan-console']
-
-    _line_re  = re.compile('.*?\n')
-    _prompt_re = re.compile('\?| ')
-
-    def get_tokens_unprocessed(self, text):
-        dylexer = DylanLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in self._line_re.finditer(text):
-            line = match.group()
-            m = self._prompt_re.match(line)
-            if m is not None:
-                end = m.end()
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:end])]))
-                curcode += line[end:]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                    dylexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-                yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      dylexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-def objective(baselexer):
-    """
-    Generate a subclass of baselexer that accepts the Objective-C syntax
-    extensions.
-    """
-
-    # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
-    # since that's quite common in ordinary C/C++ files.  It's OK to match
-    # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
-    #
-    # The upshot of this is that we CANNOT match @class or @interface
-    _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
-    # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] |  identifier? : )
-    # (note the identifier is *optional* when there is a ':'!)
-    _oc_message = re.compile(r'\[\s*[a-zA-Z_][a-zA-Z0-9_]*\s+'
-                             r'(?:[a-zA-Z_][a-zA-Z0-9_]*\s*\]|'
-                             r'(?:[a-zA-Z_][a-zA-Z0-9_]*)?:)')
-
-    class GeneratedObjectiveCVariant(baselexer):
-        """
-        Implements Objective-C syntax on top of an existing C family lexer.
-        """
-
-        tokens = {
-            'statements': [
-                (r'@"', String, 'string'),
-                (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
-                 String.Char),
-                (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-                (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-                (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-                (r'@0[0-7]+[Ll]?', Number.Oct),
-                (r'@\d+[Ll]?', Number.Integer),
-                (r'(in|@selector|@private|@protected|@public|@encode|'
-                 r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
-                 r'@synthesize|@dynamic|@optional)\b', Keyword),
-                (r'(id|Class|IMP|SEL|BOOL|IBOutlet|IBAction|unichar)\b',
-                 Keyword.Type),
-                (r'@(true|false|YES|NO)\n', Name.Builtin),
-                (r'(YES|NO|nil)\b', Name.Builtin),
-                (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
-                 ('#pop', 'oc_classname')),
-                (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
-                 ('#pop', 'oc_forward_classname')),
-                inherit,
-            ],
-            'oc_classname' : [
-                # interface definition that inherits
-                ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*:\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)?',
-                 bygroups(Name.Class, Text, Name.Class), '#pop'),
-                # interface definition for a category
-                ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(\([a-zA-Z$_][a-zA-Z0-9$_]*\))',
-                 bygroups(Name.Class, Text, Name.Label), '#pop'),
-                # simple interface / implementation
-                ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class, '#pop')
-            ],
-            'oc_forward_classname' : [
-              ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*,\s*)',
-               bygroups(Name.Class, Text), 'oc_forward_classname'),
-              ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*;?)',
-               bygroups(Name.Class, Text), '#pop')
-            ],
-            'root': [
-              # methods
-              (r'^([-+])(\s*)'                         # method marker
-               r'(\(.*?\))?(\s*)'                      # return type
-               r'([a-zA-Z$_][a-zA-Z0-9$_]*:?)',        # begin of method name
-               bygroups(Keyword, Text, using(this),
-                        Text, Name.Function),
-               'method'),
-              inherit,
-            ],
-            'method': [
-                include('whitespace'),
-                # TODO unsure if ellipses are allowed elsewhere, see
-                # discussion in Issue 789
-                (r',', Punctuation),
-                (r'\.\.\.', Punctuation),
-                (r'(\(.*?\))([a-zA-Z$_][a-zA-Z0-9$_]*)', bygroups(using(this),
-                                                                  Name.Variable)),
-                (r'[a-zA-Z$_][a-zA-Z0-9$_]*:', Name.Function),
-                (';', Punctuation, '#pop'),
-                ('{', Punctuation, 'function'),
-                ('', Text, '#pop'),
-            ],
-        }
-
-        def analyse_text(text):
-            if _oc_keywords.search(text):
-                return 1.0
-            elif '@"' in text: # strings
-                return 0.8
-            elif _oc_message.search(text):
-                return 0.8
-            return 0
-
-    return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
-    """
-    For Objective-C source code with preprocessor directives.
-    """
-
-    name = 'Objective-C'
-    aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
-    filenames = ['*.m', '*.h']
-    mimetypes = ['text/x-objective-c']
-    priority = 0.05    # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
-    """
-    For Objective-C++ source code with preprocessor directives.
-    """
-
-    name = 'Objective-C++'
-    aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
-    filenames = ['*.mm', '*.hh']
-    mimetypes = ['text/x-objective-c++']
-    priority = 0.05    # Lower than C++
-
-
-class FortranLexer(RegexLexer):
-    """
-    Lexer for FORTRAN 90 code.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Fortran'
-    aliases = ['fortran']
-    filenames = ['*.f', '*.f90', '*.F', '*.F90']
-    mimetypes = ['text/x-fortran']
-    flags = re.IGNORECASE
-
-    # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
-    # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
-    # Logical (?): NOT, AND, OR, EQV, NEQV
-
-    # Builtins:
-    # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
-    tokens = {
-        'root': [
-            (r'!.*\n', Comment),
-            include('strings'),
-            include('core'),
-            (r'[a-z][a-z0-9_]*', Name.Variable),
-            include('nums'),
-            (r'[\s]+', Text),
-        ],
-        'core': [
-            # Statements
-            (r'\b(ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASYNCHRONOUS|'
-             r'BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|COMMON|CONTAINS|'
-             r'CONTINUE|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|'
-             r'ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDIF|ENTRY|ENUMERATOR|EQUIVALENCE|'
-             r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|'
-             r'GOTO|IF|IMPLICIT|IMPORT|INCLUDE|INQUIRE|INTENT|INTERFACE|'
-             r'INTRINSIC|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|'
-             r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|'
-             r'PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|'
-             r'RECURSIVE|RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|'
-             r'TARGET|THEN|TYPE|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b',
-             Keyword),
-
-            # Data Types
-            (r'\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|'
-             r'LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|'
-             r'C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|'
-             r'C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|'
-             r'C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|'
-             r'C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|'
-             r'C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|'
-             r'C_FUNPTR)\s*\b',
-             Keyword.Type),
-
-            # Operators
-            (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
-            (r'(::)', Keyword.Declaration),
-
-            (r'[(),:&%;]', Punctuation),
-
-            # Intrinsics
-            (r'\b(Abort|Abs|Access|AChar|ACos|AdjustL|AdjustR|AImag|AInt|Alarm|'
-             r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|Associated|'
-             r'ATan|BesJ|BesJN|BesY|BesYN|Bit_Size|BTest|CAbs|CCos|Ceiling|'
-             r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|'
-             r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|'
-             r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|'
-             r'C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_New_Line|'
-             r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|'
-             r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|'
-             r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|'
-             r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|'
-             r'DProd|DSign|DSinH|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|'
-             r'ErF|ErFC|ETime|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|'
-             r'FGetC|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|'
-             r'FTell|GError|GetArg|Get_Command|Get_Command_Argument|'
-             r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|'
-             r'GetUId|GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|'
-             r'IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|'
-             r'ImagPart|Index|Int|IOr|IRand|IsaTty|IShft|IShftC|ISign|'
-             r'Iso_C_Binding|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|'
-             r'LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|'
-             r'Logical|Long|LShift|LStat|LTime|MatMul|Max|MaxExponent|MaxLoc|'
-             r'MaxVal|MClock|Merge|Move_Alloc|Min|MinExponent|MinLoc|MinVal|'
-             r'Mod|Modulo|MvBits|Nearest|New_Line|NInt|Not|Or|Pack|PError|'
-             r'Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|'
-             r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|'
-             r'Same_Type_As|Scale|Scan|Second|Selected_Int_Kind|'
-             r'Selected_Real_Kind|Set_Exponent|Shape|Short|Sign|Signal|SinH|'
-             r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Sum|SymLnk|'
-             r'System|System_Clock|Tan|TanH|Time|Tiny|Transfer|Transpose|Trim|'
-             r'TtyNam|UBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|'
-             r'ZLog|ZSin|ZSqRt)\s*\b',
-             Name.Builtin),
-
-            # Booleans
-            (r'\.(true|false)\.', Name.Builtin),
-            # Comparing Operators
-            (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
-        ],
-
-        'strings': [
-            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
-            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-        ],
-
-        'nums': [
-            (r'\d+(?![.Ee])', Number.Integer),
-            (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
-            (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
-        ],
-    }
-
-
-class GLShaderLexer(RegexLexer):
-    """
-    GLSL (OpenGL Shader) lexer.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'GLSL'
-    aliases = ['glsl']
-    filenames = ['*.vert', '*.frag', '*.geo']
-    mimetypes = ['text/x-glslsrc']
-
-    tokens = {
-        'root': [
-            (r'^#.*', Comment.Preproc),
-            (r'//.*', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
-             Operator),
-            (r'[?:]', Operator), # quick hack for ternary
-            (r'\bdefined\b', Operator),
-            (r'[;{}(),\[\]]', Punctuation),
-            #FIXME when e is present, no decimal point needed
-            (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
-            (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
-            (r'0[xX][0-9a-fA-F]*', Number.Hex),
-            (r'0[0-7]*', Number.Oct),
-            (r'[1-9][0-9]*', Number.Integer),
-            (r'\b(attribute|const|uniform|varying|centroid|break|continue|'
-             r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|'
-             r'false|invariant|discard|return|mat[234]|mat[234]x[234]|'
-             r'vec[234]|[ib]vec[234]|sampler[123]D|samplerCube|'
-             r'sampler[12]DShadow|struct)\b', Keyword),
-            (r'\b(asm|class|union|enum|typedef|template|this|packed|goto|'
-             r'switch|default|inline|noinline|volatile|public|static|extern|'
-             r'external|interface|long|short|double|half|fixed|unsigned|'
-             r'lowp|mediump|highp|precision|input|output|hvec[234]|'
-             r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|'
-             r'cast|namespace|using)\b', Keyword), #future use
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
-            (r'\.', Punctuation),
-            (r'\s+', Text),
-        ],
-    }
-
-
-class PrologLexer(RegexLexer):
-    """
-    Lexer for Prolog files.
-    """
-    name = 'Prolog'
-    aliases = ['prolog']
-    filenames = ['*.prolog', '*.pro', '*.pl']
-    mimetypes = ['text/x-prolog']
-
-    flags = re.UNICODE
-
-    tokens = {
-        'root': [
-            (r'^#.*', Comment.Single),
-            (r'/\*', Comment.Multiline, 'nested-comment'),
-            (r'%.*', Comment.Single),
-            (r'[0-9]+', Number),
-            (r'[\[\](){}|.,;!]', Punctuation),
-            (r':-|-->', Punctuation),
-            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
-             r'\\[0-7]+\\|\\[\w\W]|[^"])*"', String.Double),
-            (r"'(?:''|[^'])*'", String.Atom), # quoted atom
-            # Needs to not be followed by an atom.
-            #(r'=(?=\s|[a-zA-Z\[])', Operator),
-            (r'is\b', Operator),
-            (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
-             Operator),
-            (r'(mod|div|not)\b', Operator),
-            (r'_', Keyword), # The don't-care variable
-            (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
-            (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
-             u'(\\s*)(:-|-->)',
-             bygroups(Name.Function, Text, Operator)), # function defn
-            (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
-             u'(\\s*)(\\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
-             String.Atom), # atom, characters
-            # This one includes !
-            (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
-             String.Atom), # atom, graphics
-            (r'[A-Z_][A-Za-z0-9_]*', Name.Variable),
-            (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
-        ],
-        'nested-comment': [
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'[^*/]+', Comment.Multiline),
-            (r'[*/]', Comment.Multiline),
-        ],
-    }
-
-    def analyse_text(text):
-        return ':-' in text
-
-
-class CythonLexer(RegexLexer):
-    """
-    For Pyrex and `Cython <http://cython.org>`_ source code.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Cython'
-    aliases = ['cython', 'pyx']
-    filenames = ['*.pyx', '*.pxd', '*.pxi']
-    mimetypes = ['text/x-cython', 'application/x-cython']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
-            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-            (r'[^\S\n]+', Text),
-            (r'#.*$', Comment),
-            (r'[]{}:(),;[]', Punctuation),
-            (r'\\\n', Text),
-            (r'\\', Text),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            (r'(<)([a-zA-Z0-9.?]+)(>)',
-             bygroups(Punctuation, Keyword.Type, Punctuation)),
-            (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
-            (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
-             bygroups(Keyword, Number.Integer, Operator, Name, Operator,
-                      Name, Punctuation)),
-            include('keywords'),
-            (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
-            (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
-            (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
-            include('builtins'),
-            include('backtick'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
-            ('[uU]?"""', String, combined('stringescape', 'tdqs')),
-            ("[uU]?'''", String, combined('stringescape', 'tsqs')),
-            ('[uU]?"', String, combined('stringescape', 'dqs')),
-            ("[uU]?'", String, combined('stringescape', 'sqs')),
-            include('name'),
-            include('numbers'),
-        ],
-        'keywords': [
-            (r'(assert|break|by|continue|ctypedef|del|elif|else|except\??|exec|'
-             r'finally|for|gil|global|if|include|lambda|nogil|pass|print|raise|'
-             r'return|try|while|yield|as|with)\b', Keyword),
-            (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
-        ],
-        'builtins': [
-            (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
-             r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
-             r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
-             r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
-             r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
-             r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
-             r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
-             r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
-             r'vars|xrange|zip)\b', Name.Builtin),
-            (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
-             r')\b', Name.Builtin.Pseudo),
-            (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
-             r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
-             r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
-             r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
-             r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
-             r'NotImplemented|NotImplementedError|OSError|OverflowError|'
-             r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
-             r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
-             r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
-             r'TypeError|UnboundLocalError|UnicodeDecodeError|'
-             r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
-             r'UnicodeWarning|UserWarning|ValueError|Warning|ZeroDivisionError'
-             r')\b', Name.Exception),
-        ],
-        'numbers': [
-            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'0\d+', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+', Number.Integer)
-        ],
-        'backtick': [
-            ('`.*?`', String.Backtick),
-        ],
-        'name': [
-            (r'@[a-zA-Z0-9_]+', Name.Decorator),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'funcname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
-        ],
-        'cdef': [
-            (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
-            (r'(struct|enum|union|class)\b', Keyword),
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(?=[(:#=]|$)',
-             bygroups(Name.Function, Text), '#pop'),
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(,)',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'from\b', Keyword, '#pop'),
-            (r'as\b', Keyword),
-            (r':', Punctuation, '#pop'),
-            (r'(?=["\'])', Text, '#pop'),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Keyword.Type),
-            (r'.', Text),
-        ],
-        'classname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
-            (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
-            (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
-            (r'', Text, '#pop') # all else: go back
-        ],
-        'fromimport': [
-            (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
-            (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
-            # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
-            (r'', Text, '#pop'),
-        ],
-        'stringescape': [
-            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
-             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-        'strings': [
-            (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
-            (r'[^\\\'"%\n]+', String),
-            # quotes, percents and backslashes must be parsed one at a time
-            (r'[\'"\\]', String),
-            # unhandled string formatting sign
-            (r'%', String)
-            # newlines are an error (use "nl" state)
-        ],
-        'nl': [
-            (r'\n', String)
-        ],
-        'dqs': [
-            (r'"', String, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
-            include('strings')
-        ],
-        'sqs': [
-            (r"'", String, '#pop'),
-            (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
-            include('strings')
-        ],
-        'tdqs': [
-            (r'"""', String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-        'tsqs': [
-            (r"'''", String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-    }
-
-
-class ValaLexer(RegexLexer):
-    """
-    For Vala source code with preprocessor directives.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'Vala'
-    aliases = ['vala', 'vapi']
-    filenames = ['*.vala', '*.vapi']
-    mimetypes = ['text/x-vala']
-
-    tokens = {
-        'whitespace': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-        ],
-        'statements': [
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
-             String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
-             bygroups(Punctuation, Name.Decorator, Punctuation)),
-            # TODO: "correctly" parse complex code attributes
-            (r'(\[)(CCode|(?:Integer|Floating)Type)',
-             bygroups(Punctuation, Name.Decorator)),
-            (r'[()\[\],.]', Punctuation),
-            (r'(as|base|break|case|catch|construct|continue|default|delete|do|'
-             r'else|enum|finally|for|foreach|get|if|in|is|lock|new|out|params|'
-             r'return|set|sizeof|switch|this|throw|try|typeof|while|yield)\b',
-             Keyword),
-            (r'(abstract|const|delegate|dynamic|ensures|extern|inline|internal|'
-             r'override|owned|private|protected|public|ref|requires|signal|'
-             r'static|throws|unowned|var|virtual|volatile|weak|yields)\b',
-             Keyword.Declaration),
-            (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
-             'namespace'),
-            (r'(class|errordomain|interface|struct)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'class'),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Name.Attribute)),
-            # void is an actual keyword, others are in glib-2.0.vapi
-            (r'(void|bool|char|double|float|int|int8|int16|int32|int64|long|'
-             r'short|size_t|ssize_t|string|time_t|uchar|uint|uint8|uint16|'
-             r'uint32|uint64|ulong|unichar|ushort)\b', Keyword.Type),
-            (r'(true|false|null)\b', Name.Builtin),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'root': [
-            include('whitespace'),
-            ('', Text, 'statement'),
-        ],
-        'statement' : [
-            include('whitespace'),
-            include('statements'),
-            ('[{}]', Punctuation),
-            (';', Punctuation, '#pop'),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
-            (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
-            (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
-            (r'.*?\n', Comment),
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'namespace': [
-            (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
-        ],
-    }
-
-
-class OocLexer(RegexLexer):
-    """
-    For `Ooc <http://ooc-lang.org/>`_ source code
-
-    *New in Pygments 1.2.*
-    """
-    name = 'Ooc'
-    aliases = ['ooc']
-    filenames = ['*.ooc']
-    mimetypes = ['text/x-ooc']
-
-    tokens = {
-        'root': [
-            (r'\b(class|interface|implement|abstract|extends|from|'
-             r'this|super|new|const|final|static|import|use|extern|'
-             r'inline|proto|break|continue|fallthrough|operator|if|else|for|'
-             r'while|do|switch|case|as|in|version|return|true|false|null)\b',
-             Keyword),
-            (r'include\b', Keyword, 'include'),
-            (r'(cover)([ \t]+)(from)([ \t]+)([a-zA-Z0-9_]+[*@]?)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Class)),
-            (r'(func)((?:[ \t]|\\\n)+)(~[a-z_][a-zA-Z0-9_]*)',
-             bygroups(Keyword, Text, Name.Function)),
-            (r'\bfunc\b', Keyword),
-            # Note: %= and ^= not listed on http://ooc-lang.org/syntax
-            (r'//.*', Comment),
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
-             r'&&?|\|\|?|\^=?)', Operator),
-            (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
-                                                 Name.Function)),
-            (r'[A-Z][A-Z0-9_]+', Name.Constant),
-            (r'[A-Z][a-zA-Z0-9_]*([@*]|\[[ \t]*\])?', Name.Class),
-
-            (r'([a-z][a-zA-Z0-9_]*(?:~[a-z][a-zA-Z0-9_]*)?)((?:[ \t]|\\\n)*)(?=\()',
-             bygroups(Name.Function, Text)),
-            (r'[a-z][a-zA-Z0-9_]*', Name.Variable),
-
-            # : introduces types
-            (r'[:(){}\[\];,]', Punctuation),
-
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'0c[0-9]+', Number.Oct),
-            (r'0b[01]+', Number.Binary),
-            (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
-            (r'[0-9_]+', Number.Decimal),
-
-            (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"',
-             String.Double),
-            (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
-             String.Char),
-            (r'@', Punctuation), # pointer dereference
-            (r'\.', Punctuation), # imports or chain operator
-
-            (r'\\[ \t\n]', Text),
-            (r'[ \t]+', Text),
-        ],
-        'include': [
-            (r'[\w/]+', Name),
-            (r',', Punctuation),
-            (r'[ \t]', Text),
-            (r'[;\n]', Text, '#pop'),
-        ],
-    }
-
-
-class GoLexer(RegexLexer):
-    """
-    For `Go <http://golang.org>`_ source.
-    """
-    name = 'Go'
-    filenames = ['*.go']
-    aliases = ['go']
-    mimetypes = ['text/x-gosrc']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuations
-            (r'//(.*?)\n', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'(import|package)\b', Keyword.Namespace),
-            (r'(var|func|struct|map|chan|type|interface|const)\b', Keyword.Declaration),
-            (r'(break|default|select|case|defer|go'
-             r'|else|goto|switch|fallthrough|if|range'
-             r'|continue|for|return)\b', Keyword),
-            (r'(true|false|iota|nil)\b', Keyword.Constant),
-            # It seems the builtin types aren't actually keywords, but
-            # can be used as functions. So we need two declarations.
-            (r'(uint|uint8|uint16|uint32|uint64'
-             r'|int|int8|int16|int32|int64'
-             r'|float|float32|float64'
-             r'|complex64|complex128|byte|rune'
-             r'|string|bool|error|uintptr'
-             r'|print|println|panic|recover|close|complex|real|imag'
-             r'|len|cap|append|copy|delete|new|make)\b(\()',
-             bygroups(Name.Builtin, Punctuation)),
-            (r'(uint|uint8|uint16|uint32|uint64'
-             r'|int|int8|int16|int32|int64'
-             r'|float|float32|float64'
-             r'|complex64|complex128|byte|rune'
-             r'|string|bool|error|uintptr)\b', Keyword.Type),
-            # imaginary_lit
-            (r'\d+i', Number),
-            (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
-            (r'\.\d+([Ee][-+]\d+)?i', Number),
-            (r'\d+[Ee][-+]\d+i', Number),
-            # float_lit
-            (r'\d+(\.\d+[eE][+\-]?\d+|'
-             r'\.\d*|[eE][+\-]?\d+)', Number.Float),
-            (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
-            # int_lit
-            # -- octal_lit
-            (r'0[0-7]+', Number.Oct),
-            # -- hex_lit
-            (r'0[xX][0-9a-fA-F]+', Number.Hex),
-            # -- decimal_lit
-            (r'(0|[1-9][0-9]*)', Number.Integer),
-            # char_lit
-            (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
-             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
-             String.Char
-            ),
-            # StringLiteral
-            # -- raw_string_lit
-            (r'`[^`]*`', String),
-            # -- interpreted_string_lit
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # Tokens
-            (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
-             r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
-            (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
-            # identifier
-            (r'[a-zA-Z_]\w*', Name.Other),
-        ]
-    }
-
-
-class FelixLexer(RegexLexer):
-    """
-    For `Felix <http://www.felix-lang.org>`_ source code.
-
-    *New in Pygments 1.2.*
-    """
-
-    name = 'Felix'
-    aliases = ['felix', 'flx']
-    filenames = ['*.flx', '*.flxh']
-    mimetypes = ['text/x-felix']
-
-    preproc = [
-        'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
-    ]
-
-    keywords = [
-        '_', '_deref', 'all', 'as',
-        'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
-        'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
-        'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
-        'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
-        'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
-        'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
-        'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
-        'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
-        'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
-        'when', 'whilst', 'with', 'yield',
-    ]
-
-    keyword_directives = [
-        '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
-        'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
-        'package', 'private', 'pod', 'property', 'public', 'publish',
-        'requires', 'todo', 'virtual', 'use',
-    ]
-
-    keyword_declarations = [
-        'def', 'let', 'ref', 'val', 'var',
-    ]
-
-    keyword_types = [
-        'unit', 'void', 'any', 'bool',
-        'byte',  'offset',
-        'address', 'caddress', 'cvaddress', 'vaddress',
-        'tiny', 'short', 'int', 'long', 'vlong',
-        'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
-        'int8', 'int16', 'int32', 'int64',
-        'uint8', 'uint16', 'uint32', 'uint64',
-        'float', 'double', 'ldouble',
-        'complex', 'dcomplex', 'lcomplex',
-        'imaginary', 'dimaginary', 'limaginary',
-        'char', 'wchar', 'uchar',
-        'charp', 'charcp', 'ucharp', 'ucharcp',
-        'string', 'wstring', 'ustring',
-        'cont',
-        'array', 'varray', 'list',
-        'lvalue', 'opt', 'slice',
-    ]
-
-    keyword_constants = [
-        'false', 'true',
-    ]
-
-    operator_words = [
-        'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
-    ]
-
-    name_builtins = [
-        '_svc', 'while',
-    ]
-
-    name_pseudo = [
-        'root', 'self', 'this',
-    ]
-
-    decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-
-            # Keywords
-            (r'(axiom|ctor|fun|gen|proc|reduce|union)\b', Keyword,
-             'funcname'),
-            (r'(class|cclass|cstruct|obj|struct)\b', Keyword, 'classname'),
-            (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
-            (r'(%s)\b' % '|'.join(keywords), Keyword),
-            (r'(%s)\b' % '|'.join(keyword_directives), Name.Decorator),
-            (r'(%s)\b' % '|'.join(keyword_declarations), Keyword.Declaration),
-            (r'(%s)\b' % '|'.join(keyword_types), Keyword.Type),
-            (r'(%s)\b' % '|'.join(keyword_constants), Keyword.Constant),
-
-            # Operators
-            include('operators'),
-
-            # Float Literal
-            # -- Hex Float
-            (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
-             r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
-            # -- DecimalFloat
-            (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
-             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
-            (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
-             Number.Float),
-
-            # IntegerLiteral
-            # -- Binary
-            (r'0[Bb][01_]+%s' % decimal_suffixes, Number),
-            # -- Octal
-            (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
-            # -- Hexadecimal
-            (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
-            # -- Decimal
-            (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
-            # Strings
-            ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
-            ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
-            ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
-            ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
-            ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
-            ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
-            ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
-            ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
-            # Punctuation
-            (r'[\[\]{}:(),;?]', Punctuation),
-
-            # Labels
-            (r'[a-zA-Z_]\w*:>', Name.Label),
-
-            # Identifiers
-            (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
-            (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'whitespace': [
-            (r'\n', Text),
-            (r'\s+', Text),
-
-            include('comment'),
-
-            # Preprocessor
-            (r'#\s*if\s+0', Comment.Preproc, 'if0'),
-            (r'#', Comment.Preproc, 'macro'),
-        ],
-        'operators': [
-            (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
-            (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
-        ],
-        'comment': [
-            (r'//(.*?)\n', Comment.Single),
-            (r'/[*]', Comment.Multiline, 'comment2'),
-        ],
-        'comment2': [
-            (r'[^\/*]', Comment.Multiline),
-            (r'/[*]', Comment.Multiline, '#push'),
-            (r'[*]/', Comment.Multiline, '#pop'),
-            (r'[\/*]', Comment.Multiline),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
-            (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
-            (r'.*?\n', Comment),
-        ],
-        'macro': [
-            include('comment'),
-            (r'(import|include)(\s+)(<[^>]*?>)',
-             bygroups(Comment.Preproc, Text, String), '#pop'),
-            (r'(import|include)(\s+)("[^"]*?")',
-             bygroups(Comment.Preproc, Text, String), '#pop'),
-            (r"(import|include)(\s+)('[^']*?')",
-             bygroups(Comment.Preproc, Text, String), '#pop'),
-            (r'[^/\n]+', Comment.Preproc),
-            ##(r'/[*](.|\n)*?[*]/', Comment),
-            ##(r'//.*?\n', Comment, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'funcname': [
-            include('whitespace'),
-            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
-            # anonymous functions
-            (r'(?=\()', Text, '#pop'),
-        ],
-        'classname': [
-            include('whitespace'),
-            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
-            # anonymous classes
-            (r'(?=\{)', Text, '#pop'),
-        ],
-        'modulename': [
-            include('whitespace'),
-            (r'\[', Punctuation, ('modulename2', 'tvarlist')),
-            (r'', Error, 'modulename2'),
-        ],
-        'modulename2': [
-            include('whitespace'),
-            (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
-        ],
-        'tvarlist': [
-            include('whitespace'),
-            include('operators'),
-            (r'\[', Punctuation, '#push'),
-            (r'\]', Punctuation, '#pop'),
-            (r',', Punctuation),
-            (r'(with|where)\b', Keyword),
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'stringescape': [
-            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
-             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-        'strings': [
-            (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
-            (r'[^\\\'"%\n]+', String),
-            # quotes, percents and backslashes must be parsed one at a time
-            (r'[\'"\\]', String),
-            # unhandled string formatting sign
-            (r'%', String)
-            # newlines are an error (use "nl" state)
-        ],
-        'nl': [
-            (r'\n', String)
-        ],
-        'dqs': [
-            (r'"', String, '#pop'),
-            # included here again for raw strings
-            (r'\\\\|\\"|\\\n', String.Escape),
-            include('strings')
-        ],
-        'sqs': [
-            (r"'", String, '#pop'),
-            # included here again for raw strings
-            (r"\\\\|\\'|\\\n", String.Escape),
-            include('strings')
-        ],
-        'tdqs': [
-            (r'"""', String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-        'tsqs': [
-            (r"'''", String, '#pop'),
-            include('strings'),
-            include('nl')
-        ],
-     }
-
-
-class AdaLexer(RegexLexer):
-    """
-    For Ada source code.
-
-    *New in Pygments 1.3.*
-    """
-
-    name = 'Ada'
-    aliases = ['ada', 'ada95' 'ada2005']
-    filenames = ['*.adb', '*.ads', '*.ada']
-    mimetypes = ['text/x-ada']
-
-    flags = re.MULTILINE | re.I  # Ignore case
-
-    tokens = {
-        'root': [
-            (r'[^\S\n]+', Text),
-            (r'--.*?\n', Comment.Single),
-            (r'[^\S\n]+', Text),
-            (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
-            (r'(subtype|type)(\s+)([a-z0-9_]+)',
-             bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
-            (r'task|protected', Keyword.Declaration),
-            (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
-            (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
-            (r'(pragma)(\s+)([a-zA-Z0-9_]+)', bygroups(Keyword.Reserved, Text,
-                                                       Comment.Preproc)),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(Address|Byte|Boolean|Character|Controlled|Count|Cursor|'
-             r'Duration|File_Mode|File_Type|Float|Generator|Integer|Long_Float|'
-             r'Long_Integer|Long_Long_Float|Long_Long_Integer|Natural|Positive|'
-             r'Reference_Type|Short_Float|Short_Integer|Short_Short_Float|'
-             r'Short_Short_Integer|String|Wide_Character|Wide_String)\b',
-             Keyword.Type),
-            (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
-            (r'generic|private', Keyword.Declaration),
-            (r'package', Keyword.Declaration, 'package'),
-            (r'array\b', Keyword.Reserved, 'array_def'),
-            (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)',
-             bygroups(Name.Constant, Text, Punctuation, Text,
-                      Keyword.Reserved)),
-            (r'<<[a-z0-9_]+>>', Name.Label),
-            (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
-             bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
-            (r'\b(abort|abs|abstract|accept|access|aliased|all|array|at|begin|'
-             r'body|case|constant|declare|delay|delta|digits|do|else|elsif|end|'
-             r'entry|exception|exit|interface|for|goto|if|is|limited|loop|new|'
-             r'null|of|or|others|out|overriding|pragma|protected|raise|range|'
-             r'record|renames|requeue|return|reverse|select|separate|subtype|'
-             r'synchronized|task|tagged|terminate|then|type|until|when|while|'
-             r'xor)\b',
-             Keyword.Reserved),
-            (r'"[^"]*"', String),
-            include('attribute'),
-            include('numbers'),
-            (r"'[^']'", String.Character),
-            (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
-            (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
-            (r'[*<>+=/&-]', Operator),
-            (r'\n+', Text),
-        ],
-        'numbers' : [
-            (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
-            (r'[0-9_]+\.[0-9_]*', Number.Float),
-            (r'[0-9_]+', Number.Integer),
-        ],
-        'attribute' : [
-            (r"(')([a-zA-Z0-9_]+)", bygroups(Punctuation, Name.Attribute)),
-        ],
-        'subprogram' : [
-            (r'\(', Punctuation, ('#pop', 'formal_part')),
-            (r';', Punctuation, '#pop'),
-            (r'is\b', Keyword.Reserved, '#pop'),
-            (r'"[^"]+"|[a-z0-9_]+', Name.Function),
-            include('root'),
-        ],
-        'end' : [
-            ('(if|case|record|loop|select)', Keyword.Reserved),
-            ('"[^"]+"|[a-zA-Z0-9_.]+', Name.Function),
-            ('\s+', Text),
-            (';', Punctuation, '#pop'),
-        ],
-        'type_def': [
-            (r';', Punctuation, '#pop'),
-            (r'\(', Punctuation, 'formal_part'),
-            (r'with|and|use', Keyword.Reserved),
-            (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
-            (r'record\b', Keyword.Reserved, ('record_def')),
-            (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
-            include('root'),
-        ],
-        'array_def' : [
-            (r';', Punctuation, '#pop'),
-            (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text,
-                                                   Keyword.Reserved)),
-            include('root'),
-        ],
-        'record_def' : [
-            (r'end record', Keyword.Reserved, '#pop'),
-            include('root'),
-        ],
-        'import': [
-            (r'[a-z0-9_.]+', Name.Namespace, '#pop'),
-            (r'', Text, '#pop'),
-        ],
-        'formal_part' : [
-            (r'\)', Punctuation, '#pop'),
-            (r'[a-z0-9_]+', Name.Variable),
-            (r',|:[^=]', Punctuation),
-            (r'(in|not|null|out|access)\b', Keyword.Reserved),
-            include('root'),
-        ],
-        'package': [
-            ('body', Keyword.Declaration),
-            ('is\s+new|renames', Keyword.Reserved),
-            ('is', Keyword.Reserved, '#pop'),
-            (';', Punctuation, '#pop'),
-            ('\(', Punctuation, 'package_instantiation'),
-            ('([a-zA-Z0-9_.]+)', Name.Class),
-            include('root'),
-        ],
-        'package_instantiation': [
-            (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable,
-                                                        Text, Punctuation)),
-            (r'[a-z0-9._\'"]', Text),
-            (r'\)', Punctuation, '#pop'),
-            include('root'),
-        ],
-    }
-
-
-class Modula2Lexer(RegexLexer):
-    """
-    For `Modula-2 <http://www.modula2.org/>`_ source code.
-
-    Additional options that determine which keywords are highlighted:
-
-    `pim`
-        Select PIM Modula-2 dialect (default: True).
-    `iso`
-        Select ISO Modula-2 dialect (default: False).
-    `objm2`
-        Select Objective Modula-2 dialect (default: False).
-    `gm2ext`
-        Also highlight GNU extensions (default: False).
-
-    *New in Pygments 1.3.*
-    """
-    name = 'Modula-2'
-    aliases = ['modula2', 'm2']
-    filenames = ['*.def', '*.mod']
-    mimetypes = ['text/x-modula2']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'whitespace': [
-            (r'\n+', Text), # blank lines
-            (r'\s+', Text), # whitespace
-        ],
-        'identifiers': [
-            (r'([a-zA-Z_\$][a-zA-Z0-9_\$]*)', Name),
-        ],
-        'numliterals': [
-            (r'[01]+B', Number.Binary),        # binary number (ObjM2)
-            (r'[0-7]+B', Number.Oct),          # octal number (PIM + ISO)
-            (r'[0-7]+C', Number.Oct),          # char code (PIM + ISO)
-            (r'[0-9A-F]+C', Number.Hex),       # char code (ObjM2)
-            (r'[0-9A-F]+H', Number.Hex),       # hexadecimal number
-            (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
-            (r'[0-9]+\.[0-9]+', Number.Float), # real number
-            (r'[0-9]+', Number.Integer),       # decimal whole number
-        ],
-        'strings': [
-            (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
-            (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
-        ],
-        'operators': [
-            (r'[*/+=#~&<>\^-]', Operator),
-            (r':=', Operator),   # assignment
-            (r'@', Operator),    # pointer deref (ISO)
-            (r'\.\.', Operator), # ellipsis or range
-            (r'`', Operator),    # Smalltalk message (ObjM2)
-            (r'::', Operator),   # type conversion (ObjM2)
-        ],
-        'punctuation': [
-            (r'[\(\)\[\]{},.:;|]', Punctuation),
-        ],
-        'comments': [
-            (r'//.*?\n', Comment.Single),       # ObjM2
-            (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2
-            (r'\(\*([^\$].*?)\*\)', Comment.Multiline),
-            # TO DO: nesting of (* ... *) comments
-        ],
-        'pragmas': [
-            (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM
-            (r'<\*(.*?)\*>', Comment.Preproc),     # ISO + ObjM2
-        ],
-        'root': [
-            include('whitespace'),
-            include('comments'),
-            include('pragmas'),
-            include('identifiers'),
-            include('numliterals'),
-            include('strings'),
-            include('operators'),
-            include('punctuation'),
-        ]
-    }
-
-    pim_reserved_words = [
-        # 40 reserved words
-        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION',
-        'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR',
-        'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD',
-        'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED',
-        'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
-        'UNTIL', 'VAR', 'WHILE', 'WITH',
-    ]
-
-    pim_pervasives = [
-        # 31 pervasives
-        'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC',
-        'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL',
-        'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD',
-        'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL',
-    ]
-
-    iso_reserved_words = [
-        # 46 reserved words
-        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
-        'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY',
-        'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN',
-        'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER',
-        'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY',
-        'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
-        'WITH',
-    ]
-
-    iso_pervasives = [
-        # 42 pervasives
-        'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX',
-        'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH',
-        'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH',
-        'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW',
-        'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE',
-        'TRUE', 'TRUNC', 'UNINTERRUBTIBLE', 'VAL',
-    ]
-
-    objm2_reserved_words = [
-        # base language, 42 reserved words
-        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
-        'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF',
-        'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD',
-        'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE',
-        'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
-        'UNTIL', 'VAR', 'VARIADIC', 'WHILE',
-        # OO extensions, 16 reserved words
-        'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
-        'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
-        'SUPER', 'TRY',
-    ]
-
-    objm2_pervasives = [
-        # base language, 38 pervasives
-        'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE',
-        'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD',
-        'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL',
-        'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX',
-        'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF',
-        # OO extensions, 3 pervasives
-        'OBJECT', 'NO', 'YES',
-    ]
-
-    gnu_reserved_words = [
-        # 10 additional reserved words
-        'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
-        '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
-    ]
-
-    gnu_pervasives = [
-        # 21 identifiers, actually from pseudo-module SYSTEM
-        # but we will highlight them as if they were pervasives
-        'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
-        'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
-        'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
-        'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
-    ]
-
-    def __init__(self, **options):
-        self.reserved_words = set()
-        self.pervasives = set()
-        # ISO Modula-2
-        if get_bool_opt(options, 'iso', False):
-            self.reserved_words.update(self.iso_reserved_words)
-            self.pervasives.update(self.iso_pervasives)
-        # Objective Modula-2
-        elif get_bool_opt(options, 'objm2', False):
-            self.reserved_words.update(self.objm2_reserved_words)
-            self.pervasives.update(self.objm2_pervasives)
-        # PIM Modula-2 (DEFAULT)
-        else:
-            self.reserved_words.update(self.pim_reserved_words)
-            self.pervasives.update(self.pim_pervasives)
-        # GNU extensions
-        if get_bool_opt(options, 'gm2ext', False):
-            self.reserved_words.update(self.gnu_reserved_words)
-            self.pervasives.update(self.gnu_pervasives)
-        # initialise
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            # check for reserved words and pervasives
-            if token is Name:
-                if value in self.reserved_words:
-                    token = Keyword.Reserved
-                elif value in self.pervasives:
-                    token = Keyword.Pervasive
-            # return result
-            yield index, token, value
-
-
-class BlitzMaxLexer(RegexLexer):
-    """
-    For `BlitzMax <http://blitzbasic.com>`_ source code.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'BlitzMax'
-    aliases = ['blitzmax', 'bmax']
-    filenames = ['*.bmx']
-    mimetypes = ['text/x-bmx']
-
-    bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
-    bmax_sktypes = r'@{1,2}|[!#$%]'
-    bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
-    bmax_name = r'[a-z_][a-z0-9_]*'
-    bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
-                r'|([ \t]*)([:])([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
-                (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
-    bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
-    flags = re.MULTILINE | re.IGNORECASE
-    tokens = {
-        'root': [
-            # Text
-            (r'[ \t]+', Text),
-            (r'\.\.\n', Text), # Line continuation
-            # Comments
-            (r"'.*?\n", Comment.Single),
-            (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
-            # Data types
-            ('"', String.Double, 'string'),
-            # Numbers
-            (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
-            (r'\.[0-9]*(?!\.)', Number.Float),
-            (r'[0-9]+', Number.Integer),
-            (r'\$[0-9a-f]+', Number.Hex),
-            (r'\%[10]+', Number), # Binary
-            # Other
-            (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
-             (bmax_vopwords), Operator),
-            (r'[(),.:\[\]]', Punctuation),
-            (r'(?:#[\w \t]*)', Name.Label),
-            (r'(?:\?[\w \t]*)', Comment.Preproc),
-            # Identifiers
-            (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
-             bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
-            (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
-             (bmax_name, bmax_name),
-             bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
-            (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
-                                 Operator, Text, Punctuation, Text,
-                                 Keyword.Type, Name.Class, Text,
-                                 Keyword.Type, Text, Punctuation)),
-            (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
-                                Text, Punctuation, Text, Keyword.Type,
-                                Name.Class, Text, Keyword.Type)),
-            (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
-             bygroups(Keyword.Reserved, Text, Name.Class)),
-            # Keywords
-            (r'\b(Ptr)\b', Keyword.Type),
-            (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
-            (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
-            (r'\b(TNullMethodException|TNullFunctionException|'
-             r'TNullObjectException|TArrayBoundsException|'
-             r'TRuntimeException)\b', Name.Exception),
-            (r'\b(Strict|SuperStrict|Module|ModuleInfo|'
-             r'End|Return|Continue|Exit|Public|Private|'
-             r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|'
-             r'New|Release|Delete|'
-             r'Incbin|IncbinPtr|IncbinLen|'
-             r'Framework|Include|Import|Extern|EndExtern|'
-             r'Function|EndFunction|'
-             r'Type|EndType|Extends|'
-             r'Method|EndMethod|'
-             r'Abstract|Final|'
-             r'If|Then|Else|ElseIf|EndIf|'
-             r'For|To|Next|Step|EachIn|'
-             r'While|Wend|EndWhile|'
-             r'Repeat|Until|Forever|'
-             r'Select|Case|Default|EndSelect|'
-             r'Try|Catch|EndTry|Throw|Assert|'
-             r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved),
-            # Final resolve (for variable names and such)
-            (r'(%s)' % (bmax_name), Name.Variable),
-        ],
-        'string': [
-            (r'""', String.Double),
-            (r'"C?', String.Double, '#pop'),
-            (r'[^"]+', String.Double),
-        ],
-    }
-
-
class NimrodLexer(RegexLexer):
    """
    For `Nimrod <http://nimrod-code.org/>`_ source code.

    Nimrod (now Nim) compares identifiers case- and underscore-insensitively
    after the first character, which is why the lexer runs with IGNORECASE
    and expands every keyword through ``underscorize`` below.

    *New in Pygments 1.5.*
    """

    name = 'Nimrod'
    aliases = ['nimrod', 'nim']
    filenames = ['*.nim', '*.nimrod']
    mimetypes = ['text/x-nimrod']

    flags = re.MULTILINE | re.IGNORECASE | re.UNICODE

    def underscorize(words):
        # Build one alternation in which every character of every word may
        # be followed by an optional underscore, e.g. ['if'] -> 'i_?f_?',
        # so that "notIn" and "not_in" both match the same keyword rule.
        # NOTE: called at class-definition time (no 'self'), not on instances.
        newWords = []
        new = ""
        for word in words:
            for ch in word:
                new += (ch + "_?")
            newWords.append(new)
            new = ""
        return "|".join(newWords)

    # Plain language keywords.
    keywords = [
        'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
        'case', 'cast', 'const', 'continue', 'converter', 'discard',
        'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
        'for', 'generic', 'if', 'implies', 'in', 'yield',
        'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
        'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
        'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
        'tuple', 'type' , 'when', 'while', 'with', 'without', 'xor'
    ]

    # Literal-like keywords, highlighted as Keyword.Pseudo.
    keywordsPseudo = [
        'nil', 'true', 'false'
    ]

    # Word-shaped operators; must be tried before the generic keyword rule
    # so they get Operator.Word rather than Keyword.
    opWords = [
        'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
        'notin', 'is', 'isnot'
    ]

    # Built-in type names.
    types = [
        'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
        'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
    ]

    tokens = {
        'root': [
            # '##' doc comments must precede the plain '#' comment rule.
            (r'##.*$', String.Doc),
            (r'#.*$', Comment),
            (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator),
            (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;',
             Punctuation),

            # Strings: raw (identifier-prefixed), triple-quoted, plain.
            (r'(?:[\w]+)"', String, 'rdqs'),
            (r'"""', String, 'tdqs'),
            ('"', String, 'dqs'),

            # Char
            ("'", String.Char, 'chars'),

            # Keywords
            (r'(%s)\b' % underscorize(opWords), Operator.Word),
            (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'),
            (r'(%s)\b' % underscorize(keywords), Keyword),
            (r'(%s)\b' % underscorize(['from', 'import', 'include']),
             Keyword.Namespace),
            (r'(v_?a_?r)\b', Keyword.Declaration),
            (r'(%s)\b' % underscorize(types), Keyword.Type),
            (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
            # Identifiers
            (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
            # Numbers: float first (lookahead for '.'/exponent/suffix),
            # then hex, binary, octal and finally plain decimal.
            (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))',
              Number.Float, ('float-suffix', 'float-number')),
            (r'0[xX][a-fA-F0-9][a-fA-F0-9_]*', Number.Hex, 'int-suffix'),
            (r'0[bB][01][01_]*', Number, 'int-suffix'),
            (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
            (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
            # Whitespace
            (r'\s+', Text),
            (r'.+$', Error),
        ],
        'chars': [
          (r'\\([\\abcefnrtvl"\']|x[a-fA-F0-9]{2}|[0-9]{1,3})', String.Escape),
          (r"'", String.Char, '#pop'),
          (r".", String.Char)
        ],
        # Shared interior rules for all double-quoted string states below;
        # quotes, dollars and backslashes are consumed one at a time so the
        # including state's own closing/escape rules get first chance.
        'strings': [
            (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
            (r'[^\\\'"\$\n]+', String),
            # quotes, dollars and backslashes must be parsed one at a time
            (r'[\'"\\]', String),
            # unhandled string formatting sign
            (r'\$', String)
            # newlines are an error (use "nl" state)
        ],
        'dqs': [
            (r'\\([\\abcefnrtvl"\']|\n|x[a-fA-F0-9]{2}|[0-9]{1,3})',
             String.Escape),
            (r'"', String, '#pop'),
            include('strings')
        ],
        'rdqs': [
            (r'"(?!")', String, '#pop'),
            (r'""', String.Escape),
            include('strings')
        ],
        'tdqs': [
            (r'"""(?!")', String, '#pop'),
            include('strings'),
            include('nl')
        ],
        'funcname': [
            (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
            (r'`.+`', Name.Function, '#pop')
        ],
        'nl': [
            (r'\n', String)
        ],
        # The empty-pattern rules below always match and pop, so these
        # suffix states consume at most one suffix and fall straight back.
        'float-number': [
          (r'\.(?!\.)[0-9_]*', Number.Float),
          (r'[eE][+-]?[0-9][0-9_]*', Number.Float),
          (r'', Text, '#pop')
        ],
        'float-suffix': [
          (r'\'[fF](32|64)', Number.Float),
          (r'', Text, '#pop')
        ],
        'int-suffix': [
          (r'\'[iI](32|64)', Number.Integer.Long),
          (r'\'[iI](8|16)', Number.Integer),
          (r'', Text, '#pop')
        ],
    }
-
-
class FantomLexer(RegexLexer):
    """
    For Fantom source code.

    *New in Pygments 1.5.*
    """
    name = 'Fantom'
    aliases = ['fan']
    filenames = ['*.fan']
    mimetypes = ['application/x-fantom']

    # often used regexes
    def s(str):
        # Expand the placeholders $pod, $eos, $id and $type inside a rule
        # via string.Template; evaluated once at class-definition time.
        # NOTE(review): the parameter shadows the builtin ``str`` — harmless
        # here, but worth renaming if this code is ever touched again.
        return Template(str).substitute(
            dict (
                pod = r'[\"\w\.]+',
                eos = r'\n|;',
                id = r'[a-zA-Z_][a-zA-Z0-9_]*',
                # all chars which can be part of type definition. Starts with
                # either letter, or [ (maps), or | (funcs)
                type = r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]\|\->\?]*?',
                )
            )


    # The named states before 'root' are helper rule groups that 'root'
    # (and a few sub-states) pull in via include(); rule order within each
    # list is significant.
    tokens = {
        'comments': [
            (r'(?s)/\*.*?\*/', Comment.Multiline),           #Multiline
            (r'//.*?\n', Comment.Single),                    #Single line
            #todo: highlight references in fandocs
            (r'\*\*.*?\n', Comment.Special),                 #Fandoc
            (r'#.*\n', Comment.Single)                       #Shell-style
        ],
        'literals': [
            (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number),   #Duration
            (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number),
                                                             #Duration with dot
            (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float),    #Float/Decimal
            (r'\b-?0x[0-9a-fA-F_]+', Number.Hex),            #Hex
            (r'\b-?[\d_]+', Number.Integer),                 #Int
            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char
            (r'"', Punctuation, 'insideStr'),                #Opening quote
            (r'`', Punctuation, 'insideUri'),                #Opening accent
            (r'\b(true|false|null)\b', Keyword.Constant),    #Bool & null
            (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)',          #DSL
             bygroups(Name.Namespace, Punctuation, Name.Class,
                      Punctuation, String, Punctuation)),
            (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?',               #Type/slot literal
             bygroups(Name.Namespace, Punctuation, Name.Class,
                      Punctuation, Name.Function)),
            (r'\[,\]', Literal),                             # Empty list
            (s(r'($type)(\[,\])'),                           # Typed empty list
             bygroups(using(this, state = 'inType'), Literal)),
            (r'\[:\]', Literal),                             # Empty Map
            (s(r'($type)(\[:\])'),
             bygroups(using(this, state = 'inType'), Literal)),
        ],
        'insideStr': [
            (r'\\\\', String.Escape),                        #Escaped backslash
            (r'\\"', String.Escape),                         #Escaped "
            (r'\\`', String.Escape),                         #Escaped `
            (r'\$\w+', String.Interpol),                     #Subst var
            (r'\${.*?}', String.Interpol),                   #Subst expr
            (r'"', Punctuation, '#pop'),                     #Closing quot
            (r'.', String)                                   #String content
        ],
        'insideUri': [  #TODO: remove copy/paste str/uri
            (r'\\\\', String.Escape),                        #Escaped backslash
            (r'\\"', String.Escape),                         #Escaped "
            (r'\\`', String.Escape),                         #Escaped `
            (r'\$\w+', String.Interpol),                     #Subst var
            (r'\${.*?}', String.Interpol),                   #Subst expr
            (r'`', Punctuation, '#pop'),                     #Closing tick
            (r'.', String.Backtick)                          #URI content
        ],
        'protectionKeywords': [
            (r'\b(public|protected|private|internal)\b', Keyword),
        ],
        'typeKeywords': [
            (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
        ],
        'methodKeywords': [
            (r'\b(abstract|native|once|override|static|virtual|final)\b',
             Keyword),
        ],
        'fieldKeywords': [
            (r'\b(abstract|const|final|native|override|static|virtual|'
             r'readonly)\b', Keyword)
        ],
        'otherKeywords': [
            (r'\b(try|catch|throw|finally|for|if|else|while|as|is|isnot|'
             r'switch|case|default|continue|break|do|return|get|set)\b',
             Keyword),
            (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
        ],
        'operators': [
            (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
        ],
        # Re-lexing target for the $type placeholder (see using(this, ...)
        # above); the empty pattern guarantees the state always pops.
        'inType': [
            (r'[\[\]\|\->:\?]', Punctuation),
            (s(r'$id'), Name.Class),
            (r'', Text, '#pop'),

        ],
        'root': [
            include('comments'),
            include('protectionKeywords'),
            include('typeKeywords'),
            include('methodKeywords'),
            include('fieldKeywords'),
            include('literals'),
            include('otherKeywords'),
            include('operators'),
            (r'using\b', Keyword.Namespace, 'using'),         # Using stmt
            (r'@\w+', Name.Decorator, 'facet'),               # Symbol
            (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
             'inheritance'),                                  # Inheritance list


            ### Type var := val
            (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
             bygroups(using(this, state = 'inType'), Text,
                      Name.Variable, Text, Operator)),

            ### var := val
            (s(r'($id)(\s*)(:=)'),
             bygroups(Name.Variable, Text, Operator)),

            ### .someId( or ->someId( ###
            (s(r'(\.|(?:\->))($id)(\s*)(\()'),
             bygroups(Operator, Name.Function, Text, Punctuation),
             'insideParen'),

            ### .someId  or ->someId
            (s(r'(\.|(?:\->))($id)'),
             bygroups(Operator, Name.Function)),

            ### new makeXXX ( ####
            (r'(new)(\s+)(make\w*)(\s*)(\()',
             bygroups(Keyword, Text, Name.Function, Text, Punctuation),
             'insideMethodDeclArgs'),

            ### Type name (  ####
            (s(r'($type)([ \t]+)' #Return type and whitespace
               r'($id)(\s*)(\()'), #method name + open brace
             bygroups(using(this, state = 'inType'), Text,
                      Name.Function, Text, Punctuation),
             'insideMethodDeclArgs'),

            ### ArgType argName, #####
            (s(r'($type)(\s+)($id)(\s*)(,)'),
             bygroups(using(this, state='inType'), Text, Name.Variable,
                      Text, Punctuation)),

            #### ArgType argName) ####
            ## Covered in 'insideParen' state

            ### ArgType argName -> ArgType| ###
            (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
             bygroups(using(this, state='inType'), Text, Name.Variable,
                      Text, Punctuation, Text, using(this, state = 'inType'),
                      Punctuation)),

            ### ArgType argName|  ###
            (s(r'($type)(\s+)($id)(\s*)(\|)'),
             bygroups(using(this, state='inType'), Text, Name.Variable,
                      Text, Punctuation)),

            ### Type var
            (s(r'($type)([ \t]+)($id)'),
             bygroups(using(this, state='inType'), Text,
                      Name.Variable)),

            (r'\(', Punctuation, 'insideParen'),
            (r'\{', Punctuation, 'insideBrace'),
            (r'.', Text)
        ],
        'insideParen': [
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
        'insideMethodDeclArgs': [
            (r'\)', Punctuation, '#pop'),
            (s(r'($type)(\s+)($id)(\s*)(\))'),
             bygroups(using(this, state='inType'), Text, Name.Variable,
                      Text, Punctuation), '#pop'),
            include('root'),
        ],
        'insideBrace': [
            (r'\}', Punctuation, '#pop'),
            include('root'),
        ],
        'inheritance': [
            (r'\s+', Text),                                      #Whitespace
            (r':|,', Punctuation),
            (r'(?:(\w+)(::))?(\w+)',
             bygroups(Name.Namespace, Punctuation, Name.Class)),
            (r'{', Punctuation, '#pop')
        ],
        'using': [
            (r'[ \t]+', Text), # consume whitespaces
            (r'(\[)(\w+)(\])',
             bygroups(Punctuation, Comment.Special, Punctuation)), #ffi
            (r'(\")?([\w\.]+)(\")?',
             bygroups(Punctuation, Name.Namespace, Punctuation)), #podname
            (r'::', Punctuation, 'usingClass'),
            (r'', Text, '#pop')
        ],
        'usingClass': [
            (r'[ \t]+', Text), # consume whitespaces
            (r'(as)(\s+)(\w+)',
             bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
            (r'[\w\$]+', Name.Class),
            (r'', Text, '#pop:2') # jump out to root state
        ],
        'facet': [
            (r'\s+', Text),
            (r'{', Punctuation, 'facetFields'),
            (r'', Text, '#pop')
        ],
        'facetFields': [
            include('comments'),
            include('literals'),
            include('operators'),
            (r'\s+', Text),
            (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
            (r'}', Punctuation, '#pop'),
            (r'.', Text)
        ],
    }
-
-
class RustLexer(RegexLexer):
    """
    Lexer for Mozilla's Rust programming language.

    Note: this targets pre-1.0 Rust (keywords such as ``fail``, ``pure``,
    ``priv`` and the ``'number_lit'`` type suffixes reflect that era).

    *New in Pygments 1.6.*
    """
    name = 'Rust'
    filenames = ['*.rs', '*.rc']
    aliases = ['rust']
    mimetypes = ['text/x-rustsrc']

    tokens = {
        'root': [
            # Whitespace and Comments
            (r'\n', Text),
            (r'\s+', Text),
            (r'//(.*?)\n', Comment.Single),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),

            # Keywords
            (r'(as|assert|break|const'
             r'|copy|do|else|enum|extern|fail'
             r'|false|fn|for|if|impl|let|log'
             r'|loop|match|mod|move|mut|once|priv|pub|pure'
             r'|ref|return|static|struct|trait|true|type|unsafe|use|while'
             r'|u8|u16|u32|u64|i8|i16|i32|i64|uint'
             r'|int|float|f32|f64|str)\b', Keyword),

            # Character Literal
            (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
             String.Char),
            # Binary Literal
            (r'0[Bb][01_]+', Number, 'number_lit'),
            # Octal Literal
            (r'0[0-7_]+', Number.Oct, 'number_lit'),
            # Hexadecimal Literal
            (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
            # Decimal Literal
            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?'
             r'[0-9_]+|\.[0-9_]*|[eE][+\-]?[0-9_]+)?', Number, 'number_lit'),
            # String Literal
            (r'"', String, 'string'),

            # Operators and Punctuation
            (r'[{}()\[\],.;]', Punctuation),
            (r'[+\-*/%&|<>^!~@=:?]', Operator),

            # Identifier
            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),

            # Attributes and macro invocations share the bracketed states
            # below; the opening token decides which closer is expected.
            (r'#\[', Comment.Preproc, 'attribute['),
            (r'#\(', Comment.Preproc, 'attribute('),
            # Macros
            (r'[A-Za-z_][A-Za-z0-9_]*!\[', Comment.Preproc, 'attribute['),
            (r'[A-Za-z_][A-Za-z0-9_]*!\(', Comment.Preproc, 'attribute('),
        ],
        # Entered after every numeric literal to consume an optional type
        # suffix (u8..i64, f32/f64); the pattern may match empty, so the
        # state always pops immediately.
        'number_lit': [
            (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
            (r'[^\\"]+', String),
            (r'\\', String),
        ],
        # Shared interior rules: strings and nested brackets inside an
        # attribute/macro body recurse into the matching bracket state.
        'attribute_common': [
            (r'"', String, 'string'),
            (r'\[', Comment.Preproc, 'attribute['),
            (r'\(', Comment.Preproc, 'attribute('),
        ],
        'attribute[': [
            include('attribute_common'),
            (r'\];?', Comment.Preproc, '#pop'),
            (r'[^"\]]+', Comment.Preproc),
        ],
        'attribute(': [
            include('attribute_common'),
            (r'\);?', Comment.Preproc, '#pop'),
            (r'[^"\)]+', Comment.Preproc),
        ],
    }
-
-
class CudaLexer(CLexer):
    """
    For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
    source.

    Delegates all tokenizing to `CLexer` and then re-tags plain ``Name``
    tokens that belong to the CUDA vocabulary (qualifiers, vector types,
    built-in variables and intrinsics).

    *New in Pygments 1.6.*
    """
    name = 'CUDA'
    filenames = ['*.cu', '*.cuh']
    aliases = ['cuda', 'cu']
    mimetypes = ['text/x-cuda']

    # CUDA vocabulary layered on top of plain C.
    function_qualifiers = ['__device__', '__global__', '__host__',
                           '__noinline__', '__forceinline__']
    variable_qualifiers = ['__device__', '__constant__', '__shared__',
                           '__restrict__']
    vector_types = ['char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
                    'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
                    'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
                    'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
                    'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
                    'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
                    'ulonglong2', 'float1', 'float2', 'float3', 'float4',
                    'double1', 'double2', 'dim3']
    variables = ['gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize']
    functions = ['__threadfence_block', '__threadfence', '__threadfence_system',
                 '__syncthreads', '__syncthreads_count', '__syncthreads_and',
                 '__syncthreads_or']
    execution_confs = ['<<<', '>>>']

    def get_tokens_unprocessed(self, text):
        """Yield C tokens, promoting CUDA vocabulary to richer token types."""
        # Categories are tried in the same priority order as the original
        # if/elif chain; the first match wins ('__device__' appears in both
        # qualifier lists and must resolve to Keyword.Type).
        remap = (
            (self.variable_qualifiers, Keyword.Type),
            (self.vector_types, Keyword.Type),
            (self.variables, Name.Builtin),
            (self.execution_confs, Keyword.Pseudo),
            (self.function_qualifiers, Keyword.Reserved),
            (self.functions, Name.Function),
        )
        for index, token, value in CLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                for vocabulary, replacement in remap:
                    if value in vocabulary:
                        token = replacement
                        break
            yield index, token, value
-
-
class MonkeyLexer(RegexLexer):
    """
    For
    `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
    source code.

    *New in Pygments 1.6.*
    """

    name = 'Monkey'
    aliases = ['monkey']
    filenames = ['*.monkey']
    mimetypes = ['text/x-monkey']

    # Monkey naming conventions drive the fallback classification at the
    # bottom of 'root': lowercase start -> variable, uppercase -> function
    # or class, all-caps -> constant.
    name_variable = r'[a-z_][a-zA-Z0-9_]*'
    name_function = r'[A-Z][a-zA-Z0-9_]*'
    name_constant = r'[A-Z_][A-Z0-9_]*'
    name_class = r'[A-Z][a-zA-Z0-9_]*'
    name_module = r'[a-z0-9_]*'

    keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
    # ? == Bool // % == Int // # == Float // $ == String
    keyword_type_special = r'[?%#$]'

    flags = re.MULTILINE

    tokens = {
        'root': [
            #Text
            (r'\s+', Text),
            # Comments
            (r"'.*", Comment),
            (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
            # preprocessor directives
            (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
            # preprocessor variable (any line starting with '#' that is not a directive)
            (r'^#', Comment.Preproc, 'variables'),
            # String
            ('"', String.Double, 'string'),
            # Numbers
            (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
            (r'\.[0-9]+(?!\.)', Number.Float),
            (r'[0-9]+', Number.Integer),
            # Hex literal, e.g. $FF — fixed: the char class previously was
            # [0-9a-fA-Z], which wrongly accepted G-Z as hex digits.
            (r'\$[0-9a-fA-F]+', Number.Hex),
            (r'\%[10]+', Number), # Binary
            # Native data types
            (r'\b%s\b' % keyword_type, Keyword.Type),
            # Exception handling
            (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
            (r'Throwable', Name.Exception),
            # Builtins
            (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
            (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
            (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
            # Keywords
            (r'(?i)^(Import)(\s+)(.*)(\n)',
             bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
            (r'(?i)^Strict\b.*\n', Keyword.Reserved),
            (r'(?i)(Const|Local|Global|Field)(\s+)',
             bygroups(Keyword.Declaration, Text), 'variables'),
            (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
             bygroups(Keyword.Reserved, Text), 'classname'),
            (r'(?i)(Function|Method)(\s+)',
             bygroups(Keyword.Reserved, Text), 'funcname'),
            (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
             r'Final|Abstract)\b', Keyword.Reserved),
            # Flow Control stuff
            (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
             r'Select|Case|Default|'
             r'While|Wend|'
             r'Repeat|Until|Forever|'
             r'For|To|Until|Step|EachIn|Next|'
             r'Exit|Continue)\s+', Keyword.Reserved),
            # not used yet
            (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
            # Array
            (r'[\[\]]', Punctuation),
            # Other
            (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
            (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
            (r'[\(\){}!#,.:]', Punctuation),
            # catch the rest by naming convention
            (r'%s\b' % name_constant, Name.Constant),
            (r'%s\b' % name_function, Name.Function),
            (r'%s\b' % name_variable, Name.Variable),
        ],
        # After 'Function'/'Method': name, optional ':ReturnType', arg list.
        'funcname': [
            (r'(?i)%s\b' % name_function, Name.Function),
            (r':', Punctuation, 'classname'),
            (r'\s+', Text),
            (r'\(', Punctuation, 'variables'),
            (r'\)', Punctuation, '#pop')
        ],
        'classname': [
            (r'%s\.' % name_module, Name.Namespace),
            (r'%s\b' % keyword_type, Keyword.Type),
            (r'%s\b' % name_class, Name.Class),
            # array (of given size)
            (r'(\[)(\s*)(\d*)(\s*)(\])',
             bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
            # generics
            (r'\s+(?!<)', Text, '#pop'),
            (r'<', Punctuation, '#push'),
            (r'>', Punctuation, '#pop'),
            (r'\n', Text, '#pop'),
            (r'', Text, '#pop')
        ],
        # Declaration lists: 'var:Type, other:Type'; ',' re-pushes so each
        # declaration is handled by a fresh copy of this state.
        'variables': [
            (r'%s\b' % name_constant, Name.Constant),
            (r'%s\b' % name_variable, Name.Variable),
            (r'%s' % keyword_type_special, Keyword.Type),
            (r'\s+', Text),
            (r':', Punctuation, 'classname'),
            (r',', Punctuation, '#push'),
            (r'', Text, '#pop')
        ],
        'string': [
            (r'[^"~]+', String.Double),
            (r'~q|~n|~r|~t|~z|~~', String.Escape),
            (r'"', String.Double, '#pop'),
        ],
        # '#Rem' blocks nest, hence the #push on a nested opener.
        'comment' : [
            (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
            (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
            (r'\n', Comment.Multiline),
            (r'.+', Comment.Multiline),
        ],
    }
-
-
class CobolLexer(RegexLexer):
    """
    Lexer for OpenCOBOL code.

    *New in Pygments 1.6.*
    """
    name = 'COBOL'
    aliases = ['cobol']
    filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
    mimetypes = ['text/x-cobol']
    flags = re.IGNORECASE | re.MULTILINE

    # Data Types: by PICTURE and USAGE
    # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
    # Logical (?): NOT, AND, OR

    # Reserved words:
    # http://opencobol.add1tocobol.com/#reserved-words
    # Intrinsics:
    # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions

    tokens = {
        'root': [
            include('comment'),
            include('strings'),
            include('core'),
            include('nums'),
            (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable),
    #       (r'[\s]+', Text),
            (r'[ \t]+', Text),
        ],
        # Fixed-format comments: '*' or '/' in the indicator column (7),
        # the sequence area itself, or free '*>' inline comments.
        'comment': [
            (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
        ],
        'core': [
            # Figurative constants
            (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
             r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
             r'\s*($|(?=[^0-9a-z_\-]))',
             Name.Constant),

            # Reserved words STATEMENTS and other bolds
            (r'(^|(?<=[^0-9a-z_\-]))'
             r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|'
             r'CONFIGURATION|CONTINUE|'
             r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|'
             r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|'
             r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|'
             r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|'
             r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|'
             r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|'
             r'FREE|GENERATE|GO|GOBACK|'
             r'IDENTIFICATION|IF|INITIALIZE|'
             r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|'
             r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|'
             r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|'
             r'RETURN|REWRITE|SCREEN|'
             r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|'
             r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|'
             r'WORKING-STORAGE|WRITE)'
             r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved),

            # Reserved words
            (r'(^|(?<=[^0-9a-z_\-]))'
             r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|'
             r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|'
             # Fixed: a '|' was missing after ALTERNATE, which fused the
             # two adjacent string fragments into the bogus alternative
             # 'ALTERNATEANY' so neither ALTERNATE nor ANY matched.
             r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE|'
             r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|'
             r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|'
             r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|'
             r'BLANK|'
             r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|'
             r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|'
             r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|'
             r'CONSTANT|CONTAINS|CONTENT|CONTROL|'
             r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|'
             r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|'
             r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|'
             r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|'
             r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|'
             r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|'
             r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|'
             r'EXCLUSIVE|EXTEND|EXTERNAL|'
             r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|'
             r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|'
             r'FUNCTION-ID|GIVING|GLOBAL|GROUP|'
             r'HEADING|HIGHLIGHT|I-O|ID|'
             r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|'
             r'INITIAL|INITIALIZED|INPUT|'
             r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|'
             r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|'
             r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|'
             r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|'
             r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|'
             r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|'
             r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|'
             r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|'
             r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|'
             r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|'
             r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|'
             r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|'
             r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|'
             r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|'
             r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|'
             r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|'
             r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|'
             r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|'
             r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|'
             r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|'
             r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|'
             r'STANDARD-1|STANDARD-2|STATUS|SUM|'
             r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|'
             r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|'
             r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|'
             r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|'
             r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|'
             r'WITH|WORDS|YYYYDDD|YYYYMMDD)'
             r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo),

            # inactive reserved words
            (r'(^|(?<=[^0-9a-z_\-]))'
             r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|'
             r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|'
             r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|'
             r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|'
             r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|'
             r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|'
             r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|'
             r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|'
             r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|'
             r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|'
             r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|'
             r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|'
             r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|'
             r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|'
             r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|'
             r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|'
             r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|'
             r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|'
             r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error),

            # Data Types
            (r'(^|(?<=[^0-9a-z_\-]))'
             r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
             r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
             r'BINARY-C-LONG|'
             r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
             r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),

            # Operators
            (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),

            # (r'(::)', Keyword.Declaration),

            (r'([(),;:&%.])', Punctuation),

            # Intrinsics
            (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
             r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
             r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
             r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
             r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
             r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG10|LOG|'
             r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
             r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
             r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
             r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
             r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
             r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
             r'($|(?=[^0-9a-z_\-]))', Name.Function),

            # Booleans
            (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
            # Comparing Operators
            (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
             r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
        ],

        # \"[^\"\n]*\"|\'[^\'\n]*\'
        'strings': [
            # apparently strings can be delimited by EOL if they are continued
            # in the next line
            (r'"[^"\n]*("|\n)', String.Double),
            (r"'[^'\n]*('|\n)", String.Single),
        ],

        'nums': [
            (r'\d+(\s*|\.$|$)', Number.Integer),
            (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
            (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
        ],
    }
-
-
class CobolFreeformatLexer(CobolLexer):
    """
    Lexer for Free format OpenCOBOL code.

    Inherits all states from `CobolLexer` and overrides only the comment
    rule: in free format there is no fixed indicator column, so comments
    are '*>' anywhere on a line, or a '*' preceded only by word characters
    at the start of a line.

    *New in Pygments 1.6.*
    """

    name = 'COBOLFree'
    aliases = ['cobolfree']
    filenames = ['*.cbl', '*.CBL']
    mimetypes = []

    flags = re.MULTILINE | re.IGNORECASE

    # Only the 'comment' state is replaced; every other state comes from
    # the fixed-format parent lexer.
    tokens = {
        'comment': [
            (r'(\*>.*\n|^\w*\*.*$)', Comment),
        ],
    }
-
-
class LogosLexer(ObjectiveCppLexer):
    """
    For Logos + Objective-C source code with preprocessor directives.

    Extends `ObjectiveCppLexer`; the 'root' and 'statements' states below
    prepend Logos %-directive rules and then fall through to the inherited
    Objective-C++ rules via ``inherit``.

    *New in Pygments 1.6.*
    """

    name = 'Logos'
    aliases = ['logos']
    filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
    mimetypes = ['text/x-logos']
    # Lowered priority so this lexer only wins ambiguous-extension ties
    # when analyse_text() reports a Logos directive.
    priority = 0.25

    tokens = {
        'statements': [
            (r'(%orig|%log)\b', Keyword),
            (r'(%c)\b(\()(\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(\))',
             bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
            (r'(%init)\b(\()',
             bygroups(Keyword, Punctuation), 'logos_init_directive'),
            (r'(%init)(?=\s*;)', bygroups(Keyword)),
            (r'(%hook|%group)(\s+)([a-zA-Z$_][a-zA-Z0-9$_]+)',
             bygroups(Keyword, Text, Name.Class), '#pop'),
            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
            ('#pop', 'logos_classname')),
            inherit,
        ],
        # Inside '%init(...)': 'name = expr' pairs separated by commas;
        # ',' swaps in a fresh copy of this state.
        'logos_init_directive' : [
            ('\s+', Text),
            (',', Punctuation, ('logos_init_directive', '#pop')),
            ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(=)(\s*)([^);]*)',
             bygroups(Name.Class, Text, Punctuation, Text, Text)),
            ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class),
            ('\)', Punctuation, '#pop'),
        ],
        # After '%subclass': 'Name : Superclass' or a bare class name.
        'logos_classname' : [
            ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*:\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)?',
             bygroups(Name.Class, Text, Name.Class), '#pop'),
            ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class, '#pop')
        ],
        'root': [
            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
             'logos_classname'),
            (r'(%hook|%group)(\s+)([a-zA-Z$_][a-zA-Z0-9$_]+)',
             bygroups(Keyword, Text, Name.Class)),
            (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
             bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
            (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation),
             'function'),
            (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
             bygroups(Keyword, Text, Keyword, String, Keyword)),
            (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
            inherit,
        ],
    }

    # Quick probe for Logos-only directives, used by analyse_text().
    _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')

    def analyse_text(text):
        # Pygments calls this statically (no 'self') during lexer guessing;
        # a hit on any Logos directive is treated as conclusive.
        if LogosLexer._logos_keywords.search(text):
            return 1.0
        return 0
diff --git a/python/ext-libs/pygments/lexers/dalvik.py b/python/ext-libs/pygments/lexers/dalvik.py
deleted file mode 100644
index de9b11f..0000000
--- a/python/ext-libs/pygments/lexers/dalvik.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.dalvik
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Pygments lexers for Dalvik VM-related languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
-                           Punctuation
-
-__all__ = ['SmaliLexer']
-
-
-class SmaliLexer(RegexLexer):
-    """
-    For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
-    code.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Smali'
-    aliases = ['smali']
-    filenames = ['*.smali']
-    mimetypes = ['text/smali']
-
-    tokens = {
-        'root': [
-            include('comment'),
-            include('label'),
-            include('field'),
-            include('method'),
-            include('class'),
-            include('directive'),
-            include('access-modifier'),
-            include('instruction'),
-            include('literal'),
-            include('punctuation'),
-            include('type'),
-            include('whitespace')
-        ],
-        'directive': [
-            (r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
-             r'enum|method|registers|locals|array-data|packed-switch|'
-             r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
-             r'epilogue|source)', Keyword),
-            (r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
-             'packed-switch|sparse-switch|parameter|local)', Keyword),
-            (r'^[ \t]*\.restart local', Keyword),
-        ],
-        'access-modifier': [
-            (r'(public|private|protected|static|final|synchronized|bridge|'
-             r'varargs|native|abstract|strictfp|synthetic|constructor|'
-             r'declared-synchronized|interface|enum|annotation|volatile|'
-             r'transient)', Keyword),
-        ],
-        'whitespace': [
-            (r'\n', Text),
-            (r'\s+', Text),
-        ],
-        'instruction': [
-            (r'\b[vp]\d+\b', Name.Builtin), # registers
-            (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
-        ],
-        'literal': [
-            (r'".*"', String),
-            (r'0x[0-9A-Fa-f]+t?', Number.Hex),
-            (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'[0-9]+L?', Number.Integer),
-        ],
-        'field': [
-            (r'(\$?\b)([A-Za-z0-9_$]*)(:)',
-             bygroups(Punctuation, Name.Variable, Punctuation)),
-        ],
-        'method': [
-            (r'<(?:cl)?init>', Name.Function), # constructor
-            (r'(\$?\b)([A-Za-z0-9_$]*)(\()',
-             bygroups(Punctuation, Name.Function, Punctuation)),
-        ],
-        'label': [
-            (r':[A-Za-z0-9_]+', Name.Label),
-        ],
-        'class': [
-            # class names in the form Lcom/namespace/ClassName;
-            # I only want to color the ClassName part, so the namespace part is
-            # treated as 'Text'
-            (r'(L)((?:[A-Za-z0-9_$]+/)*)([A-Za-z0-9_$]+)(;)',
-                bygroups(Keyword.Type, Text, Name.Class, Text)),
-        ],
-        'punctuation': [
-            (r'->', Punctuation),
-            (r'[{},\(\):=\.-]', Punctuation),
-        ],
-        'type': [
-            (r'[ZBSCIJFDV\[]+', Keyword.Type),
-        ],
-        'comment': [
-            (r'#.*?\n', Comment),
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/dotnet.py b/python/ext-libs/pygments/lexers/dotnet.py
deleted file mode 100644
index bdd9edc..0000000
--- a/python/ext-libs/pygments/lexers/dotnet.py
+++ /dev/null
@@ -1,630 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.dotnet
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for .net languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
-     using, this
-from pygments.token import Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
-from pygments.util import get_choice_opt
-from pygments import unistring as uni
-
-from pygments.lexers.web import XmlLexer
-
-__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
-           'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
-
-
-class CSharpLexer(RegexLexer):
-    """
-    For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
-    source code.
-
-    Additional options accepted:
-
-    `unicodelevel`
-      Determines which Unicode characters this lexer allows for identifiers.
-      The possible values are:
-
-      * ``none`` -- only the ASCII letters and numbers are allowed. This
-        is the fastest selection.
-      * ``basic`` -- all Unicode characters from the specification except
-        category ``Lo`` are allowed.
-      * ``full`` -- all Unicode characters as specified in the C# specs
-        are allowed.  Note that this means a considerable slowdown since the
-        ``Lo`` category has more than 40,000 characters in it!
-
-      The default value is ``basic``.
-
-      *New in Pygments 0.8.*
-    """
-
-    name = 'C#'
-    aliases = ['csharp', 'c#']
-    filenames = ['*.cs']
-    mimetypes = ['text/x-csharp'] # inferred
-
-    flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
-    # for the range of allowed unicode characters in identifiers,
-    # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
-    levels = {
-        'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
-        'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
-                  '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
-                  uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
-        'full': ('@?(?:_|[^' +
-                 uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
-                 + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
-                                        'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
-    }
-
-    tokens = {}
-    token_variants = True
-
-    for levelname, cs_ident in levels.items():
-        tokens[levelname] = {
-            'root': [
-                # method names
-                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
-                 r'(' + cs_ident + ')'                           # method name
-                 r'(\s*)(\()',                               # signature start
-                 bygroups(using(this), Name.Function, Text, Punctuation)),
-                (r'^\s*\[.*?\]', Name.Attribute),
-                (r'[^\S\n]+', Text),
-                (r'\\\n', Text), # line continuation
-                (r'//.*?\n', Comment.Single),
-                (r'/[*].*?[*]/', Comment.Multiline),
-                (r'\n', Text),
-                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
-                (r'[{}]', Punctuation),
-                (r'@"(""|[^"])*"', String),
-                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
-                (r"'\\.'|'[^\\]'", String.Char),
-                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
-                 r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
-                (r'#[ \t]*(if|endif|else|elif|define|undef|'
-                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
-                 Comment.Preproc),
-                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
-                 Keyword)),
-                (r'(abstract|as|async|await|base|break|case|catch|'
-                 r'checked|const|continue|default|delegate|'
-                 r'do|else|enum|event|explicit|extern|false|finally|'
-                 r'fixed|for|foreach|goto|if|implicit|in|interface|'
-                 r'internal|is|lock|new|null|operator|'
-                 r'out|override|params|private|protected|public|readonly|'
-                 r'ref|return|sealed|sizeof|stackalloc|static|'
-                 r'switch|this|throw|true|try|typeof|'
-                 r'unchecked|unsafe|virtual|void|while|'
-                 r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
-                 r'descending|from|group|into|orderby|select|where|'
-                 r'join|equals)\b', Keyword),
-                (r'(global)(::)', bygroups(Keyword, Punctuation)),
-                (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
-                 r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
-                (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
-                (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
-                (cs_ident, Name),
-            ],
-            'class': [
-                (cs_ident, Name.Class, '#pop')
-            ],
-            'namespace': [
-                (r'(?=\()', Text, '#pop'), # using (resource)
-                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
-            ]
-        }
-
-    def __init__(self, **options):
-        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
-        if level not in self._all_tokens:
-            # compile the regexes now
-            self._tokens = self.__class__.process_tokendef(level)
-        else:
-            self._tokens = self._all_tokens[level]
-
-        RegexLexer.__init__(self, **options)
-
-
-class NemerleLexer(RegexLexer):
-    """
-    For `Nemerle <http://nemerle.org>`_ source code.
-
-    Additional options accepted:
-
-    `unicodelevel`
-      Determines which Unicode characters this lexer allows for identifiers.
-      The possible values are:
-
-      * ``none`` -- only the ASCII letters and numbers are allowed. This
-        is the fastest selection.
-      * ``basic`` -- all Unicode characters from the specification except
-        category ``Lo`` are allowed.
-      * ``full`` -- all Unicode characters as specified in the C# specs
-        are allowed.  Note that this means a considerable slowdown since the
-        ``Lo`` category has more than 40,000 characters in it!
-
-      The default value is ``basic``.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Nemerle'
-    aliases = ['nemerle']
-    filenames = ['*.n']
-    mimetypes = ['text/x-nemerle'] # inferred
-
-    flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
-    # for the range of allowed unicode characters in identifiers, see
-    # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
-    levels = dict(
-        none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
-        basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
-                 '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
-                 uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
-        full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
-                                            'Nl') + '])'
-                + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
-                                       'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
-    )
-
-    tokens = {}
-    token_variants = True
-
-    for levelname, cs_ident in levels.items():
-        tokens[levelname] = {
-            'root': [
-                # method names
-                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
-                 r'(' + cs_ident + ')'                           # method name
-                 r'(\s*)(\()',                               # signature start
-                 bygroups(using(this), Name.Function, Text, Punctuation)),
-                (r'^\s*\[.*?\]', Name.Attribute),
-                (r'[^\S\n]+', Text),
-                (r'\\\n', Text), # line continuation
-                (r'//.*?\n', Comment.Single),
-                (r'/[*].*?[*]/', Comment.Multiline),
-                (r'\n', Text),
-                (r'\$\s*"', String, 'splice-string'),
-                (r'\$\s*<#', String, 'splice-string2'),
-                (r'<#', String, 'recursive-string'),
-
-                (r'(<\[)\s*(' + cs_ident + ':)?', Keyword),
-                (r'\]\>', Keyword),
-
-                # quasiquotation only
-                (r'\$' + cs_ident, Name),
-                (r'(\$)(\()', bygroups(Name, Punctuation),
-                 'splice-string-content'),
-
-                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
-                (r'[{}]', Punctuation),
-                (r'@"(""|[^"])*"', String),
-                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
-                (r"'\\.'|'[^\\]'", String.Char),
-                (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
-                (r'#[ \t]*(if|endif|else|elif|define|undef|'
-                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
-                 Comment.Preproc),
-                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
-                 Keyword)),
-                (r'(abstract|and|as|base|catch|def|delegate|'
-                 r'enum|event|extern|false|finally|'
-                 r'fun|implements|interface|internal|'
-                 r'is|macro|match|matches|module|mutable|new|'
-                 r'null|out|override|params|partial|private|'
-                 r'protected|public|ref|sealed|static|'
-                 r'syntax|this|throw|true|try|type|typeof|'
-                 r'virtual|volatile|when|where|with|'
-                 r'assert|assert2|async|break|checked|continue|do|else|'
-                 r'ensures|for|foreach|if|late|lock|new|nolate|'
-                 r'otherwise|regexp|repeat|requires|return|surroundwith|'
-                 r'unchecked|unless|using|while|yield)\b', Keyword),
-                (r'(global)(::)', bygroups(Keyword, Punctuation)),
-                (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
-                 r'short|string|uint|ulong|ushort|void|array|list)\b\??',
-                 Keyword.Type),
-                (r'(:>?)\s*(' + cs_ident + r'\??)',
-                 bygroups(Punctuation, Keyword.Type)),
-                (r'(class|struct|variant|module)(\s+)',
-                 bygroups(Keyword, Text), 'class'),
-                (r'(namespace|using)(\s+)', bygroups(Keyword, Text),
-                 'namespace'),
-                (cs_ident, Name),
-            ],
-            'class': [
-                (cs_ident, Name.Class, '#pop')
-            ],
-            'namespace': [
-                (r'(?=\()', Text, '#pop'), # using (resource)
-                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
-            ],
-            'splice-string': [
-                (r'[^"$]',  String),
-                (r'\$' + cs_ident, Name),
-                (r'(\$)(\()', bygroups(Name, Punctuation),
-                 'splice-string-content'),
-                (r'\\"',  String),
-                (r'"',  String, '#pop')
-            ],
-            'splice-string2': [
-                (r'[^#<>$]',  String),
-                (r'\$' + cs_ident, Name),
-                (r'(\$)(\()', bygroups(Name, Punctuation),
-                 'splice-string-content'),
-                (r'<#',  String, '#push'),
-                (r'#>',  String, '#pop')
-            ],
-            'recursive-string': [
-                (r'[^#<>]',  String),
-                (r'<#',  String, '#push'),
-                (r'#>',  String, '#pop')
-            ],
-            'splice-string-content': [
-                (r'if|match', Keyword),
-                (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
-                (cs_ident, Name),
-                (r'\d+', Number),
-                (r'\(', Punctuation, '#push'),
-                (r'\)', Punctuation, '#pop')
-            ]
-        }
-
-    def __init__(self, **options):
-        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
-                               'basic')
-        if level not in self._all_tokens:
-            # compile the regexes now
-            self._tokens = self.__class__.process_tokendef(level)
-        else:
-            self._tokens = self._all_tokens[level]
-
-        RegexLexer.__init__(self, **options)
-
-
-class BooLexer(RegexLexer):
-    """
-    For `Boo <http://boo.codehaus.org/>`_ source code.
-    """
-
-    name = 'Boo'
-    aliases = ['boo']
-    filenames = ['*.boo']
-    mimetypes = ['text/x-boo']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(#|//).*$', Comment.Single),
-            (r'/[*]', Comment.Multiline, 'comment'),
-            (r'[]{}:(),.;[]', Punctuation),
-            (r'\\\n', Text),
-            (r'\\', Text),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            (r'/(\\\\|\\/|[^/\s])/', String.Regex),
-            (r'@/(\\\\|\\/|[^/])*/', String.Regex),
-            (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
-            (r'(as|abstract|callable|constructor|destructor|do|import|'
-             r'enum|event|final|get|interface|internal|of|override|'
-             r'partial|private|protected|public|return|set|static|'
-             r'struct|transient|virtual|yield|super|and|break|cast|'
-             r'continue|elif|else|ensure|except|for|given|goto|if|in|'
-             r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
-             r'while|from|as)\b', Keyword),
-            (r'def(?=\s+\(.*?\))', Keyword),
-            (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
-            (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
-             r'assert|checked|enumerate|filter|getter|len|lock|map|'
-             r'matrix|max|min|normalArrayIndexing|print|property|range|'
-             r'rawArrayIndexing|required|typeof|unchecked|using|'
-             r'yieldAll|zip)\b', Name.Builtin),
-            (r'"""(\\\\|\\"|.*?)"""', String.Double),
-            (r'"(\\\\|\\"|[^"]*?)"', String.Double),
-            (r"'(\\\\|\\'|[^']*?)'", String.Single),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
-            (r'[0-9][0-9\.]*(ms?|d|h|s)', Number),
-            (r'0\d+', Number.Oct),
-            (r'0x[a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+', Number.Integer),
-        ],
-        'comment': [
-            ('/[*]', Comment.Multiline, '#push'),
-            ('[*]/', Comment.Multiline, '#pop'),
-            ('[^/*]', Comment.Multiline),
-            ('[*/]', Comment.Multiline)
-        ],
-        'funcname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
-        ],
-        'classname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'namespace': [
-            ('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
-        ]
-    }
-
-
-class VbNetLexer(RegexLexer):
-    """
-    For
-    `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
-    source code.
-    """
-
-    name = 'VB.net'
-    aliases = ['vb.net', 'vbnet']
-    filenames = ['*.vb', '*.bas']
-    mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
-
-    flags = re.MULTILINE | re.IGNORECASE
-    tokens = {
-        'root': [
-            (r'^\s*<.*?>', Name.Attribute),
-            (r'\s+', Text),
-            (r'\n', Text),
-            (r'rem\b.*?\n', Comment),
-            (r"'.*?\n", Comment),
-            (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
-             r'#ExternalSource.*?\n|#End\s+ExternalSource|'
-             r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
-             Comment.Preproc),
-            (r'[\(\){}!#,.:]', Punctuation),
-            (r'Option\s+(Strict|Explicit|Compare)\s+'
-             r'(On|Off|Binary|Text)', Keyword.Declaration),
-            (r'(?<!\.)(AddHandler|Alias|'
-             r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
-             r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
-             r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
-             r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
-             r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
-             r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
-             r'Implements|Inherits|Interface|'
-             r'Let|Lib|Loop|Me|MustInherit|'
-             r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
-             r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
-             r'Operator|Option|Optional|Overloads|Overridable|'
-             r'Overrides|ParamArray|Partial|Private|Protected|'
-             r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
-             r'Return|Select|Set|Shadows|Shared|Single|'
-             r'Static|Step|Stop|SyncLock|Then|'
-             r'Throw|To|True|Try|TryCast|Wend|'
-             r'Using|When|While|Widening|With|WithEvents|'
-             r'WriteOnly)\b', Keyword),
-            (r'(?<!\.)End\b', Keyword, 'end'),
-            (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
-            (r'(?<!\.)(Function|Sub|Property)(\s+)',
-             bygroups(Keyword, Text), 'funcname'),
-            (r'(?<!\.)(Class|Structure|Enum)(\s+)',
-             bygroups(Keyword, Text), 'classname'),
-            (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
-             bygroups(Keyword, Text), 'namespace'),
-            (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
-             r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
-             r'UShort)\b', Keyword.Type),
-            (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
-             r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
-            (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
-             r'<=|>=|<>|[-&*/\\^+=<>]',
-             Operator),
-            ('"', String, 'string'),
-            ('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
-            ('#.*?#', Literal.Date),
-            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
-            (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
-            (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
-            (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
-            (r'_\n', Text), # Line continuation
-        ],
-        'string': [
-            (r'""', String),
-            (r'"C?', String, '#pop'),
-            (r'[^"]+', String),
-        ],
-        'dim': [
-            (r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
-            (r'', Text, '#pop'),  # any other syntax
-        ],
-        'funcname': [
-            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
-        ],
-        'classname': [
-            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
-        ],
-        'namespace': [
-            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
-        ],
-        'end': [
-            (r'\s+', Text),
-            (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
-             Keyword, '#pop'),
-            (r'', Text, '#pop'),
-        ]
-    }
-
-
-class GenericAspxLexer(RegexLexer):
-    """
-    Lexer for ASP.NET pages.
-    """
-
-    name = 'aspx-gen'
-    filenames = []
-    mimetypes = []
-
-    flags = re.DOTALL
-
-    tokens = {
-        'root': [
-            (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
-            (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
-                                                        Other,
-                                                        using(XmlLexer))),
-            (r'(.+?)(?=<)', using(XmlLexer)),
-            (r'.+', using(XmlLexer)),
-        ],
-    }
-
-
-#TODO support multiple languages within the same source file
-class CSharpAspxLexer(DelegatingLexer):
-    """
-    Lexer for highligting C# within ASP.NET pages.
-    """
-
-    name = 'aspx-cs'
-    aliases = ['aspx-cs']
-    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
-    mimetypes = []
-
-    def __init__(self, **options):
-        super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
-                                              **options)
-
-    def analyse_text(text):
-        if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
-            return 0.2
-        elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
-            return 0.15
-
-
-class VbNetAspxLexer(DelegatingLexer):
-    """
-    Lexer for highligting Visual Basic.net within ASP.NET pages.
-    """
-
-    name = 'aspx-vb'
-    aliases = ['aspx-vb']
-    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
-    mimetypes = []
-
-    def __init__(self, **options):
-        super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
-                                              **options)
-
-    def analyse_text(text):
-        if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
-            return 0.2
-        elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
-            return 0.15
-
-
-# Very close to functional.OcamlLexer
-class FSharpLexer(RegexLexer):
-    """
-    For the F# language.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'FSharp'
-    aliases = ['fsharp']
-    filenames = ['*.fs', '*.fsi']
-    mimetypes = ['text/x-fsharp']
-
-    keywords = [
-      'abstract', 'and', 'as', 'assert', 'base', 'begin', 'class',
-      'default', 'delegate', 'do', 'do!', 'done', 'downcast',
-      'downto', 'elif', 'else', 'end', 'exception', 'extern',
-      'false', 'finally', 'for', 'fun', 'function', 'global', 'if',
-      'in', 'inherit', 'inline', 'interface', 'internal', 'lazy',
-      'let', 'let!', 'match', 'member', 'module', 'mutable',
-      'namespace', 'new', 'null', 'of', 'open', 'or', 'override',
-      'private', 'public', 'rec', 'return', 'return!', 'sig',
-      'static', 'struct', 'then', 'to', 'true', 'try', 'type',
-      'upcast', 'use', 'use!', 'val', 'void', 'when', 'while',
-      'with', 'yield', 'yield!'
-    ]
-    keyopts = [
-      '!=','#','&&','&','\(','\)','\*','\+',',','-\.',
-      '->','-','\.\.','\.','::',':=',':>',':',';;',';','<-',
-      '<','>]','>','\?\?','\?','\[<','\[>','\[\|','\[',
-      ']','_','`','{','\|\]','\|','}','~','<@','=','@>'
-    ]
-
-    operators = r'[!$%&*+\./:<=>?@^|~-]'
-    word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'not', 'or']
-    prefix_syms = r'[!?~]'
-    infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array',
-                  'byte', 'sbyte', 'int16', 'uint16', 'uint32', 'int64', 'uint64'
-                  'nativeint', 'unativeint', 'decimal', 'void', 'float32', 'single',
-                  'double']
-
-    tokens = {
-        'escape-sequence': [
-            (r'\\[\\\"\'ntbr]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-        'root': [
-            (r'\s+', Text),
-            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
-            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
-             Name.Namespace, 'dotted'),
-            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
-            (r'//.*?\n', Comment.Single),
-            (r'\(\*(?!\))', Comment, 'comment'),
-            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
-            (r'(%s)' % '|'.join(keyopts), Operator),
-            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
-            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
-            (r'#[ \t]*(if|endif|else|line|nowarn|light)\b.*?\n',
-             Comment.Preproc),
-
-            (r"[^\W\d][\w']*", Name),
-
-            (r'\d[\d_]*', Number.Integer),
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'0[oO][0-7][0-7_]*', Number.Oct),
-            (r'0[bB][01][01_]*', Number.Binary),
-            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'", Keyword), # a stray quote is another syntax element
-
-            (r'"', String.Double, 'string'),
-
-            (r'[~?][a-z][\w\']*:', Name.Variable),
-        ],
-        'comment': [
-            (r'[^(*)]+', Comment),
-            (r'\(\*', Comment, '#push'),
-            (r'\*\)', Comment, '#pop'),
-            (r'[(*)]', Comment),
-        ],
-        'string': [
-            (r'[^\\"]+', String.Double),
-            include('escape-sequence'),
-            (r'\\\n', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
-            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
-            (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/foxpro.py b/python/ext-libs/pygments/lexers/foxpro.py
deleted file mode 100644
index 741ea04..0000000
--- a/python/ext-libs/pygments/lexers/foxpro.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.foxpro
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Simple lexer for Microsoft Visual FoxPro source code.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
-     Name, String
-
-__all__ = ['FoxProLexer']
-
-
-class FoxProLexer(RegexLexer):
-    """Lexer for Microsoft Visual FoxPro language.
-
-    FoxPro syntax allows shortening all keywords and function names
-    to 4 characters.  Shortened forms are not recognized by this lexer.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'FoxPro'
-    aliases = ['Clipper', 'XBase']
-    filenames = ['*.PRG', '*.prg']
-    mimetype = []
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r';\s*\n', Punctuation), # consume newline
-            (r'(^|\n)\s*', Text, 'newline'),
-
-            # Square brackets may be used for array indices
-            # and for string literal.  Look for arrays
-            # before matching string literals.
-            (r'(?<=\w)\[[0-9, ]+\]', Text),
-            (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
-            (r'(^\s*\*|&&|&&).*?\n', Comment.Single),
-
-            (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
-             r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
-             r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
-             r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
-             r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
-             r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
-             r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
-             r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
-             r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
-             r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
-             r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
-             r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
-             r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
-             r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
-             r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
-             r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
-             r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
-             r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
-             r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
-             r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
-             r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
-             r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
-             r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
-             r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
-             r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
-             r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
-             r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
-             r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
-             r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
-             r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
-             r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
-             r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
-             r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
-             r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
-             r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
-             r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
-             r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
-             r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
-             r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
-             r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
-             r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
-             r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
-             r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
-             r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
-             r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
-             r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
-             r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
-             r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
-             r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
-             r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
-             r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
-             r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
-             r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
-             r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
-             r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
-             r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
-             r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
-             r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
-             r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
-             r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
-             r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
-             r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
-             r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
-             r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
-             r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
-             r'YEAR)(?=\s*\()', Name.Function),
-
-            (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
-             r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
-             r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
-             r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
-             r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
-             r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
-             r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
-             r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
-             r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
-             r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
-             r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
-             r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
-             r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
-
-            (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
-
-            (r'Application|CheckBox|Collection|Column|ComboBox|'
-             r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
-             r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
-             r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
-             r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
-             r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
-             r'Project|Relation|ReportListener|Separator|Servers|Server|'
-             r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
-             r'XMLAdapter|XMLField|XMLTable', Name.Class),
-
-            (r'm\.[a-z_]\w*', Name.Variable),
-            (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
-
-            (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
-             r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
-             r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
-             r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
-             r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
-             r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
-             r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
-             r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
-             r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
-             r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
-             r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
-             r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
-             r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
-             r'BreakOnError|BufferModeOverride|BufferMode|'
-             r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
-             r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
-             r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
-             r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
-             r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
-             r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
-             r'ContinuousScroll|ControlBox|ControlCount|Controls|'
-             r'ControlSource|ConversionFunc|Count|CurrentControl|'
-             r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
-             r'CursorSchema|CursorSource|CursorStatus|Curvature|'
-             r'Database|DataSessionID|DataSession|DataSourceType|'
-             r'DataSource|DataType|DateFormat|DateMark|Debug|'
-             r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
-             r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
-             r'DeleteCmd|DeleteMark|Description|Desktop|'
-             r'Details|DisabledBackColor|DisabledForeColor|'
-             r'DisabledItemBackColor|DisabledItemForeColor|'
-             r'DisabledPicture|DisableEncode|DisplayCount|'
-             r'DisplayValue|Dockable|Docked|DockPosition|'
-             r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
-             r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
-             r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
-             r'DynamicFontItalic|DynamicFontStrikethru|'
-             r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
-             r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
-             r'DynamicLineHeight|EditorOptions|Enabled|'
-             r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
-             r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
-             r'FetchMemoDataSource|FetchMemo|FetchSize|'
-             r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
-             r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
-             r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
-             r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
-             r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
-             r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
-             r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
-             r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
-             r'HelpContextID|HideSelection|HighlightBackColor|'
-             r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
-             r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
-             r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
-             r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
-             r'InsertCmdDataSource|InsertCmdRefreshCmd|'
-             r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
-             r'InsertCmd|Instancing|IntegralHeight|'
-             r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
-             r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
-             r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
-             r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
-             r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
-             r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
-             r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
-             r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
-             r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
-             r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
-             r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
-             r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
-             r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
-             r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
-             r'NumberOfElements|Object|OLEClass|OLEDragMode|'
-             r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
-             r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
-             r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
-             r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
-             r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
-             r'OutputPageCount|OutputType|PageCount|PageHeight|'
-             r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
-             r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
-             r'Parent|Partition|PasswordChar|PictureMargin|'
-             r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
-             r'PictureVal|Picture|Prepared|'
-             r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
-             r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
-             r'ProjectHookLibrary|ProjectHook|QuietMode|'
-             r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
-             r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
-             r'RecordSource|RefreshAlias|'
-             r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
-             r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
-             r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
-             r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
-             r'Rotation|RowColChange|RowHeight|RowSourceType|'
-             r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
-             r'Seconds|SelectCmd|SelectedID|'
-             r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
-             r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
-             r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
-             r'ServerClass|ServerHelpFile|ServerName|'
-             r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
-             r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
-             r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
-             r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
-             r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
-             r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
-             r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
-             r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
-             r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
-             r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
-             r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
-             r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
-             r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
-             r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
-             r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
-             r'VersionCompany|VersionCopyright|VersionDescription|'
-             r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
-             r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
-             r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
-             r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
-             r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
-             r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
-             r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
-             r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
-             r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
-             r'XSDtype|ZoomBox)', Name.Attribute),
-
-            (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
-             r'AddProperty|AddTableSchema|AddToSCC|Add|'
-             r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
-             r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
-             r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
-             r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
-             r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
-             r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
-             r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
-             r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
-             r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
-             r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
-             r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
-             r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
-             r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
-             r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
-             r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
-             r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
-             r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
-             r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
-             r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
-
-            (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
-             r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
-             r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
-             r'AfterCursorUpdate|AfterDelete|AfterInsert|'
-             r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
-             r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
-             r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
-             r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
-             r'BeforeInsert|BeforeDock|BeforeOpenTables|'
-             r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
-             r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
-             r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
-             r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
-             r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
-             r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
-             r'dbc_AfterDropOffline|dbc_AfterDropTable|'
-             r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
-             r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
-             r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
-             r'dbc_AfterRenameTable|dbc_AfterRenameView|'
-             r'dbc_AfterValidateData|dbc_BeforeAddTable|'
-             r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
-             r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
-             r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
-             r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
-             r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
-             r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
-             r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
-             r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
-             r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
-             r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
-             r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
-             r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
-             r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
-             r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
-             r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
-             r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
-             r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
-             r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
-             r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
-             r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
-             r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
-             r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
-             r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
-             r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
-
-            (r'\s+', Text),
-            # everything else is not colored
-            (r'.', Text),
-        ],
-        'newline': [
-            (r'\*.*?$', Comment.Single, '#pop'),
-            (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
-             r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
-             r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
-             r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
-             r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
-             r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
-             r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
-             r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
-             r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
-             r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
-             r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
-             r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
-             r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
-             r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
-             r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
-             r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
-             r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
-             r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
-             r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
-             r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
-             r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
-             r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
-             r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
-             r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
-             r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
-             r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
-             r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
-             r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
-             r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
-             r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
-             r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
-             r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
-             r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
-             r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
-             r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
-             r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
-             r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
-             r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
-             r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
-             r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
-             r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
-             r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
-             r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
-             r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
-             r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
-             r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
-             r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
-             r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
-             r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
-             r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
-             r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
-             r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
-             r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
-             r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
-             r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
-             r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
-             r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
-             r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
-             r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
-             r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
-             r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
-             r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
-             r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
-             r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
-             r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
-             r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
-             r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
-             r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
-             r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
-             r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
-             r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
-             r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
-             r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
-             r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
-                Keyword.Reserved, '#pop'),
-            (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
-                Comment.Preproc, '#pop'),
-            (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
-            (r'.', Text, '#pop'),
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/functional.py b/python/ext-libs/pygments/lexers/functional.py
deleted file mode 100644
index 1047ab0..0000000
--- a/python/ext-libs/pygments/lexers/functional.py
+++ /dev/null
@@ -1,2598 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.functional
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for functional languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
-     String, Number, Punctuation, Literal, Generic, Error
-
-__all__ = ['RacketLexer', 'SchemeLexer', 'CommonLispLexer', 'HaskellLexer',
-           'LiterateHaskellLexer', 'SMLLexer', 'OcamlLexer', 'ErlangLexer',
-           'ErlangShellLexer', 'OpaLexer', 'CoqLexer', 'NewLispLexer',
-           'ElixirLexer', 'ElixirConsoleLexer', 'KokaLexer']
-
-
-class RacketLexer(RegexLexer):
-    """
-    Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly known as
-    PLT Scheme).
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Racket'
-    aliases = ['racket', 'rkt']
-    filenames = ['*.rkt', '*.rktl']
-    mimetypes = ['text/x-racket', 'application/x-racket']
-
-    # From namespace-mapped-symbols
-    keywords = [
-        '#%app', '#%datum', '#%expression', '#%module-begin',
-        '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
-        '#%provide', '#%require', '#%stratified-body', '#%top',
-        '#%top-interaction', '#%variable-reference', '...', 'and', 'begin',
-        'begin-for-syntax', 'begin0', 'case', 'case-lambda', 'cond',
-        'datum->syntax-object', 'define', 'define-for-syntax',
-        'define-struct', 'define-syntax', 'define-syntax-rule',
-        'define-syntaxes', 'define-values', 'define-values-for-syntax',
-        'delay', 'do', 'expand-path', 'fluid-let', 'hash-table-copy',
-        'hash-table-count', 'hash-table-for-each', 'hash-table-get',
-        'hash-table-iterate-first', 'hash-table-iterate-key',
-        'hash-table-iterate-next', 'hash-table-iterate-value',
-        'hash-table-map', 'hash-table-put!', 'hash-table-remove!',
-        'hash-table?', 'if', 'lambda', 'let', 'let*', 'let*-values',
-        'let-struct', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
-        'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
-        'letrec-syntaxes+values', 'letrec-values', 'list-immutable',
-        'make-hash-table', 'make-immutable-hash-table', 'make-namespace',
-        'module', 'module-identifier=?', 'module-label-identifier=?',
-        'module-template-identifier=?', 'module-transformer-identifier=?',
-        'namespace-transformer-require', 'or', 'parameterize',
-        'parameterize*', 'parameterize-break', 'provide',
-        'provide-for-label', 'provide-for-syntax', 'quasiquote',
-        'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
-        'quote-syntax/prune', 'require', 'require-for-label',
-        'require-for-syntax', 'require-for-template', 'set!',
-        'set!-values', 'syntax', 'syntax-case', 'syntax-case*',
-        'syntax-id-rules', 'syntax-object->datum', 'syntax-rules',
-        'syntax/loc', 'time', 'transcript-off', 'transcript-on', 'unless',
-        'unquote', 'unquote-splicing', 'unsyntax', 'unsyntax-splicing',
-        'when', 'with-continuation-mark', 'with-handlers',
-        'with-handlers*', 'with-syntax', 'λ'
-    ]
-
-    # From namespace-mapped-symbols
-    builtins = [
-        '*', '+', '-', '/', '<', '<=', '=', '>', '>=',
-        'abort-current-continuation', 'abs', 'absolute-path?', 'acos',
-        'add1', 'alarm-evt', 'always-evt', 'andmap', 'angle', 'append',
-        'apply', 'arithmetic-shift', 'arity-at-least',
-        'arity-at-least-value', 'arity-at-least?', 'asin', 'assoc', 'assq',
-        'assv', 'atan', 'banner', 'bitwise-and', 'bitwise-bit-field',
-        'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
-        'boolean?', 'bound-identifier=?', 'box', 'box-immutable', 'box?',
-        'break-enabled', 'break-thread', 'build-path',
-        'build-path/convention-type', 'byte-pregexp', 'byte-pregexp?',
-        'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
-        'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
-        'bytes->path-element', 'bytes->string/latin-1',
-        'bytes->string/locale', 'bytes->string/utf-8', 'bytes-append',
-        'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
-        'bytes-converter?', 'bytes-copy', 'bytes-copy!', 'bytes-fill!',
-        'bytes-length', 'bytes-open-converter', 'bytes-ref', 'bytes-set!',
-        'bytes-utf-8-index', 'bytes-utf-8-length', 'bytes-utf-8-ref',
-        'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar', 'caaadr',
-        'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
-        'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
-        'call-in-nested-thread', 'call-with-break-parameterization',
-        'call-with-composable-continuation',
-        'call-with-continuation-barrier', 'call-with-continuation-prompt',
-        'call-with-current-continuation', 'call-with-escape-continuation',
-        'call-with-exception-handler',
-        'call-with-immediate-continuation-mark', 'call-with-input-file',
-        'call-with-output-file', 'call-with-parameterization',
-        'call-with-semaphore', 'call-with-semaphore/enable-break',
-        'call-with-values', 'call/cc', 'call/ec', 'car', 'cdaaar',
-        'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar',
-        'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
-        'ceiling', 'channel-get', 'channel-put', 'channel-put-evt',
-        'channel-try-get', 'channel?', 'chaperone-box', 'chaperone-evt',
-        'chaperone-hash', 'chaperone-of?', 'chaperone-procedure',
-        'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
-        'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
-        'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
-        'char-downcase', 'char-foldcase', 'char-general-category',
-        'char-graphic?', 'char-iso-control?', 'char-lower-case?',
-        'char-numeric?', 'char-punctuation?', 'char-ready?',
-        'char-symbolic?', 'char-title-case?', 'char-titlecase',
-        'char-upcase', 'char-upper-case?', 'char-utf-8-length',
-        'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?',
-        'char>?', 'char?', 'check-duplicate-identifier',
-        'checked-procedure-check-and-extract', 'choice-evt',
-        'cleanse-path', 'close-input-port', 'close-output-port',
-        'collect-garbage', 'collection-file-path', 'collection-path',
-        'compile', 'compile-allow-set!-undefined',
-        'compile-context-preservation-enabled',
-        'compile-enforce-module-constants', 'compile-syntax',
-        'compiled-expression?', 'compiled-module-expression?',
-        'complete-path?', 'complex?', 'cons',
-        'continuation-mark-set->context', 'continuation-mark-set->list',
-        'continuation-mark-set->list*', 'continuation-mark-set-first',
-        'continuation-mark-set?', 'continuation-marks',
-        'continuation-prompt-available?', 'continuation-prompt-tag?',
-        'continuation?', 'copy-file', 'cos',
-        'current-break-parameterization', 'current-code-inspector',
-        'current-command-line-arguments', 'current-compile',
-        'current-continuation-marks', 'current-custodian',
-        'current-directory', 'current-drive', 'current-error-port',
-        'current-eval', 'current-evt-pseudo-random-generator',
-        'current-gc-milliseconds', 'current-get-interaction-input-port',
-        'current-inexact-milliseconds', 'current-input-port',
-        'current-inspector', 'current-library-collection-paths',
-        'current-load', 'current-load-extension',
-        'current-load-relative-directory', 'current-load/use-compiled',
-        'current-locale', 'current-memory-use', 'current-milliseconds',
-        'current-module-declare-name', 'current-module-declare-source',
-        'current-module-name-resolver', 'current-namespace',
-        'current-output-port', 'current-parameterization',
-        'current-preserved-thread-cell-values', 'current-print',
-        'current-process-milliseconds', 'current-prompt-read',
-        'current-pseudo-random-generator', 'current-read-interaction',
-        'current-reader-guard', 'current-readtable', 'current-seconds',
-        'current-security-guard', 'current-subprocess-custodian-mode',
-        'current-thread', 'current-thread-group',
-        'current-thread-initial-stack-size',
-        'current-write-relative-directory', 'custodian-box-value',
-        'custodian-box?', 'custodian-limit-memory',
-        'custodian-managed-list', 'custodian-memory-accounting-available?',
-        'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
-        'custom-print-quotable-accessor', 'custom-print-quotable?',
-        'custom-write-accessor', 'custom-write?', 'date', 'date*',
-        'date*-nanosecond', 'date*-time-zone-name', 'date*?', 'date-day',
-        'date-dst?', 'date-hour', 'date-minute', 'date-month',
-        'date-second', 'date-time-zone-offset', 'date-week-day',
-        'date-year', 'date-year-day', 'date?', 'datum-intern-literal',
-        'default-continuation-prompt-tag', 'delete-directory',
-        'delete-file', 'denominator', 'directory-exists?',
-        'directory-list', 'display', 'displayln', 'dump-memory-stats',
-        'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-wind',
-        'eof', 'eof-object?', 'ephemeron-value', 'ephemeron?', 'eprintf',
-        'eq-hash-code', 'eq?', 'equal-hash-code',
-        'equal-secondary-hash-code', 'equal?', 'equal?/recur',
-        'eqv-hash-code', 'eqv?', 'error', 'error-display-handler',
-        'error-escape-handler', 'error-print-context-length',
-        'error-print-source-location', 'error-print-width',
-        'error-value->string-handler', 'eval', 'eval-jit-enabled',
-        'eval-syntax', 'even?', 'evt?', 'exact->inexact', 'exact-integer?',
-        'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact?',
-        'executable-yield-handler', 'exit', 'exit-handler', 'exn',
-        'exn-continuation-marks', 'exn-message', 'exn:break',
-        'exn:break-continuation', 'exn:break?', 'exn:fail',
-        'exn:fail:contract', 'exn:fail:contract:arity',
-        'exn:fail:contract:arity?', 'exn:fail:contract:continuation',
-        'exn:fail:contract:continuation?',
-        'exn:fail:contract:divide-by-zero',
-        'exn:fail:contract:divide-by-zero?',
-        'exn:fail:contract:non-fixnum-result',
-        'exn:fail:contract:non-fixnum-result?',
-        'exn:fail:contract:variable', 'exn:fail:contract:variable-id',
-        'exn:fail:contract:variable?', 'exn:fail:contract?',
-        'exn:fail:filesystem', 'exn:fail:filesystem:exists',
-        'exn:fail:filesystem:exists?', 'exn:fail:filesystem:version',
-        'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
-        'exn:fail:network', 'exn:fail:network?', 'exn:fail:out-of-memory',
-        'exn:fail:out-of-memory?', 'exn:fail:read',
-        'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
-        'exn:fail:read:non-char', 'exn:fail:read:non-char?',
-        'exn:fail:read?', 'exn:fail:syntax', 'exn:fail:syntax-exprs',
-        'exn:fail:syntax:unbound', 'exn:fail:syntax:unbound?',
-        'exn:fail:syntax?', 'exn:fail:unsupported',
-        'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
-        'exn:fail?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?', 'exp',
-        'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
-        'expand-syntax-to-top-form', 'expand-to-top-form',
-        'expand-user-path', 'expt', 'file-exists?',
-        'file-or-directory-identity', 'file-or-directory-modify-seconds',
-        'file-or-directory-permissions', 'file-position', 'file-size',
-        'file-stream-buffer-mode', 'file-stream-port?',
-        'filesystem-root-list', 'find-executable-path',
-        'find-library-collection-paths', 'find-system-path', 'fixnum?',
-        'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
-        'for-each', 'force', 'format', 'fprintf', 'free-identifier=?',
-        'gcd', 'generate-temporaries', 'gensym', 'get-output-bytes',
-        'get-output-string', 'getenv', 'global-port-print-handler',
-        'guard-evt', 'handle-evt', 'handle-evt?', 'hash', 'hash-equal?',
-        'hash-eqv?', 'hash-has-key?', 'hash-placeholder?', 'hash-ref!',
-        'hasheq', 'hasheqv', 'identifier-binding',
-        'identifier-label-binding', 'identifier-prune-lexical-context',
-        'identifier-prune-to-source-module',
-        'identifier-remove-from-definition-context',
-        'identifier-template-binding', 'identifier-transformer-binding',
-        'identifier?', 'imag-part', 'immutable?', 'impersonate-box',
-        'impersonate-hash', 'impersonate-procedure', 'impersonate-struct',
-        'impersonate-vector', 'impersonator-of?',
-        'impersonator-prop:application-mark',
-        'impersonator-property-accessor-procedure?',
-        'impersonator-property?', 'impersonator?', 'inexact->exact',
-        'inexact-real?', 'inexact?', 'input-port?', 'inspector?',
-        'integer->char', 'integer->integer-bytes',
-        'integer-bytes->integer', 'integer-length', 'integer-sqrt',
-        'integer-sqrt/remainder', 'integer?',
-        'internal-definition-context-seal', 'internal-definition-context?',
-        'keyword->string', 'keyword<?', 'keyword?', 'kill-thread', 'lcm',
-        'length', 'liberal-define-context?', 'link-exists?', 'list',
-        'list*', 'list->bytes', 'list->string', 'list->vector', 'list-ref',
-        'list-tail', 'list?', 'load', 'load-extension',
-        'load-on-demand-enabled', 'load-relative',
-        'load-relative-extension', 'load/cd', 'load/use-compiled',
-        'local-expand', 'local-expand/capture-lifts',
-        'local-transformer-expand',
-        'local-transformer-expand/capture-lifts', 'locale-string-encoding',
-        'log', 'magnitude', 'make-arity-at-least', 'make-bytes',
-        'make-channel', 'make-continuation-prompt-tag', 'make-custodian',
-        'make-custodian-box', 'make-date', 'make-date*',
-        'make-derived-parameter', 'make-directory', 'make-ephemeron',
-        'make-exn', 'make-exn:break', 'make-exn:fail',
-        'make-exn:fail:contract', 'make-exn:fail:contract:arity',
-        'make-exn:fail:contract:continuation',
-        'make-exn:fail:contract:divide-by-zero',
-        'make-exn:fail:contract:non-fixnum-result',
-        'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
-        'make-exn:fail:filesystem:exists',
-        'make-exn:fail:filesystem:version', 'make-exn:fail:network',
-        'make-exn:fail:out-of-memory', 'make-exn:fail:read',
-        'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
-        'make-exn:fail:syntax', 'make-exn:fail:syntax:unbound',
-        'make-exn:fail:unsupported', 'make-exn:fail:user',
-        'make-file-or-directory-link', 'make-hash-placeholder',
-        'make-hasheq-placeholder', 'make-hasheqv',
-        'make-hasheqv-placeholder', 'make-immutable-hasheqv',
-        'make-impersonator-property', 'make-input-port', 'make-inspector',
-        'make-known-char-range-list', 'make-output-port', 'make-parameter',
-        'make-pipe', 'make-placeholder', 'make-polar',
-        'make-prefab-struct', 'make-pseudo-random-generator',
-        'make-reader-graph', 'make-readtable', 'make-rectangular',
-        'make-rename-transformer', 'make-resolved-module-path',
-        'make-security-guard', 'make-semaphore', 'make-set!-transformer',
-        'make-shared-bytes', 'make-sibling-inspector',
-        'make-special-comment', 'make-srcloc', 'make-string',
-        'make-struct-field-accessor', 'make-struct-field-mutator',
-        'make-struct-type', 'make-struct-type-property',
-        'make-syntax-delta-introducer', 'make-syntax-introducer',
-        'make-thread-cell', 'make-thread-group', 'make-vector',
-        'make-weak-box', 'make-weak-hasheqv', 'make-will-executor', 'map',
-        'max', 'mcar', 'mcdr', 'mcons', 'member', 'memq', 'memv', 'min',
-        'module->exports', 'module->imports', 'module->language-info',
-        'module->namespace', 'module-compiled-exports',
-        'module-compiled-imports', 'module-compiled-language-info',
-        'module-compiled-name', 'module-path-index-join',
-        'module-path-index-resolve', 'module-path-index-split',
-        'module-path-index?', 'module-path?', 'module-predefined?',
-        'module-provide-protected?', 'modulo', 'mpair?', 'nack-guard-evt',
-        'namespace-attach-module', 'namespace-attach-module-declaration',
-        'namespace-base-phase', 'namespace-mapped-symbols',
-        'namespace-module-identifier', 'namespace-module-registry',
-        'namespace-require', 'namespace-require/constant',
-        'namespace-require/copy', 'namespace-require/expansion-time',
-        'namespace-set-variable-value!', 'namespace-symbol->identifier',
-        'namespace-syntax-introduce', 'namespace-undefine-variable!',
-        'namespace-unprotect-module', 'namespace-variable-value',
-        'namespace?', 'negative?', 'never-evt', 'newline',
-        'normal-case-path', 'not', 'null', 'null?', 'number->string',
-        'number?', 'numerator', 'object-name', 'odd?', 'open-input-bytes',
-        'open-input-file', 'open-input-output-file', 'open-input-string',
-        'open-output-bytes', 'open-output-file', 'open-output-string',
-        'ormap', 'output-port?', 'pair?', 'parameter-procedure=?',
-        'parameter?', 'parameterization?', 'path->bytes',
-        'path->complete-path', 'path->directory-path', 'path->string',
-        'path-add-suffix', 'path-convention-type', 'path-element->bytes',
-        'path-element->string', 'path-for-some-system?',
-        'path-list-string->path-list', 'path-replace-suffix',
-        'path-string?', 'path?', 'peek-byte', 'peek-byte-or-special',
-        'peek-bytes', 'peek-bytes!', 'peek-bytes-avail!',
-        'peek-bytes-avail!*', 'peek-bytes-avail!/enable-break',
-        'peek-char', 'peek-char-or-special', 'peek-string', 'peek-string!',
-        'pipe-content-length', 'placeholder-get', 'placeholder-set!',
-        'placeholder?', 'poll-guard-evt', 'port-closed-evt',
-        'port-closed?', 'port-commit-peeked', 'port-count-lines!',
-        'port-count-lines-enabled', 'port-display-handler',
-        'port-file-identity', 'port-file-unlock', 'port-next-location',
-        'port-print-handler', 'port-progress-evt',
-        'port-provides-progress-evts?', 'port-read-handler',
-        'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
-        'port-writes-special?', 'port?', 'positive?',
-        'prefab-key->struct-type', 'prefab-struct-key', 'pregexp',
-        'pregexp?', 'primitive-closure?', 'primitive-result-arity',
-        'primitive?', 'print', 'print-as-expression',
-        'print-boolean-long-form', 'print-box', 'print-graph',
-        'print-hash-table', 'print-mpair-curly-braces',
-        'print-pair-curly-braces', 'print-reader-abbreviations',
-        'print-struct', 'print-syntax-width', 'print-unreadable',
-        'print-vector-length', 'printf', 'procedure->method',
-        'procedure-arity', 'procedure-arity-includes?', 'procedure-arity?',
-        'procedure-closure-contents-eq?', 'procedure-extract-target',
-        'procedure-reduce-arity', 'procedure-rename',
-        'procedure-struct-type?', 'procedure?', 'promise?',
-        'prop:arity-string', 'prop:checked-procedure',
-        'prop:custom-print-quotable', 'prop:custom-write',
-        'prop:equal+hash', 'prop:evt', 'prop:exn:srclocs',
-        'prop:impersonator-of', 'prop:input-port',
-        'prop:liberal-define-context', 'prop:output-port',
-        'prop:procedure', 'prop:rename-transformer',
-        'prop:set!-transformer', 'pseudo-random-generator->vector',
-        'pseudo-random-generator-vector?', 'pseudo-random-generator?',
-        'putenv', 'quotient', 'quotient/remainder', 'raise',
-        'raise-arity-error', 'raise-mismatch-error', 'raise-syntax-error',
-        'raise-type-error', 'raise-user-error', 'random', 'random-seed',
-        'rational?', 'rationalize', 'read', 'read-accept-bar-quote',
-        'read-accept-box', 'read-accept-compiled', 'read-accept-dot',
-        'read-accept-graph', 'read-accept-infix-dot', 'read-accept-lang',
-        'read-accept-quasiquote', 'read-accept-reader', 'read-byte',
-        'read-byte-or-special', 'read-bytes', 'read-bytes!',
-        'read-bytes-avail!', 'read-bytes-avail!*',
-        'read-bytes-avail!/enable-break', 'read-bytes-line',
-        'read-case-sensitive', 'read-char', 'read-char-or-special',
-        'read-curly-brace-as-paren', 'read-decimal-as-inexact',
-        'read-eval-print-loop', 'read-language', 'read-line',
-        'read-on-demand-source', 'read-square-bracket-as-paren',
-        'read-string', 'read-string!', 'read-syntax',
-        'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
-        'readtable?', 'real->double-flonum', 'real->floating-point-bytes',
-        'real->single-flonum', 'real-part', 'real?', 'regexp',
-        'regexp-match', 'regexp-match-peek', 'regexp-match-peek-immediate',
-        'regexp-match-peek-positions',
-        'regexp-match-peek-positions-immediate',
-        'regexp-match-peek-positions-immediate/end',
-        'regexp-match-peek-positions/end', 'regexp-match-positions',
-        'regexp-match-positions/end', 'regexp-match/end', 'regexp-match?',
-        'regexp-max-lookbehind', 'regexp-replace', 'regexp-replace*',
-        'regexp?', 'relative-path?', 'remainder',
-        'rename-file-or-directory', 'rename-transformer-target',
-        'rename-transformer?', 'resolve-path', 'resolved-module-path-name',
-        'resolved-module-path?', 'reverse', 'round', 'seconds->date',
-        'security-guard?', 'semaphore-peek-evt', 'semaphore-post',
-        'semaphore-try-wait?', 'semaphore-wait',
-        'semaphore-wait/enable-break', 'semaphore?',
-        'set!-transformer-procedure', 'set!-transformer?', 'set-box!',
-        'set-mcar!', 'set-mcdr!', 'set-port-next-location!',
-        'shared-bytes', 'shell-execute', 'simplify-path', 'sin',
-        'single-flonum?', 'sleep', 'special-comment-value',
-        'special-comment?', 'split-path', 'sqrt', 'srcloc',
-        'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
-        'srcloc-span', 'srcloc?', 'string', 'string->bytes/latin-1',
-        'string->bytes/locale', 'string->bytes/utf-8',
-        'string->immutable-string', 'string->keyword', 'string->list',
-        'string->number', 'string->path', 'string->path-element',
-        'string->symbol', 'string->uninterned-symbol',
-        'string->unreadable-symbol', 'string-append', 'string-ci<=?',
-        'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
-        'string-copy', 'string-copy!', 'string-downcase', 'string-fill!',
-        'string-foldcase', 'string-length', 'string-locale-ci<?',
-        'string-locale-ci=?', 'string-locale-ci>?',
-        'string-locale-downcase', 'string-locale-upcase',
-        'string-locale<?', 'string-locale=?', 'string-locale>?',
-        'string-normalize-nfc', 'string-normalize-nfd',
-        'string-normalize-nfkc', 'string-normalize-nfkd', 'string-ref',
-        'string-set!', 'string-titlecase', 'string-upcase',
-        'string-utf-8-length', 'string<=?', 'string<?', 'string=?',
-        'string>=?', 'string>?', 'string?', 'struct->vector',
-        'struct-accessor-procedure?', 'struct-constructor-procedure?',
-        'struct-info', 'struct-mutator-procedure?',
-        'struct-predicate-procedure?', 'struct-type-info',
-        'struct-type-make-constructor', 'struct-type-make-predicate',
-        'struct-type-property-accessor-procedure?',
-        'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
-        'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
-        'struct:exn:fail', 'struct:exn:fail:contract',
-        'struct:exn:fail:contract:arity',
-        'struct:exn:fail:contract:continuation',
-        'struct:exn:fail:contract:divide-by-zero',
-        'struct:exn:fail:contract:non-fixnum-result',
-        'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
-        'struct:exn:fail:filesystem:exists',
-        'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
-        'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
-        'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
-        'struct:exn:fail:syntax', 'struct:exn:fail:syntax:unbound',
-        'struct:exn:fail:unsupported', 'struct:exn:fail:user',
-        'struct:srcloc', 'struct?', 'sub1', 'subbytes', 'subprocess',
-        'subprocess-group-enabled', 'subprocess-kill', 'subprocess-pid',
-        'subprocess-status', 'subprocess-wait', 'subprocess?', 'substring',
-        'symbol->string', 'symbol-interned?', 'symbol-unreadable?',
-        'symbol?', 'sync', 'sync/enable-break', 'sync/timeout',
-        'sync/timeout/enable-break', 'syntax->list', 'syntax-arm',
-        'syntax-column', 'syntax-disarm', 'syntax-e', 'syntax-line',
-        'syntax-local-bind-syntaxes', 'syntax-local-certifier',
-        'syntax-local-context', 'syntax-local-expand-expression',
-        'syntax-local-get-shadower', 'syntax-local-introduce',
-        'syntax-local-lift-context', 'syntax-local-lift-expression',
-        'syntax-local-lift-module-end-declaration',
-        'syntax-local-lift-provide', 'syntax-local-lift-require',
-        'syntax-local-lift-values-expression',
-        'syntax-local-make-definition-context',
-        'syntax-local-make-delta-introducer',
-        'syntax-local-module-defined-identifiers',
-        'syntax-local-module-exports',
-        'syntax-local-module-required-identifiers', 'syntax-local-name',
-        'syntax-local-phase-level',
-        'syntax-local-transforming-module-provides?', 'syntax-local-value',
-        'syntax-local-value/immediate', 'syntax-original?',
-        'syntax-position', 'syntax-property',
-        'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
-        'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
-        'syntax-source-module', 'syntax-span', 'syntax-taint',
-        'syntax-tainted?', 'syntax-track-origin',
-        'syntax-transforming-module-expression?', 'syntax-transforming?',
-        'syntax?', 'system-big-endian?', 'system-idle-evt',
-        'system-language+country', 'system-library-subpath',
-        'system-path-convention-type', 'system-type', 'tan',
-        'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
-        'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
-        'tcp-close', 'tcp-connect', 'tcp-connect/enable-break',
-        'tcp-listen', 'tcp-listener?', 'tcp-port?', 'terminal-port?',
-        'thread', 'thread-cell-ref', 'thread-cell-set!', 'thread-cell?',
-        'thread-dead-evt', 'thread-dead?', 'thread-group?',
-        'thread-resume', 'thread-resume-evt', 'thread-rewind-receive',
-        'thread-running?', 'thread-suspend', 'thread-suspend-evt',
-        'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
-        'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?',
-        'udp-close', 'udp-connect!', 'udp-connected?', 'udp-open-socket',
-        'udp-receive!', 'udp-receive!*', 'udp-receive!-evt',
-        'udp-receive!/enable-break', 'udp-receive-ready-evt', 'udp-send',
-        'udp-send*', 'udp-send-evt', 'udp-send-ready-evt', 'udp-send-to',
-        'udp-send-to*', 'udp-send-to-evt', 'udp-send-to/enable-break',
-        'udp-send/enable-break', 'udp?', 'unbox',
-        'uncaught-exception-handler', 'use-collection-link-paths',
-        'use-compiled-file-paths', 'use-user-specific-search-paths',
-        'values', 'variable-reference->empty-namespace',
-        'variable-reference->module-base-phase',
-        'variable-reference->module-declaration-inspector',
-        'variable-reference->module-source',
-        'variable-reference->namespace', 'variable-reference->phase',
-        'variable-reference->resolved-module-path',
-        'variable-reference-constant?', 'variable-reference?', 'vector',
-        'vector->immutable-vector', 'vector->list',
-        'vector->pseudo-random-generator',
-        'vector->pseudo-random-generator!', 'vector->values',
-        'vector-fill!', 'vector-immutable', 'vector-length', 'vector-ref',
-        'vector-set!', 'vector-set-performance-stats!', 'vector?',
-        'version', 'void', 'void?', 'weak-box-value', 'weak-box?',
-        'will-execute', 'will-executor?', 'will-register',
-        'will-try-execute', 'with-input-from-file', 'with-output-to-file',
-        'wrap-evt', 'write', 'write-byte', 'write-bytes',
-        'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
-        'write-bytes-avail/enable-break', 'write-char', 'write-special',
-        'write-special-avail*', 'write-special-evt', 'write-string', 'zero?'
-    ]
-
-    # From SchemeLexer
-    valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
-
-    tokens = {
-        'root' : [
-            (r';.*$', Comment.Single),
-            (r'#\|[^|]+\|#', Comment.Multiline),
-
-            # whitespaces - usually not relevant
-            (r'\s+', Text),
-
-            ## numbers: Keep in mind Racket reader hash prefixes,
-            ## which can denote the base or the type. These don't map
-            ## neatly onto pygments token types; some judgment calls
-            ## here.  Note that none of these regexps attempt to
-            ## exclude identifiers that start with a number, such as a
-            ## variable named "100-Continue".
-
-            # #b
-            (r'#b[-+]?[01]+\.[01]+', Number.Float),
-            (r'#b[01]+e[-+]?[01]+', Number.Float),
-            (r'#b[-+]?[01]/[01]+', Number),
-            (r'#b[-+]?[01]+', Number.Integer),
-            (r'#b\S*', Error),
-
-            # #d OR no hash prefix
-            (r'(#d)?[-+]?\d+\.\d+', Number.Float),
-            (r'(#d)?\d+e[-+]?\d+', Number.Float),
-            (r'(#d)?[-+]?\d+/\d+', Number),
-            (r'(#d)?[-+]?\d+', Number.Integer),
-            (r'#d\S*', Error),
-
-            # #e
-            (r'#e[-+]?\d+\.\d+', Number.Float),
-            (r'#e\d+e[-+]?\d+', Number.Float),
-            (r'#e[-+]?\d+/\d+', Number),
-            (r'#e[-+]?\d+', Number),
-            (r'#e\S*', Error),
-
-            # #i is always inexact-real, i.e. float
-            (r'#i[-+]?\d+\.\d+', Number.Float),
-            (r'#i\d+e[-+]?\d+', Number.Float),
-            (r'#i[-+]?\d+/\d+', Number.Float),
-            (r'#i[-+]?\d+', Number.Float),
-            (r'#i\S*', Error),
-
-            # #o
-            (r'#o[-+]?[0-7]+\.[0-7]+', Number.Oct),
-            (r'#o[0-7]+e[-+]?[0-7]+', Number.Oct),
-            (r'#o[-+]?[0-7]+/[0-7]+', Number.Oct),
-            (r'#o[-+]?[0-7]+', Number.Oct),
-            (r'#o\S*', Error),
-
-            # #x
-            (r'#x[-+]?[0-9a-fA-F]+\.[0-9a-fA-F]+', Number.Hex),
-            # the exponent variation (e.g. #x1e1) is N/A
-            (r'#x[-+]?[0-9a-fA-F]+/[0-9a-fA-F]+', Number.Hex),
-            (r'#x[-+]?[0-9a-fA-F]+', Number.Hex),
-            (r'#x\S*', Error),
-
-
-            # strings, symbols and characters
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'" + valid_name, String.Symbol),
-            (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
-            (r'#rx".+"', String.Regex),
-            (r'#px".+"', String.Regex),
-
-            # constants
-            (r'(#t|#f)', Name.Constant),
-
-            # keyword argument names (e.g. #:keyword)
-            (r'#:\S+', Keyword.Declaration),
-
-            # #lang
-            (r'#lang \S+', Keyword.Namespace),
-
-            # special operators
-            (r"('|#|`|,@|,|\.)", Operator),
-
-            # highlight the keywords
-            ('(%s)' % '|'.join([
-                re.escape(entry) + ' ' for entry in keywords]),
-                Keyword
-            ),
-
-            # first variable in a quoted string like
-            # '(this is syntactic sugar)
-            (r"(?<='\()" + valid_name, Name.Variable),
-            (r"(?<=#\()" + valid_name, Name.Variable),
-
-            # highlight the builtins
-            ("(?<=\()(%s)" % '|'.join([
-                re.escape(entry) + ' ' for entry in builtins]),
-                Name.Builtin
-            ),
-
-            # the remaining functions; handle both ( and [
-            (r'(?<=(\(|\[|\{))' + valid_name, Name.Function),
-
-            # find the remaining variables
-            (valid_name, Name.Variable),
-
-            # the famous parentheses!
-            (r'(\(|\)|\[|\]|\{|\})', Punctuation),
-        ],
-    }
-
-
-class SchemeLexer(RegexLexer):
-    """
-    A Scheme lexer, parsing a stream and outputting the tokens
-    needed to highlight scheme code.
-    This lexer could be most probably easily subclassed to parse
-    other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.
-
-    This parser is checked with pastes from the LISP pastebin
-    at http://paste.lisp.org/ to cover as much syntax as possible.
-
-    It supports the full Scheme syntax as defined in R5RS.
-
-    *New in Pygments 0.6.*
-    """
-    name = 'Scheme'
-    aliases = ['scheme', 'scm']
-    filenames = ['*.scm', '*.ss']
-    mimetypes = ['text/x-scheme', 'application/x-scheme']
-
-    # list of known keywords and builtins taken form vim 6.4 scheme.vim
-    # syntax file.
-    keywords = [
-        'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
-        'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
-        'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
-        'let-syntax', 'letrec-syntax', 'syntax-rules'
-    ]
-    builtins = [
-        '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
-        'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
-        'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
-        'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
-        'cadr', 'call-with-current-continuation', 'call-with-input-file',
-        'call-with-output-file', 'call-with-values', 'call/cc', 'car',
-        'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
-        'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
-        'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
-        'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
-        'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
-        'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
-        'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
-        'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
-        'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
-        'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
-        'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
-        'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
-        'integer?', 'interaction-environment', 'lcm', 'length', 'list',
-        'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
-        'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
-        'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
-        'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
-        'null?', 'number->string', 'number?', 'numerator', 'odd?',
-        'open-input-file', 'open-output-file', 'output-port?', 'pair?',
-        'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
-        'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
-        'remainder', 'reverse', 'round', 'scheme-report-environment',
-        'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
-        'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
-        'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
-        'string-copy', 'string-fill!', 'string-length', 'string-ref',
-        'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
-        'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
-        'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
-        'vector', 'vector->list', 'vector-fill!', 'vector-length',
-        'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
-        'with-output-to-file', 'write', 'write-char', 'zero?'
-    ]
-
-    # valid names for identifiers
-    # well, names can only not consist fully of numbers
-    # but this should be good enough for now
-    valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
-
-    tokens = {
-        'root' : [
-            # the comments - always starting with semicolon
-            # and going to the end of the line
-            (r';.*$', Comment.Single),
-
-            # whitespaces - usually not relevant
-            (r'\s+', Text),
-
-            # numbers
-            (r'-?\d+\.\d+', Number.Float),
-            (r'-?\d+', Number.Integer),
-            # support for uncommon kinds of numbers -
-            # have to figure out what the characters mean
-            #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
-            # strings, symbols and characters
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'" + valid_name, String.Symbol),
-            (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
-
-            # constants
-            (r'(#t|#f)', Name.Constant),
-
-            # special operators
-            (r"('|#|`|,@|,|\.)", Operator),
-
-            # highlight the keywords
-            ('(%s)' % '|'.join([
-                re.escape(entry) + ' ' for entry in keywords]),
-                Keyword
-            ),
-
-            # first variable in a quoted string like
-            # '(this is syntactic sugar)
-            (r"(?<='\()" + valid_name, Name.Variable),
-            (r"(?<=#\()" + valid_name, Name.Variable),
-
-            # highlight the builtins
-            ("(?<=\()(%s)" % '|'.join([
-                re.escape(entry) + ' ' for entry in builtins]),
-                Name.Builtin
-            ),
-
-            # the remaining functions
-            (r'(?<=\()' + valid_name, Name.Function),
-            # find the remaining variables
-            (valid_name, Name.Variable),
-
-            # the famous parentheses!
-            (r'(\(|\))', Punctuation),
-            (r'(\[|\])', Punctuation),
-        ],
-    }
-
-
-class CommonLispLexer(RegexLexer):
-    """
-    A Common Lisp lexer.
-
-    *New in Pygments 0.9.*
-    """
-    name = 'Common Lisp'
-    aliases = ['common-lisp', 'cl']
-    filenames = ['*.cl', '*.lisp', '*.el']  # use for Elisp too
-    mimetypes = ['text/x-common-lisp']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    ### couple of useful regexes
-
-    # characters that are not macro-characters and can be used to begin a symbol
-    nonmacro = r'\\.|[a-zA-Z0-9!$%&*+-/<=>?@\[\]^_{}~]'
-    constituent = nonmacro + '|[#.:]'
-    terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
-    ### symbol token, reverse-engineered from hyperspec
-    # Take a deep breath...
-    symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
-    def __init__(self, **options):
-        from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \
-            SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
-            BUILTIN_TYPES, BUILTIN_CLASSES
-        self.builtin_function = BUILTIN_FUNCTIONS
-        self.special_forms = SPECIAL_FORMS
-        self.macros = MACROS
-        self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
-        self.declarations = DECLARATIONS
-        self.builtin_types = BUILTIN_TYPES
-        self.builtin_classes = BUILTIN_CLASSES
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        stack = ['root']
-        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
-            if token is Name.Variable:
-                if value in self.builtin_function:
-                    yield index, Name.Builtin, value
-                    continue
-                if value in self.special_forms:
-                    yield index, Keyword, value
-                    continue
-                if value in self.macros:
-                    yield index, Name.Builtin, value
-                    continue
-                if value in self.lambda_list_keywords:
-                    yield index, Keyword, value
-                    continue
-                if value in self.declarations:
-                    yield index, Keyword, value
-                    continue
-                if value in self.builtin_types:
-                    yield index, Keyword.Type, value
-                    continue
-                if value in self.builtin_classes:
-                    yield index, Name.Class, value
-                    continue
-            yield index, token, value
-
-    tokens = {
-        'root' : [
-            ('', Text, 'body'),
-        ],
-        'multiline-comment' : [
-            (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
-            (r'\|#', Comment.Multiline, '#pop'),
-            (r'[^|#]+', Comment.Multiline),
-            (r'[|#]', Comment.Multiline),
-        ],
-        'commented-form' : [
-            (r'\(', Comment.Preproc, '#push'),
-            (r'\)', Comment.Preproc, '#pop'),
-            (r'[^()]+', Comment.Preproc),
-        ],
-        'body' : [
-            # whitespace
-            (r'\s+', Text),
-
-            # single-line comment
-            (r';.*$', Comment.Single),
-
-            # multi-line comment
-            (r'#\|', Comment.Multiline, 'multiline-comment'),
-
-            # encoding comment (?)
-            (r'#\d*Y.*$', Comment.Special),
-
-            # strings and characters
-            (r'"(\\.|\\\n|[^"\\])*"', String),
-            # quoting
-            (r":" + symbol, String.Symbol),
-            (r"'" + symbol, String.Symbol),
-            (r"'", Operator),
-            (r"`", Operator),
-
-            # decimal numbers
-            (r'[-+]?\d+\.?' + terminated, Number.Integer),
-            (r'[-+]?\d+/\d+' + terminated, Number),
-            (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' \
-                + terminated, Number.Float),
-
-            # sharpsign strings and characters
-            (r"#\\." + terminated, String.Char),
-            (r"#\\" + symbol, String.Char),
-
-            # vector
-            (r'#\(', Operator, 'body'),
-
-            # bitstring
-            (r'#\d*\*[01]*', Literal.Other),
-
-            # uninterned symbol
-            (r'#:' + symbol, String.Symbol),
-
-            # read-time and load-time evaluation
-            (r'#[.,]', Operator),
-
-            # function shorthand
-            (r'#\'', Name.Function),
-
-            # binary rational
-            (r'#[bB][+-]?[01]+(/[01]+)?', Number),
-
-            # octal rational
-            (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
-            # hex rational
-            (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
-
-            # radix rational
-            (r'#\d+[rR][+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
-
-            # complex
-            (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'),
-
-            # array
-            (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
-            # structure
-            (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
-            # path
-            (r'#[pP]?"(\\.|[^"])*"', Literal.Other),
-
-            # reference
-            (r'#\d+=', Operator),
-            (r'#\d+#', Operator),
-
-            # read-time comment
-            (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
-
-            # read-time conditional
-            (r'#[+-]', Operator),
-
-            # special operators that should have been parsed already
-            (r'(,@|,|\.)', Operator),
-
-            # special constants
-            (r'(t|nil)' + terminated, Name.Constant),
-
-            # functions and variables
-            (r'\*' + symbol + '\*', Name.Variable.Global),
-            (symbol, Name.Variable),
-
-            # parentheses
-            (r'\(', Punctuation, 'body'),
-            (r'\)', Punctuation, '#pop'),
-        ],
-    }
-
-
-class HaskellLexer(RegexLexer):
-    """
-    A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
-    *New in Pygments 0.8.*
-    """
-    name = 'Haskell'
-    aliases = ['haskell', 'hs']
-    filenames = ['*.hs']
-    mimetypes = ['text/x-haskell']
-
-    reserved = ['case','class','data','default','deriving','do','else',
-                'if','in','infix[lr]?','instance',
-                'let','newtype','of','then','type','where','_']
-    ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
-             'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
-             'DC[1-4]','NAK','SYN','ETB','CAN',
-             'EM','SUB','ESC','[FGRU]S','SP','DEL']
-
-    tokens = {
-        'root': [
-            # Whitespace:
-            (r'\s+', Text),
-            #(r'--\s*|.*$', Comment.Doc),
-            (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
-            (r'{-', Comment.Multiline, 'comment'),
-            # Lexemes:
-            #  Identifiers
-            (r'\bimport\b', Keyword.Reserved, 'import'),
-            (r'\bmodule\b', Keyword.Reserved, 'module'),
-            (r'\berror\b', Name.Exception),
-            (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
-            (r'^[_a-z][\w\']*', Name.Function),
-            (r"'?[_a-z][\w']*", Name),
-            (r"('')?[A-Z][\w\']*", Keyword.Type),
-            #  Operators
-            (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
-            (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
-            (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
-            (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
-            #  Numbers
-            (r'\d+[eE][+-]?\d+', Number.Float),
-            (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
-            (r'0[oO][0-7]+', Number.Oct),
-            (r'0[xX][\da-fA-F]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            #  Character/String Literals
-            (r"'", String.Char, 'character'),
-            (r'"', String, 'string'),
-            #  Special
-            (r'\[\]', Keyword.Type),
-            (r'\(\)', Name.Builtin),
-            (r'[][(),;`{}]', Punctuation),
-        ],
-        'import': [
-            # Import statements
-            (r'\s+', Text),
-            (r'"', String, 'string'),
-            # after "funclist" state
-            (r'\)', Punctuation, '#pop'),
-            (r'qualified\b', Keyword),
-            # import X as Y
-            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)',
-             bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
-            # import X hiding (functions)
-            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()',
-             bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
-            # import X (functions)
-            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
-             bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
-            # import X
-            (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'),
-        ],
-        'module': [
-            (r'\s+', Text),
-            (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
-             bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
-            (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'),
-        ],
-        'funclist': [
-            (r'\s+', Text),
-            (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
-            (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
-            (r'--.*$', Comment.Single),
-            (r'{-', Comment.Multiline, 'comment'),
-            (r',', Punctuation),
-            (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
-            # (HACK, but it makes sense to push two instances, believe me)
-            (r'\(', Punctuation, ('funclist', 'funclist')),
-            (r'\)', Punctuation, '#pop:2'),
-        ],
-        'comment': [
-            # Multiline Comments
-            (r'[^-{}]+', Comment.Multiline),
-            (r'{-', Comment.Multiline, '#push'),
-            (r'-}', Comment.Multiline, '#pop'),
-            (r'[-{}]', Comment.Multiline),
-        ],
-        'character': [
-            # Allows multi-chars, incorrectly.
-            (r"[^\\']", String.Char),
-            (r"\\", String.Escape, 'escape'),
-            ("'", String.Char, '#pop'),
-        ],
-        'string': [
-            (r'[^\\"]+', String),
-            (r"\\", String.Escape, 'escape'),
-            ('"', String, '#pop'),
-        ],
-        'escape': [
-            (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
-            (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
-            ('|'.join(ascii), String.Escape, '#pop'),
-            (r'o[0-7]+', String.Escape, '#pop'),
-            (r'x[\da-fA-F]+', String.Escape, '#pop'),
-            (r'\d+', String.Escape, '#pop'),
-            (r'\s+\\', String.Escape, '#pop'),
-        ],
-    }
-
-
-line_re = re.compile('.*?\n')
-bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
-class LiterateHaskellLexer(Lexer):
-    """
-    For Literate Haskell (Bird-style or LaTeX) source.
-
-    Additional options accepted:
-
-    `litstyle`
-        If given, must be ``"bird"`` or ``"latex"``.  If not given, the style
-        is autodetected: if the first non-whitespace character in the source
-        is a backslash or percent character, LaTeX is assumed, else Bird.
-
-    *New in Pygments 0.9.*
-    """
-    name = 'Literate Haskell'
-    aliases = ['lhs', 'literate-haskell']
-    filenames = ['*.lhs']
-    mimetypes = ['text/x-literate-haskell']
-
-    def get_tokens_unprocessed(self, text):
-        hslexer = HaskellLexer(**self.options)
-
-        style = self.options.get('litstyle')
-        if style is None:
-            style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
-
-        code = ''
-        insertions = []
-        if style == 'bird':
-            # bird-style
-            for match in line_re.finditer(text):
-                line = match.group()
-                m = bird_re.match(line)
-                if m:
-                    insertions.append((len(code),
-                                       [(0, Comment.Special, m.group(1))]))
-                    code += m.group(2)
-                else:
-                    insertions.append((len(code), [(0, Text, line)]))
-        else:
-            # latex-style
-            from pygments.lexers.text import TexLexer
-            lxlexer = TexLexer(**self.options)
-
-            codelines = 0
-            latex = ''
-            for match in line_re.finditer(text):
-                line = match.group()
-                if codelines:
-                    if line.lstrip().startswith('\\end{code}'):
-                        codelines = 0
-                        latex += line
-                    else:
-                        code += line
-                elif line.lstrip().startswith('\\begin{code}'):
-                    codelines = 1
-                    latex += line
-                    insertions.append((len(code),
-                                       list(lxlexer.get_tokens_unprocessed(latex))))
-                    latex = ''
-                else:
-                    latex += line
-            insertions.append((len(code),
-                               list(lxlexer.get_tokens_unprocessed(latex))))
-        for item in do_insertions(insertions, hslexer.get_tokens_unprocessed(code)):
-            yield item
-
-
-class SMLLexer(RegexLexer):
-    """
-    For the Standard ML language.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Standard ML'
-    aliases = ['sml']
-    filenames = ['*.sml', '*.sig', '*.fun',]
-    mimetypes = ['text/x-standardml', 'application/x-standardml']
-
-    alphanumid_reserved = [
-        # Core
-        'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
-        'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
-        'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
-        'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
-        # Modules
-        'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
-        'struct', 'structure', 'where',
-    ]
-
-    symbolicid_reserved = [
-        # Core
-        ':', '\|', '=', '=>', '->', '#',
-        # Modules
-        ':>',
-    ]
-
-    nonid_reserved = [ '(', ')', '[', ']', '{', '}', ',', ';', '...', '_' ]
-
-    alphanumid_re = r"[a-zA-Z][a-zA-Z0-9_']*"
-    symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
-    # A character constant is a sequence of the form #s, where s is a string
-    # constant denoting a string of size one character. This setup just parses
-    # the entire string as either a String.Double or a String.Char (depending
-    # on the argument), even if the String.Char is an erronous
-    # multiple-character string.
-    def stringy (whatkind):
-        return [
-            (r'[^"\\]', whatkind),
-            (r'\\[\\\"abtnvfr]', String.Escape),
-            # Control-character notation is used for codes < 32,
-            # where \^@ == \000
-            (r'\\\^[\x40-\x5e]', String.Escape),
-            # Docs say 'decimal digits'
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\u[0-9a-fA-F]{4}', String.Escape),
-            (r'\\\s+\\', String.Interpol),
-            (r'"', whatkind, '#pop'),
-        ]
-
-    # Callbacks for distinguishing tokens and reserved words
-    def long_id_callback(self, match):
-        if match.group(1) in self.alphanumid_reserved: token = Error
-        else: token = Name.Namespace
-        yield match.start(1), token, match.group(1)
-        yield match.start(2), Punctuation, match.group(2)
-
-    def end_id_callback(self, match):
-        if match.group(1) in self.alphanumid_reserved: token = Error
-        elif match.group(1) in self.symbolicid_reserved: token = Error
-        else: token = Name
-        yield match.start(1), token, match.group(1)
-
-    def id_callback(self, match):
-        str = match.group(1)
-        if str in self.alphanumid_reserved: token = Keyword.Reserved
-        elif str in self.symbolicid_reserved: token = Punctuation
-        else: token = Name
-        yield match.start(1), token, str
-
-    tokens = {
-        # Whitespace and comments are (almost) everywhere
-        'whitespace': [
-            (r'\s+', Text),
-            (r'\(\*', Comment.Multiline, 'comment'),
-        ],
-
-        'delimiters': [
-            # This lexer treats these delimiters specially:
-            # Delimiters define scopes, and the scope is how the meaning of
-            # the `|' is resolved - is it a case/handle expression, or function
-            # definition by cases? (This is not how the Definition works, but
-            # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
-            (r'\(|\[|{', Punctuation, 'main'),
-            (r'\)|\]|}', Punctuation, '#pop'),
-            (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
-            (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
-            (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
-        ],
-
-        'core': [
-            # Punctuation that doesn't overlap symbolic identifiers
-            (r'(%s)' % '|'.join([re.escape(z) for z in nonid_reserved]),
-             Punctuation),
-
-            # Special constants: strings, floats, numbers in decimal and hex
-            (r'#"', String.Char, 'char'),
-            (r'"', String.Double, 'string'),
-            (r'~?0x[0-9a-fA-F]+', Number.Hex),
-            (r'0wx[0-9a-fA-F]+', Number.Hex),
-            (r'0w\d+', Number.Integer),
-            (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
-            (r'~?\d+\.\d+', Number.Float),
-            (r'~?\d+[eE]~?\d+', Number.Float),
-            (r'~?\d+', Number.Integer),
-
-            # Labels
-            (r'#\s*[1-9][0-9]*', Name.Label),
-            (r'#\s*(%s)' % alphanumid_re, Name.Label),
-            (r'#\s+(%s)' % symbolicid_re, Name.Label),
-            # Some reserved words trigger a special, local lexer state change
-            (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
-            (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
-            (r'\b(functor|include|open|signature|structure)\b(?!\')',
-             Keyword.Reserved, 'sname'),
-            (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
-            # Regular identifiers, long and otherwise
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
-            (r'(%s)' % alphanumid_re, id_callback),
-            (r'(%s)' % symbolicid_re, id_callback),
-        ],
-        'dotted': [
-            (r'(%s)(\.)' % alphanumid_re, long_id_callback),
-            (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
-            (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
-            (r'\s+', Error),
-            (r'\S+', Error),
-        ],
-
-
-        # Main parser (prevents errors in files that have scoping errors)
-        'root': [ (r'', Text, 'main') ],
-
-        # In this scope, I expect '|' to not be followed by a function name,
-        # and I expect 'and' to be followed by a binding site
-        'main': [
-            include('whitespace'),
-
-            # Special behavior of val/and/fun
-            (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
-            (r'\b(fun)\b(?!\')', Keyword.Reserved,
-             ('#pop', 'main-fun', 'fname')),
-
-            include('delimiters'),
-            include('core'),
-            (r'\S+', Error),
-        ],
-
-        # In this scope, I expect '|' and 'and' to be followed by a function
-        'main-fun': [
-            include('whitespace'),
-
-            (r'\s', Text),
-            (r'\(\*', Comment.Multiline, 'comment'),
-
-            # Special behavior of val/and/fun
-            (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
-            (r'\b(val)\b(?!\')', Keyword.Reserved,
-             ('#pop', 'main', 'vname')),
-
-            # Special behavior of '|' and '|'-manipulating keywords
-            (r'\|', Punctuation, 'fname'),
-            (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
-             ('#pop', 'main')),
-
-            include('delimiters'),
-            include('core'),
-            (r'\S+', Error),
-        ],
-
-        # Character and string parsers
-        'char': stringy(String.Char),
-        'string': stringy(String.Double),
-
-        'breakout': [
-            (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
-        ],
-
-        # Dealing with what comes after module system keywords
-        'sname': [
-            include('whitespace'),
-            include('breakout'),
-
-            (r'(%s)' % alphanumid_re, Name.Namespace),
-            (r'', Text, '#pop'),
-        ],
-
-        # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
-        'fname': [
-            include('whitespace'),
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (r'\(', Punctuation, 'tyvarseq'),
-
-            (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
-            (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
-            # Ignore interesting function declarations like "fun (x + y) = ..."
-            (r'', Text, '#pop'),
-        ],
-
-        # Dealing with what comes after the 'val' (or 'and') keyword
-        'vname': [
-            include('whitespace'),
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (r'\(', Punctuation, 'tyvarseq'),
-
-            (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
-             bygroups(Name.Variable, Text, Punctuation), '#pop'),
-            (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
-             bygroups(Name.Variable, Text, Punctuation), '#pop'),
-            (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
-            (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
-            # Ignore interesting patterns like 'val (x, y)'
-            (r'', Text, '#pop'),
-        ],
-
-        # Dealing with what comes after the 'type' (or 'and') keyword
-        'tname': [
-            include('whitespace'),
-            include('breakout'),
-
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (r'\(', Punctuation, 'tyvarseq'),
-            (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
-            (r'(%s)' % alphanumid_re, Keyword.Type),
-            (r'(%s)' % symbolicid_re, Keyword.Type),
-            (r'\S+', Error, '#pop'),
-        ],
-
-        # A type binding includes most identifiers
-        'typbind': [
-            include('whitespace'),
-
-            (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
-            include('breakout'),
-            include('core'),
-            (r'\S+', Error, '#pop'),
-        ],
-
-        # Dealing with what comes after the 'datatype' (or 'and') keyword
-        'dname': [
-            include('whitespace'),
-            include('breakout'),
-
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (r'\(', Punctuation, 'tyvarseq'),
-            (r'(=)(\s*)(datatype)',
-             bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
-            (r'=(?!%s)' % symbolicid_re, Punctuation,
-             ('#pop', 'datbind', 'datcon')),
-
-            (r'(%s)' % alphanumid_re, Keyword.Type),
-            (r'(%s)' % symbolicid_re, Keyword.Type),
-            (r'\S+', Error, '#pop'),
-        ],
-
-        # common case - A | B | C of int
-        'datbind': [
-            include('whitespace'),
-
-            (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
-            (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-            (r'\b(of)\b(?!\')', Keyword.Reserved),
-
-            (r'(\|)(\s*)(%s)' % alphanumid_re,
-             bygroups(Punctuation, Text, Name.Class)),
-            (r'(\|)(\s+)(%s)' % symbolicid_re,
-             bygroups(Punctuation, Text, Name.Class)),
-
-            include('breakout'),
-            include('core'),
-            (r'\S+', Error),
-        ],
-
-        # Dealing with what comes after an exception
-        'ename': [
-            include('whitespace'),
-
-            (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
-             bygroups(Keyword.Reserved, Text, Name.Class)),
-            (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
-             bygroups(Keyword.Reserved, Text, Name.Class)),
-            (r'\b(of)\b(?!\')', Keyword.Reserved),
-
-            include('breakout'),
-            include('core'),
-            (r'\S+', Error),
-        ],
-
-        'datcon': [
-            include('whitespace'),
-            (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
-            (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
-            (r'\S+', Error, '#pop'),
-        ],
-
-        # Series of type variables
-        'tyvarseq': [
-            (r'\s', Text),
-            (r'\(\*', Comment.Multiline, 'comment'),
-
-            (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
-            (alphanumid_re, Name),
-            (r',', Punctuation),
-            (r'\)', Punctuation, '#pop'),
-            (symbolicid_re, Name),
-        ],
-
-        'comment': [
-            (r'[^(*)]', Comment.Multiline),
-            (r'\(\*', Comment.Multiline, '#push'),
-            (r'\*\)', Comment.Multiline, '#pop'),
-            (r'[(*)]', Comment.Multiline),
-        ],
-    }
-
-
-class OcamlLexer(RegexLexer):
-    """
-    For the OCaml language.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'OCaml'
-    aliases = ['ocaml']
-    filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
-    mimetypes = ['text/x-ocaml']
-
-    keywords = [
-      'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
-      'downto', 'else', 'end', 'exception', 'external', 'false',
-      'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
-      'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
-      'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
-      'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
-      'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
-    ]
-    keyopts = [
-      '!=','#','&','&&','\(','\)','\*','\+',',','-',
-      '-\.','->','\.','\.\.',':','::',':=',':>',';',';;','<',
-      '<-','=','>','>]','>}','\?','\?\?','\[','\[<','\[>','\[\|',
-      ']','_','`','{','{<','\|','\|]','}','~'
-    ]
-
-    operators = r'[!$%&*+\./:<=>?@^|~-]'
-    word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
-    prefix_syms = r'[!?~]'
-    infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array']
-
-    tokens = {
-        'escape-sequence': [
-            (r'\\[\\\"\'ntbr]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-        'root': [
-            (r'\s+', Text),
-            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
-            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
-             Name.Namespace, 'dotted'),
-            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
-            (r'\(\*(?![)])', Comment, 'comment'),
-            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
-            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
-            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
-            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
-            (r"[^\W\d][\w']*", Name),
-
-            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'0[oO][0-7][0-7_]*', Number.Oct),
-            (r'0[bB][01][01_]*', Number.Binary),
-            (r'\d[\d_]*', Number.Integer),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'", Keyword), # a stray quote is another syntax element
-
-            (r'"', String.Double, 'string'),
-
-            (r'[~?][a-z][\w\']*:', Name.Variable),
-        ],
-        'comment': [
-            (r'[^(*)]+', Comment),
-            (r'\(\*', Comment, '#push'),
-            (r'\*\)', Comment, '#pop'),
-            (r'[(*)]', Comment),
-        ],
-        'string': [
-            (r'[^\\"]+', String.Double),
-            include('escape-sequence'),
-            (r'\\\n', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
-            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
-            (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
-        ],
-    }
-
-
-class ErlangLexer(RegexLexer):
-    """
-    For the Erlang functional programming language.
-
-    Blame Jeremy Thurgood (http://jerith.za.net/).
-
-    *New in Pygments 0.9.*
-    """
-
-    name = 'Erlang'
-    aliases = ['erlang']
-    filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
-    mimetypes = ['text/x-erlang']
-
-    keywords = [
-        'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
-        'let', 'of', 'query', 'receive', 'try', 'when',
-        ]
-
-    builtins = [ # See erlang(3) man page
-        'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
-        'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
-        'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
-        'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
-        'float', 'float_to_list', 'fun_info', 'fun_to_list',
-        'function_exported', 'garbage_collect', 'get', 'get_keys',
-        'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
-        'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
-        'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
-        'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
-        'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
-        'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
-        'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
-        'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
-        'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
-        'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
-        'pid_to_list', 'port_close', 'port_command', 'port_connect',
-        'port_control', 'port_call', 'port_info', 'port_to_list',
-        'process_display', 'process_flag', 'process_info', 'purge_module',
-        'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
-        'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
-        'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
-        'spawn_opt', 'split_binary', 'start_timer', 'statistics',
-        'suspend_process', 'system_flag', 'system_info', 'system_monitor',
-        'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
-        'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
-        'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
-        ]
-
-    operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
-    word_operators = [
-        'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
-        'div', 'not', 'or', 'orelse', 'rem', 'xor'
-        ]
-
-    atom_re = r"(?:[a-z][a-zA-Z0-9_]*|'[^\n']*[^\\]')"
-
-    variable_re = r'(?:[A-Z_][a-zA-Z0-9_]*)'
-
-    escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
-
-    macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
-    base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'%.*\n', Comment),
-            ('(' + '|'.join(keywords) + r')\b', Keyword),
-            ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
-            ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
-            (r'^-', Punctuation, 'directive'),
-            (operators, Operator),
-            (r'"', String, 'string'),
-            (r'<<', Name.Label),
-            (r'>>', Name.Label),
-            ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
-            ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
-            (r'[+-]?\d+', Number.Integer),
-            (r'[+-]?\d+.\d+', Number.Float),
-            (r'[]\[:_@\".{}()|;,]', Punctuation),
-            (variable_re, Name.Variable),
-            (atom_re, Name),
-            (r'\?'+macro_re, Name.Constant),
-            (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
-            (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
-            ],
-        'string': [
-            (escape_re, String.Escape),
-            (r'"', String, '#pop'),
-            (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
-            (r'[^"\\~]+', String),
-            (r'~', String),
-            ],
-        'directive': [
-            (r'(define)(\s*)(\()('+macro_re+r')',
-             bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
-            (r'(record)(\s*)(\()('+macro_re+r')',
-             bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
-            (atom_re, Name.Entity, '#pop'),
-            ],
-        }
-
-
-class ErlangShellLexer(Lexer):
-    """
-    Shell sessions in erl (for Erlang code).
-
-    *New in Pygments 1.1.*
-    """
-    name = 'Erlang erl session'
-    aliases = ['erl']
-    filenames = ['*.erl-sh']
-    mimetypes = ['text/x-erl-shellsession']
-
-    _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
-
-    def get_tokens_unprocessed(self, text):
-        erlexer = ErlangLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(text):
-            line = match.group()
-            m = self._prompt_re.match(line)
-            if m is not None:
-                end = m.end()
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:end])]))
-                curcode += line[end:]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                    erlexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-                if line.startswith('*'):
-                    yield match.start(), Generic.Traceback, line
-                else:
-                    yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      erlexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class OpaLexer(RegexLexer):
-    """
-    Lexer for the Opa language (http://opalang.org).
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Opa'
-    aliases = ['opa']
-    filenames = ['*.opa']
-    mimetypes = ['text/x-opa']
-
-    # most of these aren't strictly keywords
-    # but if you color only real keywords, you might just
-    # as well not color anything
-    keywords = [
-        'and', 'as', 'begin', 'css', 'database', 'db', 'do', 'else', 'end',
-        'external', 'forall', 'if', 'import', 'match', 'package', 'parser',
-        'rec', 'server', 'then', 'type', 'val', 'with', 'xml_parser',
-    ]
-
-    # matches both stuff and `stuff`
-    ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
-    op_re = r'[.=\-<>,@~%/+?*&^!]'
-    punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
-                               # because they are also used for inserts
-
-    tokens = {
-        # copied from the caml lexer, should be adapted
-        'escape-sequence': [
-            (r'\\[\\\"\'ntr}]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-
-        # factorizing these rules, because they are inserted many times
-        'comments': [
-            (r'/\*', Comment, 'nested-comment'),
-            (r'//.*?$', Comment),
-        ],
-        'comments-and-spaces': [
-            include('comments'),
-            (r'\s+', Text),
-        ],
-
-        'root': [
-            include('comments-and-spaces'),
-            # keywords
-            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
-            # directives
-            # we could parse the actual set of directives instead of anything
-            # starting with @, but this is troublesome
-            # because it needs to be adjusted all the time
-            # and assuming we parse only sources that compile, it is useless
-            (r'@'+ident_re+r'\b', Name.Builtin.Pseudo),
-
-            # number literals
-            (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
-            (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
-            (r'-?\d+[eE][+\-]?\d+', Number.Float),
-            (r'0[xX][\da-fA-F]+', Number.Hex),
-            (r'0[oO][0-7]+', Number.Oct),
-            (r'0[bB][01]+', Number.Binary),
-            (r'\d+', Number.Integer),
-            # color literals
-            (r'#[\da-fA-F]{3,6}', Number.Integer),
-
-            # string literals
-            (r'"', String.Double, 'string'),
-            # char literal, should be checked because this is the regexp from
-            # the caml lexer
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
-             String.Char),
-
-            # this is meant to deal with embedded exprs in strings
-            # every time we find a '}' we pop a state so that if we were
-            # inside a string, we are back in the string state
-            # as a consequence, we must also push a state every time we find a
-            # '{' or else we will have errors when parsing {} for instance
-            (r'{', Operator, '#push'),
-            (r'}', Operator, '#pop'),
-
-            # html literals
-            # this is a much more strict that the actual parser,
-            # since a<b would not be parsed as html
-            # but then again, the parser is way too lax, and we can't hope
-            # to have something as tolerant
-            (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
-            # db path
-            # matching the '[_]' in '/a[_]' because it is a part
-            # of the syntax of the db path definition
-            # unfortunately, i don't know how to match the ']' in
-            # /a[1], so this is somewhat inconsistent
-            (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
-            # putting the same color on <- as on db path, since
-            # it can be used only to mean Db.write
-            (r'<-(?!'+op_re+r')', Name.Variable),
-
-            # 'modules'
-            # although modules are not distinguished by their names as in caml
-            # the standard library seems to follow the convention that modules
-            # only area capitalized
-            (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
-            # operators
-            # = has a special role because this is the only
-            # way to syntactic distinguish binding constructions
-            # unfortunately, this colors the equal in {x=2} too
-            (r'=(?!'+op_re+r')', Keyword),
-            (r'(%s)+' % op_re, Operator),
-            (r'(%s)+' % punc_re, Operator),
-
-            # coercions
-            (r':', Operator, 'type'),
-            # type variables
-            # we need this rule because we don't parse specially type
-            # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
-            ("'"+ident_re, Keyword.Type),
-
-            # id literal, #something, or #{expr}
-            (r'#'+ident_re, String.Single),
-            (r'#(?={)', String.Single),
-
-            # identifiers
-            # this avoids to color '2' in 'a2' as an integer
-            (ident_re, Text),
-
-            # default, not sure if that is needed or not
-            # (r'.', Text),
-        ],
-
-        # it is quite painful to have to parse types to know where they end
-        # this is the general rule for a type
-        # a type is either:
-        # * -> ty
-        # * type-with-slash
-        # * type-with-slash -> ty
-        # * type-with-slash (, type-with-slash)+ -> ty
-        #
-        # the code is pretty funky in here, but this code would roughly
-        # translate in caml to:
-        # let rec type stream =
-        # match stream with
-        # | [< "->";  stream >] -> type stream
-        # | [< "";  stream >] ->
-        #   type_with_slash stream
-        #   type_lhs_1 stream;
-        # and type_1 stream = ...
-        'type': [
-            include('comments-and-spaces'),
-            (r'->', Keyword.Type),
-            (r'', Keyword.Type, ('#pop', 'type-lhs-1', 'type-with-slash')),
-        ],
-
-        # parses all the atomic or closed constructions in the syntax of type
-        # expressions: record types, tuple types, type constructors, basic type
-        # and type variables
-        'type-1': [
-            include('comments-and-spaces'),
-            (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
-            (r'~?{', Keyword.Type, ('#pop', 'type-record')),
-            (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
-            (ident_re, Keyword.Type, '#pop'),
-            ("'"+ident_re, Keyword.Type),
-            # this case is not in the syntax but sometimes
-            # we think we are parsing types when in fact we are parsing
-            # some css, so we just pop the states until we get back into
-            # the root state
-            (r'', Keyword.Type, '#pop'),
-        ],
-
-        # type-with-slash is either:
-        # * type-1
-        # * type-1 (/ type-1)+
-        'type-with-slash': [
-            include('comments-and-spaces'),
-            (r'', Keyword.Type, ('#pop', 'slash-type-1', 'type-1')),
-        ],
-        'slash-type-1': [
-            include('comments-and-spaces'),
-            ('/', Keyword.Type, ('#pop', 'type-1')),
-            # same remark as above
-            (r'', Keyword.Type, '#pop'),
-        ],
-
-        # we go in this state after having parsed a type-with-slash
-        # while trying to parse a type
-        # and at this point we must determine if we are parsing an arrow
-        # type (in which case we must continue parsing) or not (in which
-        # case we stop)
-        'type-lhs-1': [
-            include('comments-and-spaces'),
-            (r'->', Keyword.Type, ('#pop', 'type')),
-            (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
-            (r'', Keyword.Type, '#pop'),
-        ],
-        'type-arrow': [
-            include('comments-and-spaces'),
-            # the look ahead here allows parsing f(x : int, y : float -> truc)
-            # correctly
-            (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
-            (r'->', Keyword.Type, ('#pop', 'type')),
-            # same remark as above
-            (r'', Keyword.Type, '#pop'),
-        ],
-
-        # no need to do precise parsing for tuples and records
-        # because they are closed constructions, so we can simply
-        # find the closing delimiter
-        # note that this function would be not work if the source
-        # contained identifiers like `{)` (although it could be patched
-        # to support it)
-        'type-tuple': [
-            include('comments-and-spaces'),
-            (r'[^\(\)/*]+', Keyword.Type),
-            (r'[/*]', Keyword.Type),
-            (r'\(', Keyword.Type, '#push'),
-            (r'\)', Keyword.Type, '#pop'),
-        ],
-        'type-record': [
-            include('comments-and-spaces'),
-            (r'[^{}/*]+', Keyword.Type),
-            (r'[/*]', Keyword.Type),
-            (r'{', Keyword.Type, '#push'),
-            (r'}', Keyword.Type, '#pop'),
-        ],
-
-#        'type-tuple': [
-#            include('comments-and-spaces'),
-#            (r'\)', Keyword.Type, '#pop'),
-#            (r'', Keyword.Type, ('#pop', 'type-tuple-1', 'type-1')),
-#        ],
-#        'type-tuple-1': [
-#            include('comments-and-spaces'),
-#            (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
-#            (r',', Keyword.Type, 'type-1'),
-#        ],
-#        'type-record':[
-#            include('comments-and-spaces'),
-#            (r'}', Keyword.Type, '#pop'),
-#            (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
-#        ],
-#        'type-record-field-expr': [
-#
-#        ],
-
-        'nested-comment': [
-            (r'[^/*]+', Comment),
-            (r'/\*', Comment, '#push'),
-            (r'\*/', Comment, '#pop'),
-            (r'[/*]', Comment),
-        ],
-
-        # the copy pasting between string and single-string
-        # is kinda sad. Is there a way to avoid that??
-        'string': [
-            (r'[^\\"{]+', String.Double),
-            (r'"', String.Double, '#pop'),
-            (r'{', Operator, 'root'),
-            include('escape-sequence'),
-        ],
-        'single-string': [
-            (r'[^\\\'{]+', String.Double),
-            (r'\'', String.Double, '#pop'),
-            (r'{', Operator, 'root'),
-            include('escape-sequence'),
-        ],
-
-        # all the html stuff
-        # can't really reuse some existing html parser
-        # because we must be able to parse embedded expressions
-
-        # we are in this state after someone parsed the '<' that
-        # started the html literal
-        'html-open-tag': [
-            (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
-            (r'>', String.Single, ('#pop', 'html-content')),
-        ],
-
-        # we are in this state after someone parsed the '</' that
-        # started the end of the closing tag
-        'html-end-tag': [
-            # this is a star, because </> is allowed
-            (r'[\w\-:]*>', String.Single, '#pop'),
-        ],
-
-        # we are in this state after having parsed '<ident(:ident)?'
-        # we thus parse a possibly empty list of attributes
-        'html-attr': [
-            (r'\s+', Text),
-            (r'[\w\-:]+=', String.Single, 'html-attr-value'),
-            (r'/>', String.Single, '#pop'),
-            (r'>', String.Single, ('#pop', 'html-content')),
-        ],
-
-        'html-attr-value': [
-            (r"'", String.Single, ('#pop', 'single-string')),
-            (r'"', String.Single, ('#pop', 'string')),
-            (r'#'+ident_re, String.Single, '#pop'),
-            (r'#(?={)', String.Single, ('#pop', 'root')),
-            (r'[^"\'{`=<>]+', String.Single, '#pop'),
-            (r'{', Operator, ('#pop', 'root')), # this is a tail call!
-        ],
-
-        # we should probably deal with '\' escapes here
-        'html-content': [
-            (r'<!--', Comment, 'html-comment'),
-            (r'</', String.Single, ('#pop', 'html-end-tag')),
-            (r'<', String.Single, 'html-open-tag'),
-            (r'{', Operator, 'root'),
-            (r'[^<{]+', String.Single),
-        ],
-
-        'html-comment': [
-            (r'-->', Comment, '#pop'),
-            (r'[^\-]+|-', Comment),
-        ],
-    }
-
-
-class CoqLexer(RegexLexer):
-    """
-    For the `Coq <http://coq.inria.fr/>`_ theorem prover.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Coq'
-    aliases = ['coq']
-    filenames = ['*.v']
-    mimetypes = ['text/x-coq']
-
-    keywords1 = [
-        # Vernacular commands
-        'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
-        'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
-        'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
-        'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
-        'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
-        'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
-        'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
-        'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
-        'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
-        'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
-        'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
-        'outside',
-    ]
-    keywords2 = [
-        # Gallina
-        'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
-        'match', 'end',  'in', 'return', 'let', 'if', 'is', 'then', 'else',
-        'for', 'of', 'nosimpl', 'with', 'as',
-    ]
-    keywords3 = [
-        # Sorts
-        'Type', 'Prop',
-    ]
-    keywords4 = [
-        # Tactics
-        'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
-        'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
-        'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
-        'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
-        'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
-        'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
-        'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
-        'split', 'left', 'right', 'autorewrite',
-    ]
-    keywords5 = [
-        # Terminators
-        'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
-        'assumption', 'solve', 'contradiction', 'discriminate',
-    ]
-    keywords6 = [
-        # Control
-        'do', 'last', 'first', 'try', 'idtac', 'repeat',
-    ]
-      # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
-      # 'downto', 'else', 'end', 'exception', 'external', 'false',
-      # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
-      # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
-      # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
-      # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
-      # 'type', 'val', 'virtual', 'when', 'while', 'with'
-    keyopts = [
-        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
-        r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
-        '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
-        r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
-        r'/\\', r'\\/',
-        u'Π', u'λ',
-    ]
-    operators = r'[!$%&*+\./:<=>?@^|~-]'
-    word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
-    prefix_syms = r'[!?~]'
-    infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list',
-                  'array']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
-            (r'\(\*', Comment, 'comment'),
-            (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
-            (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
-            (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
-            (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
-            (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo),
-            (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved),
-            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
-             Name.Namespace, 'dotted'),
-            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
-            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
-            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
-            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
-            (r"[^\W\d][\w']*", Name),
-
-            (r'\d[\d_]*', Number.Integer),
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'0[oO][0-7][0-7_]*', Number.Oct),
-            (r'0[bB][01][01_]*', Number.Binary),
-            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'", Keyword), # a stray quote is another syntax element
-
-            (r'"', String.Double, 'string'),
-
-            (r'[~?][a-z][\w\']*:', Name.Variable),
-        ],
-        'comment': [
-            (r'[^(*)]+', Comment),
-            (r'\(\*', Comment, '#push'),
-            (r'\*\)', Comment, '#pop'),
-            (r'[(*)]', Comment),
-        ],
-        'string': [
-            (r'[^"]+', String.Double),
-            (r'""', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
-            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
-            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
-            (r'', Text, '#pop')
-        ],
-    }
-
-    def analyse_text(text):
-        if text.startswith('(*'):
-            return True
-
-
-class NewLispLexer(RegexLexer):
-    """
-    For `newLISP. <www.newlisp.org>`_ source code (version 10.3.0).
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'NewLisp'
-    aliases = ['newlisp']
-    filenames = ['*.lsp', '*.nl']
-    mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
-    flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
-
-    # list of built-in functions for newLISP version 10.3
-    builtins = [
-        '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
-        '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
-        '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
-        '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
-        'acos', 'acosh', 'add', 'address', 'amb', 'and',  'and', 'append-file',
-        'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
-        'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
-        'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
-        'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
-        'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
-        'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
-        'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
-        'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
-        'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
-        'def-new', 'default', 'define-macro', 'define-macro', 'define',
-        'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
-        'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
-        'doargs',  'dolist',  'dostring', 'dotimes',  'dotree', 'dump', 'dup',
-        'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
-        'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
-        'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
-        'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
-        'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
-        'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
-        'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
-        'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
-        'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
-        'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
-        'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
-        'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
-        'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
-        'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
-        'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
-        'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
-        'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
-        'net-send-to', 'net-send-udp', 'net-send', 'net-service',
-        'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
-        'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
-        'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
-        'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
-        'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
-        'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
-        'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
-        'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
-        'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
-        'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
-        'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
-        'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
-        'set-ref', 'set', 'setf',  'setq', 'sgn', 'share', 'signal', 'silent',
-        'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
-        'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
-        'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
-        'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
-        'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
-        'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
-        'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
-        'write', 'write-char', 'write-file', 'write-line', 'write',
-        'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
-    ]
-
-    # valid names
-    valid_name = r'([a-zA-Z0-9!$%&*+.,/<=>?@^_~|-])+|(\[.*?\])+'
-
-    tokens = {
-        'root': [
-            # shebang
-            (r'#!(.*?)$', Comment.Preproc),
-            # comments starting with semicolon
-            (r';.*$', Comment.Single),
-            # comments starting with #
-            (r'#.*$', Comment.Single),
-
-            # whitespace
-            (r'\s+', Text),
-
-            # strings, symbols and characters
-            (r'"(\\\\|\\"|[^"])*"', String),
-
-            # braces
-            (r"{", String, "bracestring"),
-
-            # [text] ... [/text] delimited strings
-            (r'\[text\]*', String, "tagstring"),
-
-            # 'special' operators...
-            (r"('|:)", Operator),
-
-            # highlight the builtins
-            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
-             Keyword),
-
-            # the remaining functions
-            (r'(?<=\()' + valid_name, Name.Variable),
-
-            # the remaining variables
-            (valid_name, String.Symbol),
-
-            # parentheses
-            (r'(\(|\))', Punctuation),
-        ],
-
-        # braced strings...
-        'bracestring': [
-             ("{", String, "#push"),
-             ("}", String, "#pop"),
-             ("[^{}]+", String),
-        ],
-
-        # tagged [text]...[/text] delimited strings...
-        'tagstring': [
-            (r'(?s)(.*?)(\[/text\])', String, '#pop'),
-        ],
-    }
-
-
-class ElixirLexer(RegexLexer):
-    """
-    For the `Elixir language <http://elixir-lang.org>`_.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Elixir'
-    aliases = ['elixir', 'ex', 'exs']
-    filenames = ['*.ex', '*.exs']
-    mimetypes = ['text/x-elixir']
-
-    def gen_elixir_sigil_rules():
-        states = {}
-
-        states['strings'] = [
-            (r'(%[A-Ba-z])?"""(?:.|\n)*?"""', String.Doc),
-            (r"'''(?:.|\n)*?'''", String.Doc),
-            (r'"', String.Double, 'dqs'),
-            (r"'.*'", String.Single),
-            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
-             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
-        ]
-
-        for lbrace, rbrace, name, in ('\\{', '\\}', 'cb'), \
-                                     ('\\[', '\\]', 'sb'), \
-                                     ('\\(', '\\)', 'pa'), \
-                                     ('\\<', '\\>', 'lt'):
-
-            states['strings'] += [
-                (r'%[a-z]' + lbrace, String.Double, name + 'intp'),
-                (r'%[A-Z]' + lbrace, String.Double, name + 'no-intp')
-            ]
-
-            states[name +'intp'] = [
-                (r'' + rbrace + '[a-z]*', String.Double, "#pop"),
-                include('enddoublestr')
-            ]
-
-            states[name +'no-intp'] = [
-                (r'.*' + rbrace + '[a-z]*', String.Double , "#pop")
-            ]
-
-        return states
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'#.*$', Comment.Single),
-            (r'\b(case|cond|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
-             r'defp?|defprotocol|defimpl|defrecord|defmacrop?|defdelegate|'
-             r'defexception|exit|raise|throw|unless|after|rescue|catch|else)\b(?![?!])|'
-             r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
-            (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
-                Keyword.Namespace),
-            (r'(?<!\.)\b(and|not|or|when|xor|in)\b', Operator.Word),
-            (r'%=|\*=|\*\*=|\+=|\-=|\^=|\|\|=|'
-             r'<=>|<(?!<|=)|>(?!<|=|>)|<=|>=|===|==|=~|!=|!~|(?=[ \t])\?|'
-             r'(?<=[ \t])!+|&&|\|\||\^|\*|\+|\-|/|'
-             r'\||\+\+|\-\-|\*\*|\/\/|\<\-|\<\>|<<|>>|=|\.', Operator),
-            (r'(?<!:)(:)([a-zA-Z_]\w*([?!]|=(?![>=]))?|\<\>|===?|>=?|<=?|'
-             r'<=>|&&?|%\(\)|%\[\]|%\{\}|\+\+?|\-\-?|\|\|?|\!|//|[%&`/\|]|'
-             r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
-            (r':"', String.Symbol, 'interpoling_symbol'),
-            (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
-            (r'\b(__(FILE|LINE|MODULE|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
-            (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
-            (r'[(){};,/\|:\\\[\]]', Punctuation),
-            (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
-            (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
-             r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
-            (r'%r\/.*\/', String.Regex),
-            include('strings'),
-        ],
-        'dqs': [
-            (r'"', String.Double, "#pop"),
-            include('enddoublestr')
-        ],
-        'interpoling': [
-            (r'#{', String.Interpol, 'interpoling_string'),
-        ],
-        'interpoling_string' : [
-            (r'}', String.Interpol, "#pop"),
-            include('root')
-        ],
-        'interpoling_symbol': [
-            (r'"', String.Symbol, "#pop"),
-            include('interpoling'),
-            (r'[^#"]+', String.Symbol),
-        ],
-        'enddoublestr' : [
-            include('interpoling'),
-            (r'[^#"]+', String.Double),
-        ]
-    }
-    tokens.update(gen_elixir_sigil_rules())
-
-
-class ElixirConsoleLexer(Lexer):
-    """
-    For Elixir interactive console (iex) output like:
-
-    .. sourcecode:: iex
-
-        iex> [head | tail] = [1,2,3]
-        [1,2,3]
-        iex> head
-        1
-        iex> tail
-        [2,3]
-        iex> [head | tail]
-        [1,2,3]
-        iex> length [head | tail]
-        3
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Elixir iex session'
-    aliases = ['iex']
-    mimetypes = ['text/x-elixir-shellsession']
-
-    _prompt_re = re.compile('(iex|\.{3})> ')
-
-    def get_tokens_unprocessed(self, text):
-        exlexer = ElixirLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(text):
-            line = match.group()
-            if line.startswith(u'** '):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Error, line[:-1])]))
-                curcode += line[-1:]
-            else:
-                m = self._prompt_re.match(line)
-                if m is not None:
-                    end = m.end()
-                    insertions.append((len(curcode),
-                                       [(0, Generic.Prompt, line[:end])]))
-                    curcode += line[end:]
-                else:
-                    if curcode:
-                        for item in do_insertions(insertions,
-                                        exlexer.get_tokens_unprocessed(curcode)):
-                            yield item
-                        curcode = ''
-                        insertions = []
-                    yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      exlexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class KokaLexer(RegexLexer):
-    """
-    Lexer for the `Koka <http://research.microsoft.com/en-us/projects/koka/>`_
-    language.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Koka'
-    aliases = ['koka']
-    filenames = ['*.kk', '*.kki']
-    mimetypes = ['text/x-koka']
-
-    keywords = [
-        'infix', 'infixr', 'infixl', 'prefix', 'postfix',
-        'type', 'cotype', 'rectype', 'alias',
-        'struct', 'con',
-        'fun', 'function', 'val', 'var',
-        'external',
-        'if', 'then', 'else', 'elif', 'return', 'match',
-        'private', 'public', 'private',
-        'module', 'import', 'as',
-        'include', 'inline',
-        'rec',
-        'try', 'yield', 'enum',
-        'interface', 'instance',
-    ]
-
-    # keywords that are followed by a type
-    typeStartKeywords = [
-        'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
-    ]
-
-    # keywords valid in a type
-    typekeywords = [
-        'forall', 'exists', 'some', 'with',
-    ]
-
-    # builtin names and special names
-    builtin = [
-        'for', 'while', 'repeat',
-        'foreach', 'foreach-indexed',
-        'error', 'catch', 'finally',
-        'cs', 'js', 'file', 'ref', 'assigned',
-    ]
-
-    # symbols that can be in an operator
-    symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+'
-
-    # symbol boundary: an operator keyword should not be followed by any of these
-    sboundary = '(?!'+symbols+')'
-
-    # name boundary: a keyword should not be followed by any of these
-    boundary = '(?![a-zA-Z0-9_\\-])'
-
-    # main lexer
-    tokens = {
-        'root': [
-            include('whitespace'),
-
-            # go into type mode
-            (r'::?' + sboundary, Keyword.Type, 'type'),
-            (r'alias' + boundary, Keyword, 'alias-type'),
-            (r'struct' + boundary, Keyword, 'struct-type'),
-            (r'(%s)' % '|'.join(typeStartKeywords) + boundary, Keyword, 'type'),
-
-            # special sequences of tokens (we use ?: for non-capturing group as
-            # required by 'bygroups')
-            (r'(module)(\s*)((?:interface)?)(\s*)'
-             r'((?:[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*\.)*'
-             r'[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Namespace)),
-            (r'(import)(\s+)((?:[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*\.)*[a-z]'
-             r'(?:[a-zA-Z0-9_]|\-[a-zA-Z])*)(\s*)((?:as)?)'
-             r'((?:[A-Z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)?)',
-             bygroups(Keyword, Text, Name.Namespace, Text, Keyword,
-                      Name.Namespace)),
-
-            # keywords
-            (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
-            (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
-            (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
-            (r'::|:=|\->|[=\.:]' + sboundary, Keyword),
-            (r'\-' + sboundary, Generic.Strong),
-
-            # names
-            (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?=\.)', Name.Namespace),
-            (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?!\.)', Name.Class),
-            (r'[a-z]([a-zA-Z0-9_]|\-[a-zA-Z])*', Name),
-            (r'_([a-zA-Z0-9_]|\-[a-zA-Z])*', Name.Variable),
-
-            # literal string
-            (r'@"', String.Double, 'litstring'),
-
-            # operators
-            (symbols, Operator),
-            (r'`', Operator),
-            (r'[\{\}\(\)\[\];,]', Punctuation),
-
-            # literals. No check for literal characters with len > 1
-            (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float),
-            (r'0[xX][0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-
-            (r"'", String.Char, 'char'),
-            (r'"', String.Double, 'string'),
-        ],
-
-        # type started by alias
-        'alias-type': [
-            (r'=',Keyword),
-            include('type')
-        ],
-
-        # type started by struct
-        'struct-type': [
-            (r'(?=\((?!,*\)))',Punctuation, '#pop'),
-            include('type')
-        ],
-
-        # type started by colon
-        'type': [
-            (r'[\(\[<]', Keyword.Type, 'type-nested'),
-            include('type-content')
-        ],
-
-        # type nested in brackets: can contain parameters, comma etc.
-        'type-nested': [
-            (r'[\)\]>]', Keyword.Type, '#pop'),
-            (r'[\(\[<]', Keyword.Type, 'type-nested'),
-            (r',', Keyword.Type),
-            (r'([a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)(\s*)(:)(?!:)',
-             bygroups(Name.Variable,Text,Keyword.Type)),  # parameter name
-            include('type-content')
-        ],
-
-        # shared contents of a type
-        'type-content': [
-            include('whitespace'),
-
-            # keywords
-            (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
-            (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
-             Keyword, '#pop'),  # need to match because names overlap...
-
-            # kinds
-            (r'[EPH]' + boundary, Keyword.Type),
-            (r'[*!]', Keyword.Type),
-
-            # type names
-            (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?=\.)', Name.Namespace),
-            (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?!\.)', Name.Class),
-            (r'[a-z][0-9]*(?![a-zA-Z_\-])', Keyword.Type),   # Generic.Emph
-            (r'_([a-zA-Z0-9_]|\-[a-zA-Z])*', Keyword.Type),  # Generic.Emph
-            (r'[a-z]([a-zA-Z0-9_]|\-[a-zA-Z])*', Keyword.Type),
-
-            # type keyword operators
-            (r'::|\->|[\.:|]', Keyword.Type),
-
-            #catchall
-            (r'', Text, '#pop')
-        ],
-
-        # comments and literals
-        'whitespace': [
-            (r'\s+', Text),
-            (r'/\*', Comment.Multiline, 'comment'),
-            (r'//.*$', Comment.Single)
-        ],
-        'comment': [
-            (r'[^/\*]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[\*/]', Comment.Multiline),
-        ],
-        'litstring': [
-            (r'[^"]+', String.Double),
-            (r'""', String.Escape),
-            (r'"', String.Double, '#pop'),
-        ],
-        'string': [
-            (r'[^\\"\n]+', String.Double),
-            include('escape-sequence'),
-            (r'["\n]', String.Double, '#pop'),
-        ],
-        'char': [
-            (r'[^\\\'\n]+', String.Char),
-            include('escape-sequence'),
-            (r'[\'\n]', String.Char, '#pop'),
-        ],
-        'escape-sequence': [
-            (r'\\[abfnrtv0\\\"\'\?]', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-            (r'\\u[0-9a-fA-F]{4}', String.Escape),
-            # Yes, \U literals are 6 hex digits.
-            (r'\\U[0-9a-fA-F]{6}', String.Escape)
-        ]
-    }
-
diff --git a/python/ext-libs/pygments/lexers/hdl.py b/python/ext-libs/pygments/lexers/hdl.py
deleted file mode 100644
index 57ffc34..0000000
--- a/python/ext-libs/pygments/lexers/hdl.py
+++ /dev/null
@@ -1,356 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.hdl
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for hardware descriptor languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-from pygments.lexer import RegexLexer, bygroups, include, using, this
-from pygments.token import \
-     Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
-     Error
-
-__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
-
-
-class VerilogLexer(RegexLexer):
-    """
-    For verilog source code with preprocessor directives.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'verilog'
-    aliases = ['verilog', 'v']
-    filenames = ['*.v']
-    mimetypes = ['text/x-verilog']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
-    tokens = {
-        'root': [
-            (r'^\s*`define', Comment.Preproc, 'macro'),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'[{}#@]', Punctuation),
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
-            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),   # should be binary
-            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
-            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
-            (r'\'[01xz]', Number),
-            (r'\d+[Ll]?', Number.Integer),
-            (r'\*/', Error),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.;\']', Punctuation),
-            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
-
-            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
-            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
-             'import'),
-
-            (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
-             r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
-             r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
-             r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
-             r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
-             r'generate|genvar|highz0|highz1|if|initial|inout|input|'
-             r'integer|join|large|localparam|macromodule|medium|module|'
-             r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
-             r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
-             r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
-             r'rtranif1|scalared|signed|small|specify|specparam|strength|'
-             r'string|strong0|strong1|struct|table|task|'
-             r'tran|tranif0|tranif1|type|typedef|'
-             r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
-             r'xnor|xor)\b', Keyword),
-
-            (r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|'
-             r'else|elsif|endcelldefine|endif|endprotect|endprotected|'
-             r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|'
-             r'noremove_gatenames|noremove_netnames|nounconnected_drive|'
-             r'protect|protected|remove_gatenames|remove_netnames|resetall|'
-             r'timescale|unconnected_drive|undef)\b', Comment.Preproc),
-
-            (r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|'
-             r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|'
-             r'getpattern|history|incsave|input|itor|key|list|log|'
-             r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|'
-             r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|'
-             r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|'
-             r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|'
-             r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin),
-
-            (r'(byte|shortint|int|longint|integer|time|'
-             r'bit|logic|reg|'
-             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
-             r'shortreal|real|realtime)\b', Keyword.Type),
-            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
-        ]
-    }
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            # Convention: mark all upper case names as constants
-            if token is Name:
-                if value.isupper():
-                    token = Name.Constant
-            yield index, token, value
-
-
-class SystemVerilogLexer(RegexLexer):
-    """
-    Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
-    1800-2009 standard.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'systemverilog'
-    aliases = ['systemverilog', 'sv']
-    filenames = ['*.sv', '*.svh']
-    mimetypes = ['text/x-systemverilog']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
-    tokens = {
-        'root': [
-            (r'^\s*`define', Comment.Preproc, 'macro'),
-            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
-            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'),
-
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'[{}#@]', Punctuation),
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
-            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),   # should be binary
-            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
-            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
-            (r'\'[01xz]', Number),
-            (r'\d+[Ll]?', Number.Integer),
-            (r'\*/', Error),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.;\']', Punctuation),
-            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
-
-            (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
-             r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
-             r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
-             r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
-             r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
-             r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
-             r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
-             r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
-             r'endprogram|endproperty|endsequence|endspecify|endtable|'
-             r'endtask|enum|event|eventually|expect|export|extends|extern|'
-             r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
-             r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
-             r'ignore_bins|illegal_bins|implies|import|incdir|include|'
-             r'initial|inout|input|inside|instance|int|integer|interface|'
-             r'intersect|join|join_any|join_none|large|let|liblist|library|'
-             r'local|localparam|logic|longint|macromodule|matches|medium|'
-             r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
-             r'not|notif0|notif1|null|or|output|package|packed|parameter|'
-             r'pmos|posedge|primitive|priority|program|property|protected|'
-             r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
-             r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
-             r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
-             r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
-             r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
-             r'showcancelled|signed|small|solve|specify|specparam|static|'
-             r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
-             r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
-             r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
-             r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
-             r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
-             r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
-             r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword ),
-
-            (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
-             r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
-             r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
-             r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
-             Comment.Preproc ),
-
-            (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
-             r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
-             r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
-             r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
-             r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
-             r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
-             r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
-             r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
-             r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
-             r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
-             r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
-             r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
-             r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
-             r'\$writememh|\$writeo)\b' , Name.Builtin ),
-
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(byte|shortint|int|longint|integer|time|'
-             r'bit|logic|reg|'
-             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
-             r'shortreal|real|realtime)\b', Keyword.Type),
-            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'classname': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
-        ]
-    }
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            # Convention: mark all upper case names as constants
-            if token is Name:
-                if value.isupper():
-                    token = Name.Constant
-            yield index, token, value
-
-    def analyse_text(text):
-        if text.startswith('//') or text.startswith('/*'):
-            return 0.5
-
-
-class VhdlLexer(RegexLexer):
-    """
-    For VHDL source code.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'vhdl'
-    aliases = ['vhdl']
-    filenames = ['*.vhdl', '*.vhd']
-    mimetypes = ['text/x-vhdl']
-    flags = re.MULTILINE | re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
-            (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute),
-            (r'[()\[\],.;\']', Punctuation),
-            (r'"[^\n\\]*"', String),
-
-            (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Keyword, Text, Name.Namespace)),
-            (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
-            (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)',
-             bygroups(Keyword, Text, Name.Namespace)),
-            (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Keyword, Text, Name.Class)),
-            (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)'
-             r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)',
-             bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
-                      Name.Class, Text, Keyword)),
-
-            (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),
-
-            include('types'),
-            include('keywords'),
-            include('numbers'),
-
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'endblock': [
-            include('keywords'),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class),
-            (r'(\s+)', Text),
-            (r';', Punctuation, '#pop'),
-        ],
-        'types': [
-            (r'(boolean|bit|character|severity_level|integer|time|delay_length|'
-             r'natural|positive|string|bit_vector|file_open_kind|'
-             r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|'
-             r'std_logic_vector)\b', Keyword.Type),
-        ],
-        'keywords': [
-            (r'(abs|access|after|alias|all|and|'
-             r'architecture|array|assert|attribute|begin|block|'
-             r'body|buffer|bus|case|component|configuration|'
-             r'constant|disconnect|downto|else|elsif|end|'
-             r'entity|exit|file|for|function|generate|'
-             r'generic|group|guarded|if|impure|in|'
-             r'inertial|inout|is|label|library|linkage|'
-             r'literal|loop|map|mod|nand|new|'
-             r'next|nor|not|null|of|on|'
-             r'open|or|others|out|package|port|'
-             r'postponed|procedure|process|pure|range|record|'
-             r'register|reject|return|rol|ror|select|'
-             r'severity|signal|shared|sla|sli|sra|'
-             r'srl|subtype|then|to|transport|type|'
-             r'units|until|use|variable|wait|when|'
-             r'while|with|xnor|xor)\b', Keyword),
-        ],
-        'numbers': [
-            (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer),
-            (r'[0-1_]+(\.[0-1_])', Number.Integer),
-            (r'\d+', Number.Integer),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
-            (r'H"[0-9a-fA-F_]+"', Number.Oct),
-            (r'O"[0-7_]+"', Number.Oct),
-            (r'B"[0-1_]+"', Number.Oct),
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/jvm.py b/python/ext-libs/pygments/lexers/jvm.py
deleted file mode 100644
index 717621e..0000000
--- a/python/ext-libs/pygments/lexers/jvm.py
+++ /dev/null
@@ -1,1109 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.jvm
-    ~~~~~~~~~~~~~~~~~~~
-
-    Pygments lexers for JVM languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
-     this
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-     Number, Punctuation
-from pygments.util import get_choice_opt
-from pygments import unistring as uni
-
-
-__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
-           'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'KotlinLexer',
-           'XtendLexer', 'AspectJLexer', 'CeylonLexer']
-
-
-class JavaLexer(RegexLexer):
-    """
-    For `Java <http://www.sun.com/java/>`_ source code.
-    """
-
-    name = 'Java'
-    aliases = ['java']
-    filenames = ['*.java']
-    mimetypes = ['text/x-java']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]<>]*\s+)+?)' # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'                      # method name
-             r'(\s*)(\()',                                    # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
-             r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
-             Keyword),
-            (r'(abstract|const|enum|extends|final|implements|native|private|'
-             r'protected|public|static|strictfp|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Declaration),
-            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
-             Keyword.Type),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
-            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
-        ],
-    }
-
-
-class AspectJLexer(JavaLexer):
-    """
-    For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'AspectJ'
-    aliases = ['aspectj']
-    filenames = ['*.aj']
-    mimetypes = ['text/x-aspectj']
-
-    aj_keywords = [
-        'aspect', 'pointcut', 'privileged', 'call', 'execution',
-        'initialization', 'preinitialization', 'handler', 'get', 'set',
-        'staticinitialization', 'target', 'args', 'within', 'withincode',
-        'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
-        'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
-        'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
-        'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
-        'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
-        'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
-    ]
-    aj_inter_type = ['parents:', 'warning:', 'error:', 'soft:', 'precedence:']
-    aj_inter_type_annotation = ['@type', '@method', '@constructor', '@field']
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
-            if token is Name and value in self.aj_keywords:
-                yield index, Keyword, value
-            elif token is Name.Label and value in self.aj_inter_type:
-                yield index, Keyword, value[:-1]
-                yield index, Operator, value[-1]
-            elif token is Name.Decorator and value in self.aj_inter_type_annotation:
-                yield index, Keyword, value
-            else:
-                yield index, token, value
-
-
-class ScalaLexer(RegexLexer):
-    """
-    For `Scala <http://www.scala-lang.org>`_ source code.
-    """
-
-    name = 'Scala'
-    aliases = ['scala']
-    filenames = ['*.scala']
-    mimetypes = ['text/x-scala']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    # don't use raw unicode strings!
-    op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
-          u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
-          u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
-          u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
-          u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
-          u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
-          u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
-          u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
-          u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
-          u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
-          u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
-          u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
-          u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
-          u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
-          u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
-
-    letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
-              u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
-              u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
-              u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
-              u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
-              u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
-              u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
-              u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
-              u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
-              u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
-              u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
-              u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
-              u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
-              u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
-              u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
-              u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
-              u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
-              u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
-              u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
-              u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
-              u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
-              u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
-              u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
-              u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
-              u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
-              u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
-              u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
-              u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
-              u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
-              u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
-              u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
-              u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
-              u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
-
-    upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
-             u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
-             u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
-             u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
-             u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
-             u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
-             u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
-             u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
-             u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
-             u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
-             u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
-             u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
-             u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
-             u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
-             u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
-             u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
-             u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
-             u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
-             u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
-             u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
-             u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
-             u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
-             u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
-             u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
-             u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
-             u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
-             u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
-             u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
-             u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
-             u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
-             u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
-             u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
-             u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
-             u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
-             u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
-             u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
-             u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
-             u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
-             u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
-             u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
-             u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
-             u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
-             u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
-             u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
-             u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
-             u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
-             u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
-             u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
-             u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
-             u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
-             u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
-             u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
-             u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
-             u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
-             u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
-             u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
-             u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
-             u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
-             u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
-             u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
-             u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
-
-    idrest = ur'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
-
-    tokens = {
-        'root': [
-            # method names
-            (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
-            (ur"'%s" % idrest, Text.Symbol),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'comment'),
-            (ur'@%s' % idrest, Name.Decorator),
-            (ur'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
-             ur'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
-             ur'lazy|match|new|override|pr(?:ivate|otected)'
-             ur'|re(?:quires|turn)|s(?:ealed|uper)|'
-             ur't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
-             u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
-            (ur':(?!%s)' % op, Keyword, 'type'),
-            (ur'%s%s\b' % (upper, idrest), Name.Class),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
-            (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
-            (r'""".*?"""(?!")', String),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-#            (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
-#             Name.Attribute)),
-            (idrest, Name),
-            (r'`[^`]+`', Name),
-            (r'\[', Operator, 'typeparam'),
-            (r'[\(\)\{\};,.#]', Operator),
-            (op, Operator),
-            (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
-             Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'class': [
-            (ur'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
-             bygroups(Name.Class, Text, Operator), 'typeparam'),
-            (r'\s+', Text),
-            (r'{', Operator, '#pop'),
-            (r'\(', Operator, '#pop'),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (ur'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
-        ],
-        'type': [
-            (r'\s+', Text),
-            (u'<[%:]|>:|[#_\u21D2]|forSome|type', Keyword),
-            (r'([,\);}]|=>|=)(\s*)', bygroups(Operator, Text), '#pop'),
-            (r'[\(\{]', Operator, '#push'),
-            (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
-             (idrest, op, idrest, op),
-             bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
-            (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
-             (idrest, op, idrest, op),
-             bygroups(Keyword.Type, Text), '#pop'),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
-        ],
-        'typeparam': [
-            (r'[\s,]+', Text),
-            (u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
-            (r'([\]\)\}])', Operator, '#pop'),
-            (r'[\(\[\{]', Operator, '#push'),
-            (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
-        ],
-        'comment': [
-            (r'[^/\*]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ],
-        'import': [
-            (ur'(%s|\.)+' % idrest, Name.Namespace, '#pop')
-        ],
-    }
-
-
-class GosuLexer(RegexLexer):
-    """
-    For Gosu source code.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Gosu'
-    aliases = ['gosu']
-    filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
-    mimetypes = ['text/x-gosu']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # modifiers etc.
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
-             r'(\s*)(\()',                                  # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
-             r'index|while|do|continue|break|return|try|catch|finally|this|'
-             r'throw|new|switch|case|default|eval|super|outer|classpath|'
-             r'using)\b', Keyword),
-            (r'(var|delegate|construct|function|private|internal|protected|'
-             r'public|abstract|override|final|static|extends|transient|'
-             r'implements|represents|readonly)\b', Keyword.Declaration),
-            (r'(property\s+)(get|set)?', Keyword.Declaration),
-            (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
-             Keyword.Type),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
-            (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
-            (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Keyword.Declaration, Text, Name.Class)),
-            (r'(uses)(\s+)([a-zA-Z0-9_.]+\*?)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace)),
-            (r'"', String, 'string'),
-            (r'(\??[\.#])([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Name.Attribute)),
-            (r'(:)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'and|or|not|[\\~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'[0-9]+', Number.Integer),
-            (r'\n', Text)
-        ],
-        'templateText': [
-          (r'(\\<)|(\\\$)', String),
-          (r'(<%@\s+)(extends|params)',
-           bygroups(Operator, Name.Decorator), 'stringTemplate'),
-          (r'<%!--.*?--%>', Comment.Multiline),
-          (r'(<%)|(<%=)', Operator, 'stringTemplate'),
-          (r'\$\{', Operator, 'stringTemplateShorthand'),
-          (r'.', String)
-        ],
-        'string': [
-          (r'"', String, '#pop'),
-          include('templateText')
-        ],
-        'stringTemplate': [
-          (r'"', String, 'string'),
-          (r'%>', Operator, '#pop'),
-          include('root')
-        ],
-        'stringTemplateShorthand': [
-          (r'"', String, 'string'),
-          (r'\{', Operator, 'stringTemplateShorthand'),
-          (r'\}', Operator, '#pop'),
-          include('root')
-        ],
-    }
-
-
-class GosuTemplateLexer(Lexer):
-    """
-    For Gosu templates.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Gosu Template'
-    aliases = ['gst']
-    filenames = ['*.gst']
-    mimetypes = ['text/x-gosu-template']
-    lexer = GosuLexer()
-
-    def get_tokens_unprocessed(self, text):
-        stack = ['templateText']
-        for item in self.lexer.get_tokens_unprocessed(text, stack):
-            yield item
-
-
-class GroovyLexer(RegexLexer):
-    """
-    For `Groovy <http://groovy.codehaus.org/>`_ source code.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Groovy'
-    aliases = ['groovy']
-    filenames = ['*.groovy']
-    mimetypes = ['text/x-groovy']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
-             r'(\s*)(\()',                                  # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
-             r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
-             Keyword),
-            (r'(abstract|const|enum|extends|final|implements|native|private|'
-             r'protected|public|static|strictfp|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Declaration),
-            (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
-             Keyword.Type),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
-             'class'),
-            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'\$/((?!/\$).)*/\$', String),
-            (r'/(\\\\|\\"|[^/])*/', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
-        ],
-    }
-
-
-class IokeLexer(RegexLexer):
-    """
-    For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
-    prototype based programming language) source.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'Ioke'
-    filenames = ['*.ik']
-    aliases = ['ioke', 'ik']
-    mimetypes = ['text/x-iokesrc']
-    tokens = {
-        'interpolatableText': [
-            (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
-             r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
-            (r'#{', Punctuation, 'textInterpolationRoot')
-            ],
-
-        'text': [
-            (r'(?<!\\)"', String, '#pop'),
-            include('interpolatableText'),
-            (r'[^"]', String)
-            ],
-
-        'documentation': [
-            (r'(?<!\\)"', String.Doc, '#pop'),
-            include('interpolatableText'),
-            (r'[^"]', String.Doc)
-            ],
-
-        'textInterpolationRoot': [
-            (r'}', Punctuation, '#pop'),
-            include('root')
-            ],
-
-        'slashRegexp': [
-            (r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
-            include('interpolatableText'),
-            (r'\\/', String.Regex),
-            (r'[^/]', String.Regex)
-            ],
-
-        'squareRegexp': [
-            (r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
-            include('interpolatableText'),
-            (r'\\]', String.Regex),
-            (r'[^\]]', String.Regex)
-            ],
-
-        'squareText': [
-            (r'(?<!\\)]', String, '#pop'),
-            include('interpolatableText'),
-            (r'[^\]]', String)
-            ],
-
-        'root': [
-            (r'\n', Text),
-            (r'\s+', Text),
-
-            # Comments
-            (r';(.*?)\n', Comment),
-            (r'\A#!(.*?)\n', Comment),
-
-            #Regexps
-            (r'#/', String.Regex, 'slashRegexp'),
-            (r'#r\[', String.Regex, 'squareRegexp'),
-
-            #Symbols
-            (r':[a-zA-Z0-9_!:?]+', String.Symbol),
-            (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
-            (r':"(\\\\|\\"|[^"])*"', String.Symbol),
-
-            #Documentation
-            (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
-             r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
-             r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
-
-            #Text
-            (r'"', String, 'text'),
-            (r'#\[', String, 'squareText'),
-
-            #Mimic
-            (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
-
-            #Assignment
-            (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
-             Name.Variable),
-
-            # keywords
-            (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
-             r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
-             r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            # Origin
-            (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
-
-            # Base
-            (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
-             r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword),
-
-            # Ground
-            (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
-
-            #DefaultBehaviour Literals
-            (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            #DefaultBehaviour Case
-            (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
-             r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            #DefaultBehaviour Reflection
-            (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
-             r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
-             r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword),
-
-            #DefaultBehaviour Aspects
-            (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            # DefaultBehaviour
-            (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword),
-            (r'(use|destructuring)', Keyword.Reserved),
-
-            #DefaultBehavior BaseBehavior
-            (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
-             r'documentation|identity|removeCell!|undefineCell)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword),
-
-            #DefaultBehavior Internal
-            (r'(internal:compositeRegexp|internal:concatenateText|'
-             r'internal:createDecimal|internal:createNumber|'
-             r'internal:createRegexp|internal:createText)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            #DefaultBehaviour Conditions
-            (r'(availableRestarts|bind|error\!|findRestart|handle|'
-             r'invokeRestart|rescue|restart|signal\!|warn\!)'
-             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
-            # constants
-            (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
-
-            # names
-            (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
-             r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
-             r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
-             r'Conditions|Definitions|FlowControl|Internal|Literals|'
-             r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
-             r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
-             r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
-             r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
-             r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
-             r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
-
-            # functions
-            (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
-             ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
-             ur'(?![a-zA-Z0-9!:_?])', Name.Function),
-
-            # Numbers
-            (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
-            (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'-?\d+', Number.Integer),
-
-            (r'#\(', Punctuation),
-
-             # Operators
-            (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
-             ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
-             ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
-             ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
-             ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
-             ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
-             ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
-            (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
-             Operator),
-
-            # Punctuation
-            (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|{|})', Punctuation),
-
-            #kinds
-            (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
-
-            #default cellnames
-            (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
-        ]
-    }
-
-
-class ClojureLexer(RegexLexer):
-    """
-    Lexer for `Clojure <http://clojure.org/>`_ source code.
-
-    *New in Pygments 0.11.*
-    """
-    name = 'Clojure'
-    aliases = ['clojure', 'clj']
-    filenames = ['*.clj']
-    mimetypes = ['text/x-clojure', 'application/x-clojure']
-
-    special_forms = [
-        '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
-    ]
-
-    # It's safe to consider 'ns' a declaration thing because it defines a new
-    # namespace.
-    declarations = [
-        'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
-        'defstruct', 'defonce', 'declare', 'definline', 'definterface',
-        'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
-    ]
-
-    builtins = [
-        '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
-        'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
-        'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
-        'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
-        'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
-        'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
-        'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
-        'butlast', 'byte', 'cast', 'char', 'children', 'class',
-        'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
-        'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
-        'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
-        'cycle', 'dec',  'deref', 'difference', 'disj', 'dissoc', 'distinct',
-        'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
-        'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
-        'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
-        'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
-        'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
-        'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
-        'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
-        'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
-        'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
-        'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
-        'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
-        'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
-        'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
-        'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
-        'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
-        'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
-        'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
-        'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
-        'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
-        'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
-        'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
-        're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
-        'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
-        'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
-        'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
-        'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
-        'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
-        'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
-        'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
-        'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
-        'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
-        'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
-        'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
-        'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
-        'vector?', 'when', 'when-first', 'when-let', 'when-not',
-        'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
-        'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
-
-    # valid names for identifiers
-    # well, names can only not consist fully of numbers
-    # but this should be good enough for now
-
-    # TODO / should divide keywords/symbols into namespace/rest
-    # but that's hard, so just pretend / is part of the name
-    valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
-
-    def _multi_escape(entries):
-        return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
-
-    tokens = {
-        'root': [
-            # the comments - always starting with semicolon
-            # and going to the end of the line
-            (r';.*$', Comment.Single),
-
-            # whitespaces - usually not relevant
-            (r'[,\s]+', Text),
-
-            # numbers
-            (r'-?\d+\.\d+', Number.Float),
-            (r'-?\d+', Number.Integer),
-            (r'0x-?[abcdef\d]+', Number.Hex),
-
-            # strings, symbols and characters
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'" + valid_name, String.Symbol),
-            (r"\\(.|[a-z]+)", String.Char),
-
-            # keywords
-            (r'::?' + valid_name, String.Symbol),
-
-            # special operators
-            (r'~@|[`\'#^~&@]', Operator),
-
-            # highlight the special forms
-            (_multi_escape(special_forms), Keyword),
-
-            # Technically, only the special forms are 'keywords'. The problem
-            # is that only treating them as keywords means that things like
-            # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
-            # and weird for most styles. So, as a compromise we're going to
-            # highlight them as Keyword.Declarations.
-            (_multi_escape(declarations), Keyword.Declaration),
-
-            # highlight the builtins
-            (_multi_escape(builtins), Name.Builtin),
-
-            # the remaining functions
-            (r'(?<=\()' + valid_name, Name.Function),
-
-            # find the remaining variables
-            (valid_name, Name.Variable),
-
-            # Clojure accepts vector notation
-            (r'(\[|\])', Punctuation),
-
-            # Clojure accepts map notation
-            (r'(\{|\})', Punctuation),
-
-            # the famous parentheses!
-            (r'(\(|\))', Punctuation),
-        ],
-    }
-
-
-class TeaLangLexer(RegexLexer):
-    """
-    For `Tea <http://teatrove.org/>`_ source code. Only used within a
-    TeaTemplateLexer.
-
-    *New in Pygments 1.5.*
-    """
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
-             r'(\s*)(\()',                                  # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
-             Keyword),
-            (r'(as|call|define)\b', Keyword.Declaration),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
-            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r'\'(\\\\|\\\'|[^\'])*\'', String),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'template': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
-        ],
-    }
-
-
-class CeylonLexer(RegexLexer):
-    """
-    For `Ceylon <http://ceylon-lang.org/>`_ source code.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Ceylon'
-    aliases = ['ceylon']
-    filenames = ['*.ceylon']
-    mimetypes = ['text/x-ceylon']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
-             r'(\s*)(\()',                                  # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'(variable|shared|abstract|doc|by|formal|actual)',
-             Name.Decorator),
-            (r'(break|case|catch|continue|default|else|finally|for|in|'
-             r'variable|if|return|switch|this|throw|try|while|is|exists|'
-             r'nonempty|then|outer)\b', Keyword),
-            (r'(abstracts|extends|satisfies|adapts|'
-             r'super|given|of|out|assign|'
-             r'transient|volatile)\b', Keyword.Declaration),
-            (r'(function|value|void)\b',
-             Keyword.Type),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(class|interface|object)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'class'),
-            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Quoted),
-            (r"`\\.`|`[^\\]`|`\\u[0-9a-fA-F]{4}`", String.Char),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
-            (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
-            (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
-             Number.Float),
-            (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
-            (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
-             Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
-            (r'[0-9]+[kMGTP]?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\w+ \{([a-zA-Z,]+|\.\.\.)\}',
-             Name.Namespace, '#pop')
-        ],
-    }
-
-
-class KotlinLexer(RegexLexer):
-    """
-    For `Kotlin <http://confluence.jetbrains.net/display/Kotlin/>`_
-    source code.
-
-    Additional options accepted:
-
-    `unicodelevel`
-      Determines which Unicode characters this lexer allows for identifiers.
-      The possible values are:
-
-      * ``none`` -- only the ASCII letters and numbers are allowed. This
-        is the fastest selection.
-      * ``basic`` -- all Unicode characters from the specification except
-        category ``Lo`` are allowed.
-      * ``full`` -- all Unicode characters as specified in the C# specs
-        are allowed.  Note that this means a considerable slowdown since the
-        ``Lo`` category has more than 40,000 characters in it!
-
-      The default value is ``basic``.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Kotlin'
-    aliases = ['kotlin']
-    filenames = ['*.kt']
-    mimetypes = ['text/x-kotlin'] # inferred
-
-    flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
-    # for the range of allowed unicode characters in identifiers,
-    # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
-    levels = {
-        'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
-        'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
-                  '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
-                  uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
-        'full': ('@?(?:_|[^' +
-                 uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
-                 + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
-                                        'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
-    }
-
-    tokens = {}
-    token_variants = True
-
-    for levelname, cs_ident in levels.items():
-        tokens[levelname] = {
-            'root': [
-                # method names
-                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
-                 r'(' + cs_ident + ')'                           # method name
-                 r'(\s*)(\()',                               # signature start
-                 bygroups(using(this), Name.Function, Text, Punctuation)),
-                (r'^\s*\[.*?\]', Name.Attribute),
-                (r'[^\S\n]+', Text),
-                (r'\\\n', Text), # line continuation
-                (r'//.*?\n', Comment.Single),
-                (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-                (r'\n', Text),
-                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
-                (r'[{}]', Punctuation),
-                (r'@"(""|[^"])*"', String),
-                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
-                (r"'\\.'|'[^\\]'", String.Char),
-                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
-                 r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
-                (r'#[ \t]*(if|endif|else|elif|define|undef|'
-                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
-                 Comment.Preproc),
-                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
-                 Keyword)),
-                (r'(abstract|as|break|catch|'
-                 r'fun|continue|default|delegate|'
-                 r'do|else|enum|extern|false|finally|'
-                 r'fixed|for|goto|if|implicit|in|interface|'
-                 r'internal|is|lock|null|'
-                 r'out|override|private|protected|public|readonly|'
-                 r'ref|return|sealed|sizeof|'
-                 r'when|this|throw|true|try|typeof|'
-                 r'unchecked|unsafe|virtual|void|while|'
-                 r'get|set|new|partial|yield|val|var)\b', Keyword),
-                (r'(global)(::)', bygroups(Keyword, Punctuation)),
-                (r'(bool|byte|char|decimal|double|dynamic|float|int|long|'
-                 r'short)\b\??', Keyword.Type),
-                (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
-                (r'(package|using)(\s+)', bygroups(Keyword, Text), 'package'),
-                (cs_ident, Name),
-            ],
-            'class': [
-                (cs_ident, Name.Class, '#pop')
-            ],
-            'package': [
-                (r'(?=\()', Text, '#pop'), # using (resource)
-                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
-            ]
-        }
-
-    def __init__(self, **options):
-        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
-                               'basic')
-        if level not in self._all_tokens:
-            # compile the regexes now
-            self._tokens = self.__class__.process_tokendef(level)
-        else:
-            self._tokens = self._all_tokens[level]
-
-        RegexLexer.__init__(self, **options)
-
-
-class XtendLexer(RegexLexer):
-    """
-    For `Xtend <http://xtend-lang.org/>`_ source code.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Xtend'
-    aliases = ['xtend']
-    filenames = ['*.xtend']
-    mimetypes = ['text/x-xtend']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
-             r'([a-zA-Z_$][a-zA-Z0-9_$]*)'                    # method name
-             r'(\s*)(\()',                                  # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
-             r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
-             r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
-             Keyword),
-            (r'(def|abstract|const|enum|extends|final|implements|native|private|'
-             r'protected|public|static|strictfp|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Declaration),
-            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
-             Keyword.Type),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
-             'class'),
-            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
-            (r"(''')", String, 'template'),
-            (ur"(\u00BB)", String, 'template'),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'(\\\\|\\'|[^'])*'", String),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text)
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
-        ],
-        'template': [
-            (r"'''", String, '#pop'),
-            (ur"\u00AB", String, '#pop'),
-            (r'.', String)
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/math.py b/python/ext-libs/pygments/lexers/math.py
deleted file mode 100644
index 537c6d0..0000000
--- a/python/ext-libs/pygments/lexers/math.py
+++ /dev/null
@@ -1,1652 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.math
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for math languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.util import shebang_matches
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
-    combined, do_insertions
-from pygments.token import Comment, String, Punctuation, Keyword, Name, \
-    Operator, Number, Text, Generic
-
-from pygments.lexers.agile import PythonLexer
-from pygments.lexers import _scilab_builtins
-from pygments.lexers import _stan_builtins
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
-           'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
-           'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
-           'IDLLexer', 'RdLexer']
-
-
-class JuliaLexer(RegexLexer):
-    """
-    For `Julia <http://julialang.org/>`_ source code.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Julia'
-    aliases = ['julia','jl']
-    filenames = ['*.jl']
-    mimetypes = ['text/x-julia','application/x-julia']
-
-    builtins = [
-        'exit','whos','edit','load','is','isa','isequal','typeof','tuple',
-        'ntuple','uid','hash','finalizer','convert','promote','subtype',
-        'typemin','typemax','realmin','realmax','sizeof','eps','promote_type',
-        'method_exists','applicable','invoke','dlopen','dlsym','system',
-        'error','throw','assert','new','Inf','Nan','pi','im',
-    ]
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'[^\S\n]+', Text),
-            (r'#.*$', Comment),
-            (r'[]{}:(),;[@]', Punctuation),
-            (r'\\\n', Text),
-            (r'\\', Text),
-
-            # keywords
-            (r'(begin|while|for|in|return|break|continue|'
-             r'macro|quote|let|if|elseif|else|try|catch|end|'
-             r'bitstype|ccall|do|using|module|import|export|'
-             r'importall|baremodule)\b', Keyword),
-            (r'(local|global|const)\b', Keyword.Declaration),
-            (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
-             r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
-                Keyword.Type),
-
-            # functions
-            (r'(function)((?:\s|\\\s)+)',
-                bygroups(Keyword,Name.Function), 'funcname'),
-
-            # types
-            (r'(type|typealias|abstract)((?:\s|\\\s)+)',
-                bygroups(Keyword,Name.Class), 'typename'),
-
-            # operators
-            (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
-            (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
-
-            # builtins
-            ('(' + '|'.join(builtins) + r')\b',  Name.Builtin),
-
-            # backticks
-            (r'`(?s).*?`', String.Backtick),
-
-            # chars
-            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
-             r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
-            # try to match trailing transpose
-            (r'(?<=[.\w\)\]])\'+', Operator),
-
-            # strings
-            (r'(?:[IL])"', String, 'string'),
-            (r'[E]?"', String, combined('stringescape', 'string')),
-
-            # names
-            (r'@[a-zA-Z0-9_.]+', Name.Decorator),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-
-            # numbers
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'0b[01]+', Number.Binary),
-            (r'0o[0-7]+', Number.Oct),
-            (r'0x[a-fA-F0-9]+', Number.Hex),
-            (r'\d+', Number.Integer)
-        ],
-
-        'funcname': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop'),
-            ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
-            ('[^\s\w{]{1,2}', Operator, '#pop'),
-        ],
-
-        'typename': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-
-        'stringescape': [
-            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
-             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
-            (r'\$(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
-                String.Interpol),
-            (r'[^\\"$]+', String),
-            # quotes, dollar signs, and backslashes must be parsed one at a time
-            (r'["\\]', String),
-            # unhandled string formatting sign
-            (r'\$', String)
-        ],
-    }
-
-    def analyse_text(text):
-        return shebang_matches(text, r'julia')
-
-
-line_re  = re.compile('.*?\n')
-
-class JuliaConsoleLexer(Lexer):
-    """
-    For Julia console sessions. Modeled after MatlabSessionLexer.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Julia console'
-    aliases = ['jlcon']
-
-    def get_tokens_unprocessed(self, text):
-        jllexer = JuliaLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-
-            if line.startswith('julia>'):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:3])]))
-                curcode += line[3:]
-
-            elif line.startswith('      '):
-
-                idx = len(curcode)
-
-                # without is showing error on same line as before...?
-                line = "\n" + line
-                token = (0, Generic.Traceback, line)
-                insertions.append((idx, [token]))
-
-            else:
-                if curcode:
-                    for item in do_insertions(
-                        insertions, jllexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-
-                yield match.start(), Generic.Output, line
-
-        if curcode: # or item:
-            for item in do_insertions(
-                insertions, jllexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class MuPADLexer(RegexLexer):
-    """
-    A `MuPAD <http://www.mupad.com>`_ lexer.
-    Contributed by Christopher Creutzig <christopher at creutzig.de>.
-
-    *New in Pygments 0.8.*
-    """
-    name = 'MuPAD'
-    aliases = ['mupad']
-    filenames = ['*.mu']
-
-    tokens = {
-      'root' : [
-        (r'//.*?$', Comment.Single),
-        (r'/\*', Comment.Multiline, 'comment'),
-        (r'"(?:[^"\\]|\\.)*"', String),
-        (r'\(|\)|\[|\]|\{|\}', Punctuation),
-        (r'''(?x)\b(?:
-            next|break|end|
-            axiom|end_axiom|category|end_category|domain|end_domain|inherits|
-            if|%if|then|elif|else|end_if|
-            case|of|do|otherwise|end_case|
-            while|end_while|
-            repeat|until|end_repeat|
-            for|from|to|downto|step|end_for|
-            proc|local|option|save|begin|end_proc|
-            delete|frame
-          )\b''', Keyword),
-        (r'''(?x)\b(?:
-            DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
-            DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
-            DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
-            DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
-          )\b''', Name.Class),
-        (r'''(?x)\b(?:
-            PI|EULER|E|CATALAN|
-            NIL|FAIL|undefined|infinity|
-            TRUE|FALSE|UNKNOWN
-          )\b''',
-          Name.Constant),
-        (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
-        (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
-        (r'''(?x)\b(?:
-            and|or|not|xor|
-            assuming|
-            div|mod|
-            union|minus|intersect|in|subset
-          )\b''',
-          Operator.Word),
-        (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
-        #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
-        (r'''(?x)
-          ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
-          (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''',
-          bygroups(Name.Function, Text, Punctuation)),
-        (r'''(?x)
-          (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
-          (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
-        (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
-        (r'\.[0-9]+(?:e[0-9]+)?', Number),
-        (r'.', Text)
-      ],
-      'comment' : [
-        (r'[^*/]', Comment.Multiline),
-        (r'/\*', Comment.Multiline, '#push'),
-        (r'\*/', Comment.Multiline, '#pop'),
-        (r'[*/]', Comment.Multiline)
-      ]
-    }
-
-
-class MatlabLexer(RegexLexer):
-    """
-    For Matlab source code.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Matlab'
-    aliases = ['matlab']
-    filenames = ['*.m']
-    mimetypes = ['text/matlab']
-
-    #
-    # These lists are generated automatically.
-    # Run the following in bash shell:
-    #
-    # for f in elfun specfun elmat; do
-    #   echo -n "$f = "
-    #   matlab -nojvm -r "help $f;exit;" | perl -ne \
-    #   'push(@c,$1) if /^    (\w+)\s+-/; END {print q{["}.join(q{","}, at c).qq{"]\n};}'
-    # done
-    #
-    # elfun: Elementary math functions
-    # specfun: Special Math functions
-    # elmat: Elementary matrices and matrix manipulation
-    #
-    # taken from Matlab version 7.4.0.336 (R2007a)
-    #
-    elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh",
-             "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2",
-             "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd",
-             "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd",
-             "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2",
-             "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs",
-             "angle","complex","conj","imag","real","unwrap","isreal","cplxpair",
-             "fix","floor","ceil","round","mod","rem","sign"]
-    specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta",
-               "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx",
-               "erfinv","expint","gamma","gammainc","gammaln","psi","legendre",
-               "cross","dot","factor","isprime","primes","gcd","lcm","rat",
-               "rats","perms","nchoosek","factorial","cart2sph","cart2pol",
-               "pol2cart","sph2cart","hsv2rgb","rgb2hsv"]
-    elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace",
-             "freqspace","meshgrid","accumarray","size","length","ndims","numel",
-             "disp","isempty","isequal","isequalwithequalnans","cat","reshape",
-             "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90",
-             "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute",
-             "ipermute","shiftdim","circshift","squeeze","isscalar","isvector",
-             "ans","eps","realmax","realmin","pi","i","inf","nan","isnan",
-             "isinf","isfinite","j","why","compan","gallery","hadamard","hankel",
-             "hilb","invhilb","magic","pascal","rosser","toeplitz","vander",
-             "wilkinson"]
-
-    tokens = {
-        'root': [
-            # line starting with '!' is sent as a system command.  not sure what
-            # label to use...
-            (r'^!.*', String.Other),
-            (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
-            (r'%.*$', Comment),
-            (r'^\s*function', Keyword, 'deffunc'),
-
-            # from 'iskeyword' on version 7.11 (R2010):
-            (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
-             r'events|for|function|global|if|methods|otherwise|parfor|'
-             r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
-
-            ("(" + "|".join(elfun+specfun+elmat) + r')\b',  Name.Builtin),
-
-            # line continuation with following comment:
-            (r'\.\.\..*$', Comment),
-
-            # operators:
-            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-            # punctuation:
-            (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
-            (r'=|:|;', Punctuation),
-
-            # quote can be transpose, instead of string:
-            # (not great, but handles common cases...)
-            (r'(?<=[\w\)\]])\'', Operator),
-
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer),
-
-            (r'(?<![\w\)\]])\'', String, 'string'),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'.', Text),
-        ],
-        'string': [
-            (r'[^\']*\'', String, '#pop')
-        ],
-        'blockcomment': [
-            (r'^\s*%\}', Comment.Multiline, '#pop'),
-            (r'^.*\n', Comment.Multiline),
-            (r'.', Comment.Multiline),
-        ],
-        'deffunc': [
-            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
-             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
-                      Text.Whitespace, Name.Function, Punctuation, Text,
-                      Punctuation, Text.Whitespace), '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if re.match('^\s*%', text, re.M): # comment
-            return 0.9
-        elif re.match('^!\w+', text, re.M): # system cmd
-            return 0.9
-        return 0.1
-
-
-line_re  = re.compile('.*?\n')
-
-class MatlabSessionLexer(Lexer):
-    """
-    For Matlab sessions.  Modeled after PythonConsoleLexer.
-    Contributed by Ken Schutte <kschutte at csail.mit.edu>.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Matlab session'
-    aliases = ['matlabsession']
-
-    def get_tokens_unprocessed(self, text):
-        mlexer = MatlabLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-
-            if line.startswith('>>'):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:3])]))
-                curcode += line[3:]
-
-            elif line.startswith('???'):
-
-                idx = len(curcode)
-
-                # without is showing error on same line as before...?
-                line = "\n" + line
-                token = (0, Generic.Traceback, line)
-                insertions.append((idx, [token]))
-
-            else:
-                if curcode:
-                    for item in do_insertions(
-                        insertions, mlexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-
-                yield match.start(), Generic.Output, line
-
-        if curcode: # or item:
-            for item in do_insertions(
-                insertions, mlexer.get_tokens_unprocessed(curcode)):
-                yield item
-
-
-class OctaveLexer(RegexLexer):
-    """
-    For GNU Octave source code.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'Octave'
-    aliases = ['octave']
-    filenames = ['*.m']
-    mimetypes = ['text/octave']
-
-    # These lists are generated automatically.
-    # Run the following in bash shell:
-    #
-    # First dump all of the Octave manual into a plain text file:
-    #
-    #   $ info octave --subnodes -o octave-manual
-    #
-    # Now grep through it:
-
-    # for i in \
-    #     "Built-in Function" "Command" "Function File" \
-    #     "Loadable Function" "Mapping Function";
-    # do
-    #     perl -e '@name = qw('"$i"');
-    #              print lc($name[0]),"_kw = [\n"';
-    #
-    #     perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
-    #         octave-manual | sort | uniq ;
-    #     echo "]" ;
-    #     echo;
-    # done
-
-    # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
-    builtin_kw = [ "addlistener", "addpath", "addproperty", "all",
-                   "and", "any", "argnames", "argv", "assignin",
-                   "atexit", "autoload",
-                   "available_graphics_toolkits", "beep_on_error",
-                   "bitand", "bitmax", "bitor", "bitshift", "bitxor",
-                   "cat", "cell", "cellstr", "char", "class", "clc",
-                   "columns", "command_line_path",
-                   "completion_append_char", "completion_matches",
-                   "complex", "confirm_recursive_rmdir", "cputime",
-                   "crash_dumps_octave_core", "ctranspose", "cumprod",
-                   "cumsum", "debug_on_error", "debug_on_interrupt",
-                   "debug_on_warning", "default_save_options",
-                   "dellistener", "diag", "diff", "disp",
-                   "doc_cache_file", "do_string_escapes", "double",
-                   "drawnow", "e", "echo_executing_commands", "eps",
-                   "eq", "errno", "errno_list", "error", "eval",
-                   "evalin", "exec", "exist", "exit", "eye", "false",
-                   "fclear", "fclose", "fcntl", "fdisp", "feof",
-                   "ferror", "feval", "fflush", "fgetl", "fgets",
-                   "fieldnames", "file_in_loadpath", "file_in_path",
-                   "filemarker", "filesep", "find_dir_in_path",
-                   "fixed_point_format", "fnmatch", "fopen", "fork",
-                   "formula", "fprintf", "fputs", "fread", "freport",
-                   "frewind", "fscanf", "fseek", "fskipl", "ftell",
-                   "functions", "fwrite", "ge", "genpath", "get",
-                   "getegid", "getenv", "geteuid", "getgid",
-                   "getpgrp", "getpid", "getppid", "getuid", "glob",
-                   "gt", "gui_mode", "history_control",
-                   "history_file", "history_size",
-                   "history_timestamp_format_string", "home",
-                   "horzcat", "hypot", "ifelse",
-                   "ignore_function_time_stamp", "inferiorto",
-                   "info_file", "info_program", "inline", "input",
-                   "intmax", "intmin", "ipermute",
-                   "is_absolute_filename", "isargout", "isbool",
-                   "iscell", "iscellstr", "ischar", "iscomplex",
-                   "isempty", "isfield", "isfloat", "isglobal",
-                   "ishandle", "isieee", "isindex", "isinteger",
-                   "islogical", "ismatrix", "ismethod", "isnull",
-                   "isnumeric", "isobject", "isreal",
-                   "is_rooted_relative_filename", "issorted",
-                   "isstruct", "isvarname", "kbhit", "keyboard",
-                   "kill", "lasterr", "lasterror", "lastwarn",
-                   "ldivide", "le", "length", "link", "linspace",
-                   "logical", "lstat", "lt", "make_absolute_filename",
-                   "makeinfo_program", "max_recursion_depth", "merge",
-                   "methods", "mfilename", "minus", "mislocked",
-                   "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
-                   "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
-                   "munlock", "nargin", "nargout",
-                   "native_float_format", "ndims", "ne", "nfields",
-                   "nnz", "norm", "not", "numel", "nzmax",
-                   "octave_config_info", "octave_core_file_limit",
-                   "octave_core_file_name",
-                   "octave_core_file_options", "ones", "or",
-                   "output_max_field_width", "output_precision",
-                   "page_output_immediately", "page_screen_output",
-                   "path", "pathsep", "pause", "pclose", "permute",
-                   "pi", "pipe", "plus", "popen", "power",
-                   "print_empty_dimensions", "printf",
-                   "print_struct_array_contents", "prod",
-                   "program_invocation_name", "program_name",
-                   "putenv", "puts", "pwd", "quit", "rats", "rdivide",
-                   "readdir", "readlink", "read_readline_init_file",
-                   "realmax", "realmin", "rehash", "rename",
-                   "repelems", "re_read_readline_init_file", "reset",
-                   "reshape", "resize", "restoredefaultpath",
-                   "rethrow", "rmdir", "rmfield", "rmpath", "rows",
-                   "save_header_format_string", "save_precision",
-                   "saving_history", "scanf", "set", "setenv",
-                   "shell_cmd", "sighup_dumps_octave_core",
-                   "sigterm_dumps_octave_core", "silent_functions",
-                   "single", "size", "size_equal", "sizemax",
-                   "sizeof", "sleep", "source", "sparse_auto_mutate",
-                   "split_long_rows", "sprintf", "squeeze", "sscanf",
-                   "stat", "stderr", "stdin", "stdout", "strcmp",
-                   "strcmpi", "string_fill_char", "strncmp",
-                   "strncmpi", "struct", "struct_levels_to_print",
-                   "strvcat", "subsasgn", "subsref", "sum", "sumsq",
-                   "superiorto", "suppress_verbose_help_message",
-                   "symlink", "system", "tic", "tilde_expand",
-                   "times", "tmpfile", "tmpnam", "toc", "toupper",
-                   "transpose", "true", "typeinfo", "umask", "uminus",
-                   "uname", "undo_string_escapes", "unlink", "uplus",
-                   "upper", "usage", "usleep", "vec", "vectorize",
-                   "vertcat", "waitpid", "warning", "warranty",
-                   "whos_line_format", "yes_or_no", "zeros",
-                   "inf", "Inf", "nan", "NaN"]
-
-    command_kw = [ "close", "load", "who", "whos", ]
-
-    function_kw = [ "accumarray", "accumdim", "acosd", "acotd",
-                   "acscd", "addtodate", "allchild", "ancestor",
-                   "anova", "arch_fit", "arch_rnd", "arch_test",
-                   "area", "arma_rnd", "arrayfun", "ascii", "asctime",
-                   "asecd", "asind", "assert", "atand",
-                   "autoreg_matrix", "autumn", "axes", "axis", "bar",
-                   "barh", "bartlett", "bartlett_test", "beep",
-                   "betacdf", "betainv", "betapdf", "betarnd",
-                   "bicgstab", "bicubic", "binary", "binocdf",
-                   "binoinv", "binopdf", "binornd", "bitcmp",
-                   "bitget", "bitset", "blackman", "blanks",
-                   "blkdiag", "bone", "box", "brighten", "calendar",
-                   "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
-                   "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
-                   "chisquare_test_homogeneity",
-                   "chisquare_test_independence", "circshift", "cla",
-                   "clabel", "clf", "clock", "cloglog", "closereq",
-                   "colon", "colorbar", "colormap", "colperm",
-                   "comet", "common_size", "commutation_matrix",
-                   "compan", "compare_versions", "compass",
-                   "computer", "cond", "condest", "contour",
-                   "contourc", "contourf", "contrast", "conv",
-                   "convhull", "cool", "copper", "copyfile", "cor",
-                   "corrcoef", "cor_test", "cosd", "cotd", "cov",
-                   "cplxpair", "cross", "cscd", "cstrcat", "csvread",
-                   "csvwrite", "ctime", "cumtrapz", "curl", "cut",
-                   "cylinder", "date", "datenum", "datestr",
-                   "datetick", "datevec", "dblquad", "deal",
-                   "deblank", "deconv", "delaunay", "delaunayn",
-                   "delete", "demo", "detrend", "diffpara", "diffuse",
-                   "dir", "discrete_cdf", "discrete_inv",
-                   "discrete_pdf", "discrete_rnd", "display",
-                   "divergence", "dlmwrite", "dos", "dsearch",
-                   "dsearchn", "duplication_matrix", "durbinlevinson",
-                   "ellipsoid", "empirical_cdf", "empirical_inv",
-                   "empirical_pdf", "empirical_rnd", "eomday",
-                   "errorbar", "etime", "etreeplot", "example",
-                   "expcdf", "expinv", "expm", "exppdf", "exprnd",
-                   "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
-                   "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
-                   "factorial", "fail", "fcdf", "feather", "fftconv",
-                   "fftfilt", "fftshift", "figure", "fileattrib",
-                   "fileparts", "fill", "findall", "findobj",
-                   "findstr", "finv", "flag", "flipdim", "fliplr",
-                   "flipud", "fpdf", "fplot", "fractdiff", "freqz",
-                   "freqz_plot", "frnd", "fsolve",
-                   "f_test_regression", "ftp", "fullfile", "fzero",
-                   "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
-                   "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
-                   "geoinv", "geopdf", "geornd", "getfield", "ginput",
-                   "glpk", "gls", "gplot", "gradient",
-                   "graphics_toolkit", "gray", "grid", "griddata",
-                   "griddatan", "gtext", "gunzip", "gzip", "hadamard",
-                   "hamming", "hankel", "hanning", "hggroup",
-                   "hidden", "hilb", "hist", "histc", "hold", "hot",
-                   "hotelling_test", "housh", "hsv", "hurst",
-                   "hygecdf", "hygeinv", "hygepdf", "hygernd",
-                   "idivide", "ifftshift", "image", "imagesc",
-                   "imfinfo", "imread", "imshow", "imwrite", "index",
-                   "info", "inpolygon", "inputname", "interpft",
-                   "interpn", "intersect", "invhilb", "iqr", "isa",
-                   "isdefinite", "isdir", "is_duplicate_entry",
-                   "isequal", "isequalwithequalnans", "isfigure",
-                   "ishermitian", "ishghandle", "is_leap_year",
-                   "isletter", "ismac", "ismember", "ispc", "isprime",
-                   "isprop", "isscalar", "issquare", "isstrprop",
-                   "issymmetric", "isunix", "is_valid_file_id",
-                   "isvector", "jet", "kendall",
-                   "kolmogorov_smirnov_cdf",
-                   "kolmogorov_smirnov_test", "kruskal_wallis_test",
-                   "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
-                   "laplace_pdf", "laplace_rnd", "legend", "legendre",
-                   "license", "line", "linkprop", "list_primes",
-                   "loadaudio", "loadobj", "logistic_cdf",
-                   "logistic_inv", "logistic_pdf", "logistic_rnd",
-                   "logit", "loglog", "loglogerr", "logm", "logncdf",
-                   "logninv", "lognpdf", "lognrnd", "logspace",
-                   "lookfor", "ls_command", "lsqnonneg", "magic",
-                   "mahalanobis", "manova", "matlabroot",
-                   "mcnemar_test", "mean", "meansq", "median", "menu",
-                   "mesh", "meshc", "meshgrid", "meshz", "mexext",
-                   "mget", "mkpp", "mode", "moment", "movefile",
-                   "mpoles", "mput", "namelengthmax", "nargchk",
-                   "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
-                   "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
-                   "nonzeros", "normcdf", "normest", "norminv",
-                   "normpdf", "normrnd", "now", "nthroot", "null",
-                   "ocean", "ols", "onenormest", "optimget",
-                   "optimset", "orderfields", "orient", "orth",
-                   "pack", "pareto", "parseparams", "pascal", "patch",
-                   "pathdef", "pcg", "pchip", "pcolor", "pcr",
-                   "peaks", "periodogram", "perl", "perms", "pie",
-                   "pink", "planerot", "playaudio", "plot",
-                   "plotmatrix", "plotyy", "poisscdf", "poissinv",
-                   "poisspdf", "poissrnd", "polar", "poly",
-                   "polyaffine", "polyarea", "polyderiv", "polyfit",
-                   "polygcd", "polyint", "polyout", "polyreduce",
-                   "polyval", "polyvalm", "postpad", "powerset",
-                   "ppder", "ppint", "ppjumps", "ppplot", "ppval",
-                   "pqpnonneg", "prepad", "primes", "print",
-                   "print_usage", "prism", "probit", "qp", "qqplot",
-                   "quadcc", "quadgk", "quadl", "quadv", "quiver",
-                   "qzhess", "rainbow", "randi", "range", "rank",
-                   "ranks", "rat", "reallog", "realpow", "realsqrt",
-                   "record", "rectangle_lw", "rectangle_sw",
-                   "rectint", "refresh", "refreshdata",
-                   "regexptranslate", "repmat", "residue", "ribbon",
-                   "rindex", "roots", "rose", "rosser", "rotdim",
-                   "rref", "run", "run_count", "rundemos", "run_test",
-                   "runtests", "saveas", "saveaudio", "saveobj",
-                   "savepath", "scatter", "secd", "semilogx",
-                   "semilogxerr", "semilogy", "semilogyerr",
-                   "setaudio", "setdiff", "setfield", "setxor",
-                   "shading", "shift", "shiftdim", "sign_test",
-                   "sinc", "sind", "sinetone", "sinewave", "skewness",
-                   "slice", "sombrero", "sortrows", "spaugment",
-                   "spconvert", "spdiags", "spearman", "spectral_adf",
-                   "spectral_xdf", "specular", "speed", "spencer",
-                   "speye", "spfun", "sphere", "spinmap", "spline",
-                   "spones", "sprand", "sprandn", "sprandsym",
-                   "spring", "spstats", "spy", "sqp", "stairs",
-                   "statistics", "std", "stdnormal_cdf",
-                   "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
-                   "stem", "stft", "strcat", "strchr", "strjust",
-                   "strmatch", "strread", "strsplit", "strtok",
-                   "strtrim", "strtrunc", "structfun", "studentize",
-                   "subplot", "subsindex", "subspace", "substr",
-                   "substruct", "summer", "surf", "surface", "surfc",
-                   "surfl", "surfnorm", "svds", "swapbytes",
-                   "sylvester_matrix", "symvar", "synthesis", "table",
-                   "tand", "tar", "tcdf", "tempdir", "tempname",
-                   "test", "text", "textread", "textscan", "tinv",
-                   "title", "toeplitz", "tpdf", "trace", "trapz",
-                   "treelayout", "treeplot", "triangle_lw",
-                   "triangle_sw", "tril", "trimesh", "triplequad",
-                   "triplot", "trisurf", "triu", "trnd", "tsearchn",
-                   "t_test", "t_test_regression", "type", "unidcdf",
-                   "unidinv", "unidpdf", "unidrnd", "unifcdf",
-                   "unifinv", "unifpdf", "unifrnd", "union", "unique",
-                   "unix", "unmkpp", "unpack", "untabify", "untar",
-                   "unwrap", "unzip", "u_test", "validatestring",
-                   "vander", "var", "var_test", "vech", "ver",
-                   "version", "view", "voronoi", "voronoin",
-                   "waitforbuttonpress", "wavread", "wavwrite",
-                   "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
-                   "welch_test", "what", "white", "whitebg",
-                   "wienrnd", "wilcoxon_test", "wilkinson", "winter",
-                   "xlabel", "xlim", "ylabel", "yulewalker", "zip",
-                   "zlabel", "z_test", ]
-
-    loadable_kw = [ "airy", "amd", "balance", "besselh", "besseli",
-                   "besselj", "besselk", "bessely", "bitpack",
-                   "bsxfun", "builtin", "ccolamd", "cellfun",
-                   "cellslices", "chol", "choldelete", "cholinsert",
-                   "cholinv", "cholshift", "cholupdate", "colamd",
-                   "colloc", "convhulln", "convn", "csymamd",
-                   "cummax", "cummin", "daspk", "daspk_options",
-                   "dasrt", "dasrt_options", "dassl", "dassl_options",
-                   "dbclear", "dbdown", "dbstack", "dbstatus",
-                   "dbstop", "dbtype", "dbup", "dbwhere", "det",
-                   "dlmread", "dmperm", "dot", "eig", "eigs",
-                   "endgrent", "endpwent", "etree", "fft", "fftn",
-                   "fftw", "filter", "find", "full", "gcd",
-                   "getgrent", "getgrgid", "getgrnam", "getpwent",
-                   "getpwnam", "getpwuid", "getrusage", "givens",
-                   "gmtime", "gnuplot_binary", "hess", "ifft",
-                   "ifftn", "inv", "isdebugmode", "issparse", "kron",
-                   "localtime", "lookup", "lsode", "lsode_options",
-                   "lu", "luinc", "luupdate", "matrix_type", "max",
-                   "min", "mktime", "pinv", "qr", "qrdelete",
-                   "qrinsert", "qrshift", "qrupdate", "quad",
-                   "quad_options", "qz", "rand", "rande", "randg",
-                   "randn", "randp", "randperm", "rcond", "regexp",
-                   "regexpi", "regexprep", "schur", "setgrent",
-                   "setpwent", "sort", "spalloc", "sparse", "spparms",
-                   "sprank", "sqrtm", "strfind", "strftime",
-                   "strptime", "strrep", "svd", "svd_driver", "syl",
-                   "symamd", "symbfact", "symrcm", "time", "tsearch",
-                   "typecast", "urlread", "urlwrite", ]
-
-    mapping_kw = [ "abs", "acos", "acosh", "acot", "acoth", "acsc",
-                  "acsch", "angle", "arg", "asec", "asech", "asin",
-                  "asinh", "atan", "atanh", "beta", "betainc",
-                  "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
-                  "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
-                  "erfcx", "erfinv", "exp", "finite", "fix", "floor",
-                  "fmod", "gamma", "gammainc", "gammaln", "imag",
-                  "isalnum", "isalpha", "isascii", "iscntrl",
-                  "isdigit", "isfinite", "isgraph", "isinf",
-                  "islower", "isna", "isnan", "isprint", "ispunct",
-                  "isspace", "isupper", "isxdigit", "lcm", "lgamma",
-                  "log", "lower", "mod", "real", "rem", "round",
-                  "roundb", "sec", "sech", "sign", "sin", "sinh",
-                  "sqrt", "tan", "tanh", "toascii", "tolower", "xor",
-                  ]
-
-    builtin_consts = [ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
-                   "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
-                   "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
-                   "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
-                   "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
-                   "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
-                   "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
-                   "WSTOPSIG", "WTERMSIG", "WUNTRACED", ]
-
-    tokens = {
-        'root': [
-            #We should look into multiline comments
-            (r'[%#].*$', Comment),
-            (r'^\s*function', Keyword, 'deffunc'),
-
-            # from 'iskeyword' on hg changeset 8cc154f45e37
-            (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
-             r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
-             r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
-             r'endswitch|endwhile|events|for|function|get|global|if|methods|'
-             r'otherwise|persistent|properties|return|set|static|switch|try|'
-             r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
-            ("(" + "|".join(  builtin_kw + command_kw
-                            + function_kw + loadable_kw
-                            + mapping_kw) + r')\b',  Name.Builtin),
-
-            ("(" + "|".join(builtin_consts) + r')\b', Name.Constant),
-
-            # operators in Octave but not Matlab:
-            (r'-=|!=|!|/=|--', Operator),
-            # operators:
-            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators in Octave but not Matlab requiring escape for re:
-            (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*',Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
-            # punctuation:
-            (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
-            (r'=|:|;', Punctuation),
-
-            (r'"[^"]*"', String),
-
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer),
-
-            # quote can be transpose, instead of string:
-            # (not great, but handles common cases...)
-            (r'(?<=[\w\)\]])\'', Operator),
-            (r'(?<![\w\)\]])\'', String, 'string'),
-
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'.', Text),
-        ],
-        'string': [
-            (r"[^']*'", String, '#pop'),
-        ],
-        'deffunc': [
-            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
-             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
-                      Text.Whitespace, Name.Function, Punctuation, Text,
-                      Punctuation, Text.Whitespace), '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if re.match('^\s*[%#]', text, re.M): #Comment
-            return 0.1
-
-
-class ScilabLexer(RegexLexer):
-    """
-    For Scilab source code.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'Scilab'
-    aliases = ['scilab']
-    filenames = ['*.sci', '*.sce', '*.tst']
-    mimetypes = ['text/scilab']
-
-    tokens = {
-        'root': [
-            (r'//.*?$', Comment.Single),
-            (r'^\s*function', Keyword, 'deffunc'),
-
-            (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
-             r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
-             r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
-             r'endswitch|endwhile|events|for|function|get|global|if|methods|'
-             r'otherwise|persistent|properties|return|set|static|switch|try|'
-             r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
-            ("(" + "|".join(_scilab_builtins.functions_kw +
-                            _scilab_builtins.commands_kw +
-                            _scilab_builtins.macros_kw
-                            ) + r')\b',  Name.Builtin),
-
-            (r'(%s)\b' % "|".join(map(re.escape, _scilab_builtins.builtin_consts)),
-             Name.Constant),
-
-            # operators:
-            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-            # punctuation:
-            (r'[\[\](){}@.,=:;]', Punctuation),
-
-            (r'"[^"]*"', String),
-
-            # quote can be transpose, instead of string:
-            # (not great, but handles common cases...)
-            (r'(?<=[\w\)\]])\'', Operator),
-            (r'(?<![\w\)\]])\'', String, 'string'),
-
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer),
-
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'.', Text),
-        ],
-        'string': [
-            (r"[^']*'", String, '#pop'),
-            (r'.', String, '#pop'),
-        ],
-        'deffunc': [
-            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
-             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
-                      Text.Whitespace, Name.Function, Punctuation, Text,
-                      Punctuation, Text.Whitespace), '#pop'),
-        ],
-    }
-
-
-class NumPyLexer(PythonLexer):
-    """
-    A Python lexer recognizing Numerical Python builtins.
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'NumPy'
-    aliases = ['numpy']
-
-    # override the mimetypes to not inherit them from python
-    mimetypes = []
-    filenames = []
-
-    EXTRA_KEYWORDS = set([
-        'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
-        'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
-        'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
-        'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
-        'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
-        'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
-        'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
-        'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
-        'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
-        'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
-        'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
-        'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
-        'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
-        'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
-        'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
-        'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
-        'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
-        'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
-        'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
-        'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
-        'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
-        'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
-        'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
-        'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
-        'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
-        'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
-        'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
-        'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
-        'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
-        'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
-        'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
-        'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
-        'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
-        'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
-        'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
-        'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
-        'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
-        'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
-        'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
-        'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
-        'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
-        'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
-        'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
-        'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
-        'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
-        'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
-        'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
-        'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
-        'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
-        'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
-        'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
-        'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
-        'set_numeric_ops', 'set_printoptions', 'set_string_function',
-        'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
-        'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
-        'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
-        'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
-        'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
-        'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
-        'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
-        'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
-        'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
-        'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
-        'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
-    ])
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-                PythonLexer.get_tokens_unprocessed(self, text):
-            if token is Name and value in self.EXTRA_KEYWORDS:
-                yield index, Keyword.Pseudo, value
-            else:
-                yield index, token, value
-
-
-class RConsoleLexer(Lexer):
-    """
-    For R console transcripts or R CMD BATCH output files.
-    """
-
-    name = 'RConsole'
-    aliases = ['rconsole', 'rout']
-    filenames = ['*.Rout']
-
-    def get_tokens_unprocessed(self, text):
-        slexer = SLexer(**self.options)
-
-        current_code_block = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-            if line.startswith('>') or line.startswith('+'):
-                # Colorize the prompt as such,
-                # then put rest of line into current_code_block
-                insertions.append((len(current_code_block),
-                                   [(0, Generic.Prompt, line[:2])]))
-                current_code_block += line[2:]
-            else:
-                # We have reached a non-prompt line!
-                # If we have stored prompt lines, need to process them first.
-                if current_code_block:
-                    # Weave together the prompts and highlight code.
-                    for item in do_insertions(insertions,
-                          slexer.get_tokens_unprocessed(current_code_block)):
-                        yield item
-                    # Reset vars for next code block.
-                    current_code_block = ''
-                    insertions = []
-                # Now process the actual line itself, this is output from R.
-                yield match.start(), Generic.Output, line
-
-        # If we happen to end on a code block with nothing after it, need to
-        # process the last code block. This is neither elegant nor DRY so
-        # should be changed.
-        if current_code_block:
-            for item in do_insertions(insertions,
-                    slexer.get_tokens_unprocessed(current_code_block)):
-                yield item
-
-
-class SLexer(RegexLexer):
-    """
-    For S, S-plus, and R source code.
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'S'
-    aliases = ['splus', 's', 'r']
-    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile']
-    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
-                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
-    tokens = {
-        'comments': [
-            (r'#.*$', Comment.Single),
-        ],
-        'valid_name': [
-            (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
-            # can begin with ., but not if that is followed by a digit
-            (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
-        ],
-        'punctuation': [
-            (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
-        ],
-        'keywords': [
-            (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
-             r'(?![0-9a-zA-Z\._])',
-             Keyword.Reserved)
-        ],
-        'operators': [
-            (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
-            (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
-        ],
-        'builtin_symbols': [
-            (r'(NULL|NA(_(integer|real|complex|character)_)?|'
-             r'Inf|TRUE|FALSE|NaN|\.\.(\.|[0-9]+))'
-             r'(?![0-9a-zA-Z\._])',
-             Keyword.Constant),
-            (r'(T|F)\b', Keyword.Variable),
-        ],
-        'numbers': [
-            # hex number
-            (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
-            # decimal number
-            (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+)([eE][+-]?[0-9]+)?[Li]?',
-             Number),
-        ],
-        'statements': [
-            include('comments'),
-            # whitespaces
-            (r'\s+', Text),
-            (r'`.*?`', String.Backtick),
-            (r'\'', String, 'string_squote'),
-            (r'\"', String, 'string_dquote'),
-            include('builtin_symbols'),
-            include('numbers'),
-            include('keywords'),
-            include('punctuation'),
-            include('operators'),
-            include('valid_name'),
-        ],
-        'root': [
-            include('statements'),
-            # blocks:
-            (r'\{|\}', Punctuation),
-            #(r'\{', Punctuation, 'block'),
-            (r'.', Text),
-        ],
-        #'block': [
-        #    include('statements'),
-        #    ('\{', Punctuation, '#push'),
-        #    ('\}', Punctuation, '#pop')
-        #],
-        'string_squote': [
-            (r'([^\'\\]|\\.)*\'', String, '#pop'),
-        ],
-        'string_dquote': [
-            (r'([^"\\]|\\.)*"', String, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        return '<-' in text
-
-
-class BugsLexer(RegexLexer):
-    """
-    Pygments Lexer for `OpenBugs <http://www.openbugs.info/w/>`_ and WinBugs
-    models.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'BUGS'
-    aliases = ['bugs', 'winbugs', 'openbugs']
-    filenames = ['*.bug']
-
-    _FUNCTIONS = [
-        # Scalar functions
-        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
-        'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
-        'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
-        'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
-        'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
-        'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
-        'trunc',
-        # Vector functions
-        'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
-        'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
-        'sd', 'sort', 'sum',
-        ## Special
-        'D', 'I', 'F', 'T', 'C']
-    """ OpenBUGS built-in functions
-
-    From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
-    This also includes
-
-    - T, C, I : Truncation and censoring.
-      ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
-    - D : ODE
-    - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
-    """
-
-    _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
-                      'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
-                      'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
-                      'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
-                      'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
-                      'dmt', 'dwish']
-    """ OpenBUGS built-in distributions
-
-    Functions from
-    http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
-    """
-
-
-    tokens = {
-        'whitespace' : [
-            (r"\s+", Text),
-            ],
-        'comments' : [
-            # Comments
-            (r'#.*$', Comment.Single),
-            ],
-        'root': [
-            # Comments
-            include('comments'),
-            include('whitespace'),
-            # Block start
-            (r'(model)(\s+)({)',
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            # Reserved Words
-            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
-            # Built-in Functions
-            (r'(%s)(?=\s*\()'
-             % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
-             Name.Builtin),
-            # Regular variable names
-            (r'[A-Za-z][A-Za-z0-9_.]*', Name),
-            # Number Literals
-            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
-            # Punctuation
-            (r'\[|\]|\(|\)|:|,|;', Punctuation),
-            # Assignment operators
-            # SLexer makes these tokens Operators.
-            (r'<-|~', Operator),
-            # Infix and prefix operators
-            (r'\+|-|\*|/', Operator),
-            # Block
-            (r'[{}]', Punctuation),
-            ]
-        }
-
-    def analyse_text(text):
-        if re.search(r"^\s*model\s*{", text, re.M):
-            return 0.7
-        else:
-            return 0.0
-
-class JagsLexer(RegexLexer):
-    """
-    Pygments Lexer for JAGS.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'JAGS'
-    aliases = ['jags']
-    filenames = ['*.jag', '*.bug']
-
-    ## JAGS
-    _FUNCTIONS = [
-        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
-        'cos', 'cosh', 'cloglog',
-        'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
-        'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
-        'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
-        'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
-        'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
-        # Truncation/Censoring (should I include)
-        'T', 'I']
-    # Distributions with density, probability and quartile functions
-    _DISTRIBUTIONS = ['[dpq]%s' % x for x in
-                           ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
-                            'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
-                            'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']]
-    # Other distributions without density and probability
-    _OTHER_DISTRIBUTIONS = [
-        'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
-        'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
-        'dnbinom', 'dweibull', 'ddirich']
-
-    tokens = {
-        'whitespace' : [
-            (r"\s+", Text),
-            ],
-        'names' : [
-            # Regular variable names
-            (r'[a-zA-Z][a-zA-Z0-9_.]*\b', Name),
-            ],
-        'comments' : [
-            # do not use stateful comments
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            # Comments
-            (r'#.*$', Comment.Single),
-            ],
-        'root': [
-            # Comments
-            include('comments'),
-            include('whitespace'),
-            # Block start
-            (r'(model|data)(\s+)({)',
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
-            # Reserved Words
-            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
-            # Builtins
-            # Need to use lookahead because . is a valid char
-            (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
-                                 + _DISTRIBUTIONS
-                                 + _OTHER_DISTRIBUTIONS),
-             Name.Builtin),
-            # Names
-            include('names'),
-            # Number Literals
-            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
-            (r'\[|\]|\(|\)|:|,|;', Punctuation),
-            # Assignment operators
-            (r'<-|~', Operator),
-            # # JAGS includes many more than OpenBUGS
-            (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
-            (r'[{}]', Punctuation),
-            ]
-        }
-
-    def analyse_text(text):
-        if re.search(r'^\s*model\s*\{', text, re.M):
-            if re.search(r'^\s*data\s*\{', text, re.M):
-                return 0.9
-            elif re.search(r'^\s*var', text, re.M):
-                return 0.9
-            else:
-                return 0.3
-        else:
-            return 0
-
-class StanLexer(RegexLexer):
-    """
-    Pygments Lexer for Stan models.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Stan'
-    aliases = ['stan']
-    filenames = ['*.stan']
-
-    _RESERVED = ('for', 'in', 'while', 'repeat', 'until', 'if',
-                 'then', 'else', 'true', 'false', 'T',
-                 'lower', 'upper', 'print')
-
-    _TYPES = ('int', 'real', 'vector', 'simplex', 'ordered', 'row_vector',
-              'matrix', 'corr_matrix', 'cov_matrix', 'positive_ordered')
-
-    tokens = {
-        'whitespace' : [
-            (r"\s+", Text),
-            ],
-        'comments' : [
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            # Comments
-            (r'(//|#).*$', Comment.Single),
-            ],
-        'root': [
-            # Stan is more restrictive on strings than this regex
-            (r'"[^"]*"', String),
-            # Comments
-            include('comments'),
-            # block start
-            include('whitespace'),
-            # Block start
-            (r'(%s)(\s*)({)' %
-             r'|'.join(('data', r'transformed\s+?data',
-                        'parameters', r'transformed\s+parameters',
-                        'model', r'generated\s+quantities')),
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            # Reserved Words
-            (r'(%s)\b' % r'|'.join(_RESERVED), Keyword.Reserved),
-            # Data types
-            (r'(%s)\b' % r'|'.join(_TYPES), Keyword.Type),
-            # Punctuation
-            (r"[;:,\[\]()<>]", Punctuation),
-            # Builtin
-            (r'(%s)(?=\s*\()'
-             % r'|'.join(_stan_builtins.FUNCTIONS
-                         + _stan_builtins.DISTRIBUTIONS),
-             Name.Builtin),
-            (r'(%s)(?=\s*\()'
-             % r'|'.join(_stan_builtins.CONSTANTS), Keyword.Constant),
-            # Special names ending in __, like lp__
-            (r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo),
-            # Regular variable names
-            (r'[A-Za-z][A-Za-z0-9_]*\b', Name),
-            # Real Literals
-            (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
-            (r'-?[0-9]*\.[0-9]*', Number.Float),
-            # Integer Literals
-            (r'-?[0-9]+', Number.Integer),
-            # Assignment operators
-            # SLexer makes these tokens Operators.
-            (r'<-|~', Operator),
-            # Infix and prefix operators (and = )
-            (r"\+|-|\.?\*|\.?/|\\|'|=", Operator),
-            # Block delimiters
-            (r'[{}]', Punctuation),
-            ]
-        }
-
-    def analyse_text(text):
-        if re.search(r'^\s*parameters\s*\{', text, re.M):
-            return 1.0
-        else:
-            return 0.0
-
-
-class IDLLexer(RegexLexer):
-    """
-    Pygments Lexer for IDL (Interactive Data Language).
-
-    *New in Pygments 1.6.*
-    """
-    name = 'IDL'
-    aliases = ['idl']
-    filenames = ['*.pro']
-    mimetypes = ['text/idl']
-
-    _RESERVED = ['and', 'begin', 'break', 'case', 'common', 'compile_opt',
-                 'continue', 'do', 'else', 'end', 'endcase', 'elseelse',
-                 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
-                 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
-                 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
-                 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
-                 'repeat', 'switch', 'then', 'until', 'while', 'xor']
-    """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
-    _BUILTIN_LIB = ['abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
-                    'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
-                    'arg_present', 'array_equal', 'array_indices', 'arrow',
-                    'ascii_template', 'asin', 'assoc', 'atan', 'axis',
-                    'a_correlate', 'bandpass_filter', 'bandreject_filter',
-                    'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
-                    'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
-                    'binomial', 'bin_date', 'bit_ffs', 'bit_population',
-                    'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
-                    'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
-                    'bytscl', 'caldat', 'calendar', 'call_external',
-                    'call_function', 'call_method', 'call_procedure', 'canny',
-                    'catch', 'cd', 'cdf_[0-9a-za-z_]*', 'ceil', 'chebyshev',
-                    'check_math',
-                    'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
-                    'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
-                    'cmyk_convert', 'colorbar', 'colorize_sample',
-                    'colormap_applicable', 'colormap_gradient',
-                    'colormap_rotation', 'colortable', 'color_convert',
-                    'color_exchange', 'color_quan', 'color_range_map', 'comfit',
-                    'command_line_args', 'complex', 'complexarr', 'complexround',
-                    'compute_mesh_normals', 'cond', 'congrid', 'conj',
-                    'constrained_min', 'contour', 'convert_coord', 'convol',
-                    'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
-                    'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
-                    'create_view', 'crossp', 'crvlength', 'cti_test',
-                    'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
-                    'cw_animate', 'cw_animate_getp', 'cw_animate_load',
-                    'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
-                    'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
-                    'cw_form', 'cw_fslider', 'cw_light_editor',
-                    'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
-                    'cw_palette_editor', 'cw_palette_editor_get',
-                    'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
-                    'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
-                    'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
-                    'define_msgblk', 'define_msgblk_from_file', 'defroi',
-                    'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
-                    'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
-                    'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
-                    'dialog_printersetup', 'dialog_printjob',
-                    'dialog_read_image', 'dialog_write_image', 'digital_filter',
-                    'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
-                    'dlm_load', 'dlm_register', 'doc_library', 'double',
-                    'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
-                    'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
-                    'eof', 'eos_[0-9a-za-z_]*', 'erase', 'erf', 'erfc', 'erfcx',
-                    'erode', 'errorplot', 'errplot', 'estimator_filter',
-                    'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
-                    'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
-                    'file_basename', 'file_chmod', 'file_copy', 'file_delete',
-                    'file_dirname', 'file_expand_path', 'file_info',
-                    'file_lines', 'file_link', 'file_mkdir', 'file_move',
-                    'file_poll_input', 'file_readlink', 'file_same',
-                    'file_search', 'file_test', 'file_which', 'findgen',
-                    'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
-                    'fltarr', 'flush', 'format_axis_values', 'free_lun',
-                    'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
-                    'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
-                    'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
-                    'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
-                    'getwindows', 'get_drive_list', 'get_dxf_objects',
-                    'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
-                    'greg2jul', 'grib_[0-9a-za-z_]*', 'grid3', 'griddata',
-                    'grid_input', 'grid_tps', 'gs_iter',
-                    'h5[adfgirst]_[0-9a-za-z_]*', 'h5_browser', 'h5_close',
-                    'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
-                    'hanning', 'hash', 'hdf_[0-9a-za-z_]*', 'heap_free',
-                    'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
-                    'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
-                    'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
-                    'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
-                    'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
-                    'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
-                    'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
-                    'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
-                    'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
-                    'image_cont', 'image_statistics', 'imaginary', 'imap',
-                    'indgen', 'intarr', 'interpol', 'interpolate',
-                    'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
-                    'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
-                    'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
-                    'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
-                    'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
-                    'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
-                    'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
-                    'json_serialize', 'jul2greg', 'julday', 'keyword_set',
-                    'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
-                    'label_region', 'ladfit', 'laguerre', 'laplacian',
-                    'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
-                    'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
-                    'la_gm_linear_model', 'la_hqr', 'la_invert',
-                    'la_least_squares', 'la_least_square_equality',
-                    'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
-                    'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
-                    'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
-                    'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
-                    'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
-                    'lngamma', 'lnp_test', 'loadct', 'locale_get',
-                    'logical_and', 'logical_or', 'logical_true', 'lon64arr',
-                    'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
-                    'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
-                    'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
-                    'map_continents', 'map_grid', 'map_image', 'map_patch',
-                    'map_proj_forward', 'map_proj_image', 'map_proj_info',
-                    'map_proj_init', 'map_proj_inverse', 'map_set',
-                    'matrix_multiply', 'matrix_power', 'max', 'md_test',
-                    'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
-                    'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
-                    'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
-                    'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
-                    'message', 'min', 'min_curve_surf', 'mk_html_help',
-                    'modifyct', 'moment', 'morph_close', 'morph_distance',
-                    'morph_gradient', 'morph_hitormiss', 'morph_open',
-                    'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
-                    'ncdf_[0-9a-za-z_]*', 'newton', 'noise_hurl', 'noise_pick',
-                    'noise_scatter', 'noise_slur', 'norm', 'n_elements',
-                    'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
-                    'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
-                    'online_help', 'on_error', 'open', 'oplot', 'oploterr',
-                    'parse_url', 'particle_trace', 'path_cache', 'path_sep',
-                    'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
-                    'plot_field', 'pnt_line', 'point_lun', 'polarplot',
-                    'polar_contour', 'polar_surface', 'poly', 'polyfill',
-                    'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
-                    'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
-                    'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
-                    'print', 'printd', 'product', 'profile', 'profiler',
-                    'profiles', 'project_vol', 'psafm', 'pseudo',
-                    'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
-                    'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
-                    'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
-                    'query_csv', 'query_dicom', 'query_gif', 'query_image',
-                    'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
-                    'query_png', 'query_ppm', 'query_srf', 'query_tiff',
-                    'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
-                    'rdpix', 'read', 'reads', 'readu', 'read_ascii',
-                    'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
-                    'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
-                    'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
-                    'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
-                    'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
-                    'read_xwd', 'real_part', 'rebin', 'recall_commands',
-                    'recon3', 'reduce_colors', 'reform', 'region_grow',
-                    'register_cursor', 'regress', 'replicate',
-                    'replicate_inplace', 'resolve_all', 'resolve_routine',
-                    'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
-                    'rot', 'rotate', 'round', 'routine_filepath',
-                    'routine_info', 'rs_test', 'r_correlate', 'r_test',
-                    'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
-                    'scope_traceback', 'scope_varfetch', 'scope_varname',
-                    'search2d', 'search3d', 'sem_create', 'sem_delete',
-                    'sem_lock', 'sem_release', 'setenv', 'set_plot',
-                    'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
-                    'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
-                    'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
-                    'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
-                    'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
-                    'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
-                    'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
-                    'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
-                    'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
-                    'streamline', 'stregex', 'stretch', 'string', 'strjoin',
-                    'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
-                    'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
-                    'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
-                    'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
-                    'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
-                    'tanh', 'tek_color', 'temporary', 'tetra_clip',
-                    'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
-                    'timegen', 'time_test2', 'tm_test', 'total', 'trace',
-                    'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
-                    'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
-                    'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
-                    'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
-                    'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
-                    'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
-                    'value_locate', 'variance', 'vector', 'vector_field', 'vel',
-                    'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
-                    'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
-                    'where', 'widget_base', 'widget_button', 'widget_combobox',
-                    'widget_control', 'widget_displaycontextmen', 'widget_draw',
-                    'widget_droplist', 'widget_event', 'widget_info',
-                    'widget_label', 'widget_list', 'widget_propertysheet',
-                    'widget_slider', 'widget_tab', 'widget_table',
-                    'widget_text', 'widget_tree', 'widget_tree_move',
-                    'widget_window', 'wiener_filter', 'window', 'writeu',
-                    'write_bmp', 'write_csv', 'write_gif', 'write_image',
-                    'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
-                    'write_png', 'write_ppm', 'write_spr', 'write_srf',
-                    'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
-                    'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
-                    'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
-                    'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
-                    'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
-                    'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
-                    'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
-                    'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
-                    'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
-                    'xmtool', 'xobjview', 'xobjview_rotate',
-                    'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
-                    'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
-                    'xvolume', 'xvolume_rotate', 'xvolume_write_image',
-                    'xyouts', 'zoom', 'zoom_24']
-    """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
-    tokens = {
-        'root': [
-            (r'^\s*;.*?\n', Comment.Singleline),
-            (r'\b(' + '|'.join(_RESERVED) + r')\b', Keyword),
-            (r'\b(' + '|'.join(_BUILTIN_LIB) + r')\b', Name.Builtin),
-            (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
-            (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
-            (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
-            (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
-            (r'\b[0-9](L|B|S|UL|ULL|LL)?\b', Number),
-            (r'.', Text),
-        ]
-    }
-
-
-class RdLexer(RegexLexer):
-    """
-    Pygments Lexer for R documentation (Rd) files
-
-    This is a very minimal implementation, highlighting little more
-    than the macros. A description of Rd syntax is found in `Writing R
-    Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
-    and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Rd'
-    aliases = ['rd']
-    filenames = ['*.Rd']
-    mimetypes = ['text/x-r-doc']
-
-    # To account for verbatim / LaTeX-like / and R-like areas
-    # would require parsing.
-    tokens = {
-        'root' : [
-            # catch escaped brackets and percent sign
-            (r'\\[\\{}%]', String.Escape),
-            # comments
-            (r'%.*$', Comment),
-            # special macros with no arguments
-            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
-            # macros
-            (r'\\[a-zA-Z]+\b', Keyword),
-            # special preprocessor macros
-            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
-            # non-escaped brackets
-            (r'[{}]', Name.Builtin),
-            # everything else
-            (r'[^\\%\n{}]+', Text),
-            (r'.', Text),
-            ]
-        }
diff --git a/python/ext-libs/pygments/lexers/other.py b/python/ext-libs/pygments/lexers/other.py
deleted file mode 100644
index d121aa6..0000000
--- a/python/ext-libs/pygments/lexers/other.py
+++ /dev/null
@@ -1,3667 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.other
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for other languages.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
-     this, combined, ExtendedRegexLexer
-from pygments.token import Error, Punctuation, Literal, Token, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import get_bool_opt
-from pygments.lexers.web import HtmlLexer
-
-from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS
-from pygments.lexers._robotframeworklexer import RobotFrameworkLexer
-
-# backwards compatibility
-from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
-from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
-     TcshLexer
-
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
-           'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer',
-           'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
-           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer',
-           'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer',
-           'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'SnobolLexer',
-           'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
-           'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
-           'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
-           'CbmBasicV2Lexer', 'AutoItLexer']
-
-
-class ECLLexer(RegexLexer):
-    """
-    Lexer for the declarative big-data `ECL
-    <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
-    language.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'ECL'
-    aliases = ['ecl']
-    filenames = ['*.ecl']
-    mimetypes = ['application/x-ecl']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            include('statements'),
-        ],
-        'whitespace': [
-            (r'\s+', Text),
-            (r'\/\/.*', Comment.Single),
-            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
-        ],
-        'statements': [
-            include('types'),
-            include('keywords'),
-            include('functions'),
-            include('hash'),
-            (r'"', String, 'string'),
-            (r'\'', String, 'string'),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'0[0-7]+[LlUu]*', Number.Oct),
-            (r'\d+[LlUu]*', Number.Integer),
-            (r'\*/', Error),
-            (r'[~!%^&*+=|?:<>/-]+', Operator),
-            (r'[{}()\[\],.;]', Punctuation),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'hash': [
-            (r'^#.*$', Comment.Preproc),
-        ],
-        'types': [
-            (r'(RECORD|END)\D', Keyword.Declaration),
-            (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
-             r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
-             r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
-             bygroups(Keyword.Type, Text)),
-        ],
-        'keywords': [
-            (r'(APPLY|ASSERT|BUILD|BUILDINDEX|EVALUATE|FAIL|KEYDIFF|KEYPATCH|'
-             r'LOADXML|NOTHOR|NOTIFY|OUTPUT|PARALLEL|SEQUENTIAL|SOAPCALL|WAIT'
-             r'CHECKPOINT|DEPRECATED|FAILCODE|FAILMESSAGE|FAILURE|GLOBAL|'
-             r'INDEPENDENT|ONWARNING|PERSIST|PRIORITY|RECOVERY|STORED|SUCCESS|'
-             r'WAIT|WHEN)\b', Keyword.Reserved),
-            # These are classed differently, check later
-            (r'(ALL|AND|ANY|AS|ATMOST|BEFORE|BEGINC\+\+|BEST|BETWEEN|CASE|CONST|'
-             r'COUNTER|CSV|DESCEND|ENCRYPT|ENDC\+\+|ENDMACRO|EXCEPT|EXCLUSIVE|'
-             r'EXPIRE|EXPORT|EXTEND|FALSE|FEW|FIRST|FLAT|FULL|FUNCTION|GROUP|'
-             r'HEADER|HEADING|HOLE|IFBLOCK|IMPORT|IN|JOINED|KEEP|KEYED|LAST|'
-             r'LEFT|LIMIT|LOAD|LOCAL|LOCALE|LOOKUP|MACRO|MANY|MAXCOUNT|'
-             r'MAXLENGTH|MIN SKEW|MODULE|INTERFACE|NAMED|NOCASE|NOROOT|NOSCAN|'
-             r'NOSORT|NOT|OF|ONLY|OPT|OR|OUTER|OVERWRITE|PACKED|PARTITION|'
-             r'PENALTY|PHYSICALLENGTH|PIPE|QUOTE|RELATIONSHIP|REPEAT|RETURN|'
-             r'RIGHT|SCAN|SELF|SEPARATOR|SERVICE|SHARED|SKEW|SKIP|SQL|STORE|'
-             r'TERMINATOR|THOR|THRESHOLD|TOKEN|TRANSFORM|TRIM|TRUE|TYPE|'
-             r'UNICODEORDER|UNSORTED|VALIDATE|VIRTUAL|WHOLE|WILD|WITHIN|XML|'
-             r'XPATH|__COMPRESSED__)\b', Keyword.Reserved),
-        ],
-        'functions': [
-            (r'(ABS|ACOS|ALLNODES|ASCII|ASIN|ASSTRING|ATAN|ATAN2|AVE|CASE|'
-             r'CHOOSE|CHOOSEN|CHOOSESETS|CLUSTERSIZE|COMBINE|CORRELATION|COS|'
-             r'COSH|COUNT|COVARIANCE|CRON|DATASET|DEDUP|DEFINE|DENORMALIZE|'
-             r'DISTRIBUTE|DISTRIBUTED|DISTRIBUTION|EBCDIC|ENTH|ERROR|EVALUATE|'
-             r'EVENT|EVENTEXTRA|EVENTNAME|EXISTS|EXP|FAILCODE|FAILMESSAGE|'
-             r'FETCH|FROMUNICODE|GETISVALID|GLOBAL|GRAPH|GROUP|HASH|HASH32|'
-             r'HASH64|HASHCRC|HASHMD5|HAVING|IF|INDEX|INTFORMAT|ISVALID|'
-             r'ITERATE|JOIN|KEYUNICODE|LENGTH|LIBRARY|LIMIT|LN|LOCAL|LOG|LOOP|'
-             r'MAP|MATCHED|MATCHLENGTH|MATCHPOSITION|MATCHTEXT|MATCHUNICODE|'
-             r'MAX|MERGE|MERGEJOIN|MIN|NOLOCAL|NONEMPTY|NORMALIZE|PARSE|PIPE|'
-             r'POWER|PRELOAD|PROCESS|PROJECT|PULL|RANDOM|RANGE|RANK|RANKED|'
-             r'REALFORMAT|RECORDOF|REGEXFIND|REGEXREPLACE|REGROUP|REJECTED|'
-             r'ROLLUP|ROUND|ROUNDUP|ROW|ROWDIFF|SAMPLE|SET|SIN|SINH|SIZEOF|'
-             r'SOAPCALL|SORT|SORTED|SQRT|STEPPED|STORED|SUM|TABLE|TAN|TANH|'
-             r'THISNODE|TOPN|TOUNICODE|TRANSFER|TRIM|TRUNCATE|TYPEOF|UNGROUP|'
-             r'UNICODEORDER|VARIANCE|WHICH|WORKUNIT|XMLDECODE|XMLENCODE|'
-             r'XMLTEXT|XMLUNICODE)\b', Name.Function),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\'', String, '#pop'),
-            (r'[^"\']+', String),
-        ],
-    }
-
-
-class BrainfuckLexer(RegexLexer):
-    """
-    Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
-    language.
-    """
-
-    name = 'Brainfuck'
-    aliases = ['brainfuck', 'bf']
-    filenames = ['*.bf', '*.b']
-    mimetypes = ['application/x-brainfuck']
-
-    tokens = {
-        'common': [
-            # use different colors for different instruction types
-            (r'[.,]+', Name.Tag),
-            (r'[+-]+', Name.Builtin),
-            (r'[<>]+', Name.Variable),
-            (r'[^.,+\-<>\[\]]+', Comment),
-        ],
-        'root': [
-            (r'\[', Keyword, 'loop'),
-            (r'\]', Error),
-            include('common'),
-        ],
-        'loop': [
-            (r'\[', Keyword, '#push'),
-            (r'\]', Keyword, '#pop'),
-            include('common'),
-        ]
-    }
-
-
-class BefungeLexer(RegexLexer):
-    """
-    Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
-    language.
-
-    *New in Pygments 0.7.*
-    """
-    name = 'Befunge'
-    aliases = ['befunge']
-    filenames = ['*.befunge']
-    mimetypes = ['application/x-befunge']
-
-    tokens = {
-        'root': [
-            (r'[0-9a-f]', Number),
-            (r'[\+\*/%!`-]', Operator), # Traditional math
-            (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
-            (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
-            (r'[|_mw]', Keyword),
-            (r'[{}]', Name.Tag), # Befunge-98 stack ops
-            (r'".*?"', String.Double), # Strings don't appear to allow escapes
-            (r'\'.', String.Single), # Single character
-            (r'[#;]', Comment), # Trampoline... depends on direction hit
-            (r'[pg&~=@iotsy]', Keyword), # Misc
-            (r'[()A-Z]', Comment), # Fingerprints
-            (r'\s+', Text), # Whitespace doesn't matter
-        ],
-    }
-
-
-class RedcodeLexer(RegexLexer):
-    """
-    A simple Redcode lexer based on ICWS'94.
-    Contributed by Adam Blinkinsop <blinks at acm.org>.
-
-    *New in Pygments 0.8.*
-    """
-    name = 'Redcode'
-    aliases = ['redcode']
-    filenames = ['*.cw']
-
-    opcodes = ['DAT','MOV','ADD','SUB','MUL','DIV','MOD',
-               'JMP','JMZ','JMN','DJN','CMP','SLT','SPL',
-               'ORG','EQU','END']
-    modifiers = ['A','B','AB','BA','F','X','I']
-
-    tokens = {
-        'root': [
-            # Whitespace:
-            (r'\s+', Text),
-            (r';.*$', Comment.Single),
-            # Lexemes:
-            #  Identifiers
-            (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
-            (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
-            (r'[A-Za-z_][A-Za-z_0-9]+', Name),
-            #  Operators
-            (r'[-+*/%]', Operator),
-            (r'[#$@<>]', Operator), # mode
-            (r'[.,]', Punctuation), # mode
-            #  Numbers
-            (r'[-+]?\d+', Number.Integer),
-        ],
-    }
-
-
-class MOOCodeLexer(RegexLexer):
-    """
-    For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
-    language).
-
-    *New in Pygments 0.9.*
-    """
-    name = 'MOOCode'
-    filenames = ['*.moo']
-    aliases = ['moocode']
-    mimetypes = ['text/x-moocode']
-
-    tokens = {
-        'root' : [
-            # Numbers
-            (r'(0|[1-9][0-9_]*)', Number.Integer),
-            # Strings
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # exceptions
-            (r'(E_PERM|E_DIV)', Name.Exception),
-            # db-refs
-            (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity),
-            # Keywords
-            (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
-             r'|endwhile|break|continue|return|try'
-             r'|except|endtry|finally|in)\b', Keyword),
-            # builtins
-            (r'(random|length)', Name.Builtin),
-            # special variables
-            (r'(player|caller|this|args)', Name.Variable.Instance),
-            # skip whitespace
-            (r'\s+', Text),
-            (r'\n', Text),
-            # other operators
-            (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator),
-            # function call
-            (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)),
-            # variables
-            (r'([a-zA-Z_0-9]+)', Text),
-        ]
-    }
-
-
-class SmalltalkLexer(RegexLexer):
-    """
-    For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
-    Contributed by Stefan Matthias Aust.
-    Rewritten by Nils Winter.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Smalltalk'
-    filenames = ['*.st']
-    aliases = ['smalltalk', 'squeak']
-    mimetypes = ['text/x-smalltalk']
-
-    tokens = {
-        'root' : [
-            (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
-            include('squeak fileout'),
-            include('whitespaces'),
-            include('method definition'),
-            (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
-            include('objects'),
-            (r'\^|\:=|\_', Operator),
-            # temporaries
-            (r'[\]({}.;!]', Text),
-        ],
-        'method definition' : [
-            # Not perfect can't allow whitespaces at the beginning and the
-            # without breaking everything
-            (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
-             bygroups(Name.Function, Text, Name.Variable)),
-            (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
-            (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
-             bygroups(Name.Function, Text, Name.Variable, Text)),
-        ],
-        'blockvariables' : [
-            include('whitespaces'),
-            (r'(:)(\s*)(\w+)',
-             bygroups(Operator, Text, Name.Variable)),
-            (r'\|', Operator, '#pop'),
-            (r'', Text, '#pop'), # else pop
-        ],
-        'literals' : [
-            (r"'(''|[^'])*'", String, 'afterobject'),
-            (r'\$.', String.Char, 'afterobject'),
-            (r'#\(', String.Symbol, 'parenth'),
-            (r'\)', Text, 'afterobject'),
-            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
-        ],
-        '_parenth_helper' : [
-            include('whitespaces'),
-            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
-            (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
-            # literals
-            (r"'(''|[^'])*'", String),
-            (r'\$.', String.Char),
-            (r'#*\(', String.Symbol, 'inner_parenth'),
-        ],
-        'parenth' : [
-            # This state is a bit tricky since
-            # we can't just pop this state
-            (r'\)', String.Symbol, ('root', 'afterobject')),
-            include('_parenth_helper'),
-        ],
-        'inner_parenth': [
-            (r'\)', String.Symbol, '#pop'),
-            include('_parenth_helper'),
-        ],
-        'whitespaces' : [
-            # skip whitespace and comments
-            (r'\s+', Text),
-            (r'"(""|[^"])*"', Comment),
-        ],
-        'objects' : [
-            (r'\[', Text, 'blockvariables'),
-            (r'\]', Text, 'afterobject'),
-            (r'\b(self|super|true|false|nil|thisContext)\b',
-             Name.Builtin.Pseudo, 'afterobject'),
-            (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
-            (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
-            (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
-             String.Symbol, 'afterobject'),
-            include('literals'),
-        ],
-        'afterobject' : [
-            (r'! !$', Keyword , '#pop'), # squeak chunk delimiter
-            include('whitespaces'),
-            (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
-             Name.Builtin, '#pop'),
-            (r'\b(new\b(?!:))', Name.Builtin),
-            (r'\:=|\_', Operator, '#pop'),
-            (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
-            (r'\b[a-zA-Z]+\w*', Name.Function),
-            (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
-            (r'\.', Punctuation, '#pop'),
-            (r';', Punctuation),
-            (r'[\])}]', Text),
-            (r'[\[({]', Text, '#pop'),
-        ],
-        'squeak fileout' : [
-            # Squeak fileout format (optional)
-            (r'^"(""|[^"])*"!', Keyword),
-            (r"^'(''|[^'])*'!", Keyword),
-            (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
-                bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
-            (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
-                bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
-            (r'^(\w+)( subclass: )(#\w+)'
-             r'(\s+instanceVariableNames: )(.*?)'
-             r'(\s+classVariableNames: )(.*?)'
-             r'(\s+poolDictionaries: )(.*?)'
-             r'(\s+category: )(.*?)(!)',
-                bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
-                         String, Keyword, String, Keyword, String, Keyword)),
-            (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
-                bygroups(Name.Class, Keyword, String, Keyword)),
-            (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
-            (r'! !$', Keyword),
-        ],
-    }
-
-
-class LogtalkLexer(RegexLexer):
-    """
-    For `Logtalk <http://logtalk.org/>`_ source code.
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'Logtalk'
-    aliases = ['logtalk']
-    filenames = ['*.lgt']
-    mimetypes = ['text/x-logtalk']
-
-    tokens = {
-        'root': [
-            # Directives
-            (r'^\s*:-\s',Punctuation,'directive'),
-            # Comments
-            (r'%.*?\n', Comment),
-            (r'/\*(.|\n)*?\*/',Comment),
-            # Whitespace
-            (r'\n', Text),
-            (r'\s+', Text),
-            # Numbers
-            (r"0'.", Number),
-            (r'0b[01]+', Number),
-            (r'0o[0-7]+', Number),
-            (r'0x[0-9a-fA-F]+', Number),
-            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
-            # Variables
-            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
-            # Event handlers
-            (r'(after|before)(?=[(])', Keyword),
-            # Execution-context methods
-            (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
-            # Reflection
-            (r'(current_predicate|predicate_property)(?=[(])', Keyword),
-            # DCGs and term expansion
-            (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])',
-             Keyword),
-            # Entity
-            (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
-             Keyword),
-            (r'(object|protocol|category)_property(?=[(])', Keyword),
-            # Entity relations
-            (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
-            (r'extends_(object|protocol|category)(?=[(])', Keyword),
-            (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
-            (r'(instantiat|specializ)es_class(?=[(])', Keyword),
-            # Events
-            (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
-            # Flags
-            (r'(current|set)_logtalk_flag(?=[(])', Keyword),
-            # Compiling, loading, and library paths
-            (r'logtalk_(compile|l(ibrary_path|oad_context|oad))(?=[(])',
-             Keyword),
-            # Database
-            (r'(clause|retract(all)?)(?=[(])', Keyword),
-            (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
-            # Control constructs
-            (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
-            (r'(fail|true)\b', Keyword),
-            # All solutions
-            (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
-            # Multi-threading meta-predicates
-            (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])',
-             Keyword),
-            # Term unification
-            (r'unify_with_occurs_check(?=[(])', Keyword),
-            # Term creation and decomposition
-            (r'(functor|arg|copy_term|numbervars)(?=[(])', Keyword),
-            # Evaluable functors
-            (r'(rem|mod|abs|sign)(?=[(])', Keyword),
-            (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
-            (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
-            # Other arithmetic functors
-            (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
-            # Term testing
-            (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
-             r'ground)(?=[(])', Keyword),
-            # Term comparison
-            (r'compare(?=[(])', Keyword),
-            # Stream selection and control
-            (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
-            (r'(open|close)(?=[(])', Keyword),
-            (r'flush_output(?=[(])', Keyword),
-            (r'(at_end_of_stream|flush_output)\b', Keyword),
-            (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])',
-             Keyword),
-            # Character and byte input/output
-            (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
-            (r'\bnl\b', Keyword),
-            # Term input/output
-            (r'read(_term)?(?=[(])', Keyword),
-            (r'write(q|_(canonical|term))?(?=[(])', Keyword),
-            (r'(current_)?op(?=[(])', Keyword),
-            (r'(current_)?char_conversion(?=[(])', Keyword),
-            # Atomic term processing
-            (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
-            (r'(char_code|sub_atom)(?=[(])', Keyword),
-            (r'number_c(har|ode)s(?=[(])', Keyword),
-            # Implementation defined hooks functions
-            (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
-            (r'\bhalt\b', Keyword),
-            (r'halt(?=[(])', Keyword),
-            # Message sending operators
-            (r'(::|:|\^\^)', Operator),
-            # External call
-            (r'[{}]', Keyword),
-            # Logic and control
-            (r'\b(ignore|once)(?=[(])', Keyword),
-            (r'\brepeat\b', Keyword),
-            # Sorting
-            (r'(key)?sort(?=[(])', Keyword),
-            # Bitwise functors
-            (r'(>>|<<|/\\|\\\\|\\)', Operator),
-            # Arithemtic evaluation
-            (r'\bis\b', Keyword),
-            # Arithemtic comparison
-            (r'(=:=|=\\=|<|=<|>=|>)', Operator),
-            # Term creation and decomposition
-            (r'=\.\.', Operator),
-            # Term unification
-            (r'(=|\\=)', Operator),
-            # Term comparison
-            (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
-            # Evaluable functors
-            (r'(//|[-+*/])', Operator),
-            (r'\b(e|pi|mod|rem)\b', Operator),
-            # Other arithemtic functors
-            (r'\b\*\*\b', Operator),
-            # DCG rules
-            (r'-->', Operator),
-            # Control constructs
-            (r'([!;]|->)', Operator),
-            # Logic and control
-            (r'\\+', Operator),
-            # Mode operators
-            (r'[?@]', Operator),
-            # Existential quantifier
-            (r'\^', Operator),
-            # Strings
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # Ponctuation
-            (r'[()\[\],.|]', Text),
-            # Atoms
-            (r"[a-z][a-zA-Z0-9_]*", Text),
-            (r"'", String, 'quoted_atom'),
-        ],
-
-        'quoted_atom': [
-            (r"''", String),
-            (r"'", String, '#pop'),
-            (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
-            (r"[^\\'\n]+", String),
-            (r'\\', String),
-        ],
-
-        'directive': [
-            # Conditional compilation directives
-            (r'(el)?if(?=[(])', Keyword, 'root'),
-            (r'(e(lse|ndif))[.]', Keyword, 'root'),
-            # Entity directives
-            (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
-            (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
-            # Predicate scope directives
-            (r'(public|protected|private)(?=[(])', Keyword, 'root'),
-            # Other directives
-            (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
-            (r'in(fo|itialization)(?=[(])', Keyword, 'root'),
-            (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
-            (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|'
-             r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])',
-             Keyword, 'root'),
-            (r'op(?=[(])', Keyword, 'root'),
-            (r'(c(alls|oinductive)|reexport|use(s|_module))(?=[(])',
-             Keyword, 'root'),
-            (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
-            (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
-        ],
-
-        'entityrelations': [
-            (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)'
-             r'(?=[(])', Keyword),
-            # Numbers
-            (r"0'.", Number),
-            (r'0b[01]+', Number),
-            (r'0o[0-7]+', Number),
-            (r'0x[0-9a-fA-F]+', Number),
-            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
-            # Variables
-            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
-            # Atoms
-            (r"[a-z][a-zA-Z0-9_]*", Text),
-            (r"'", String, 'quoted_atom'),
-            # Strings
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # End of entity-opening directive
-            (r'([)]\.)', Text, 'root'),
-            # Scope operator
-            (r'(::)', Operator),
-            # Ponctuation
-            (r'[()\[\],.|]', Text),
-            # Comments
-            (r'%.*?\n', Comment),
-            (r'/\*(.|\n)*?\*/',Comment),
-            # Whitespace
-            (r'\n', Text),
-            (r'\s+', Text),
-        ]
-    }
-
-    def analyse_text(text):
-        if ':- object(' in text:
-            return True
-        if ':- protocol(' in text:
-            return True
-        if ':- category(' in text:
-            return True
-        return False
-
-
-def _shortened(word):
-    dpos = word.find('$')
-    return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b'
-                     for i in range(len(word), dpos, -1)])
-def _shortened_many(*words):
-    return '|'.join(map(_shortened, words))
-
-class GnuplotLexer(RegexLexer):
-    """
-    For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
-
-    *New in Pygments 0.11.*
-    """
-
-    name = 'Gnuplot'
-    aliases = ['gnuplot']
-    filenames = ['*.plot', '*.plt']
-    mimetypes = ['text/x-gnuplot']
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            (_shortened('bi$nd'), Keyword, 'bind'),
-            (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
-            (_shortened('f$it'), Keyword, 'fit'),
-            (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
-            (r'else\b', Keyword),
-            (_shortened('pa$use'), Keyword, 'pause'),
-            (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
-            (_shortened('sa$ve'), Keyword, 'save'),
-            (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
-            (_shortened_many('sh$ow', 'uns$et'),
-             Keyword, ('noargs', 'optionarg')),
-            (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
-                             'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
-                             'pwd$', 're$read', 'res$et', 'scr$eendump',
-                             'she$ll', 'sy$stem', 'up$date'),
-             Keyword, 'genericargs'),
-            (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
-                             'she$ll', 'test$'),
-             Keyword, 'noargs'),
-            ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(=)',
-             bygroups(Name.Variable, Text, Operator), 'genericargs'),
-            ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*\(.*?\)\s*)(=)',
-             bygroups(Name.Function, Text, Operator), 'genericargs'),
-            (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
-            (r';', Keyword),
-        ],
-        'comment': [
-            (r'[^\\\n]', Comment),
-            (r'\\\n', Comment),
-            (r'\\', Comment),
-            # don't add the newline to the Comment token
-            ('', Comment, '#pop'),
-        ],
-        'whitespace': [
-            ('#', Comment, 'comment'),
-            (r'[ \t\v\f]+', Text),
-        ],
-        'noargs': [
-            include('whitespace'),
-            # semicolon and newline end the argument list
-            (r';', Punctuation, '#pop'),
-            (r'\n', Text, '#pop'),
-        ],
-        'dqstring': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-            (r'\n', String, '#pop'), # newline ends the string too
-        ],
-        'sqstring': [
-            (r"''", String), # escaped single quote
-            (r"'", String, '#pop'),
-            (r"[^\\'\n]+", String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # normal backslash
-            (r'\n', String, '#pop'), # newline ends the string too
-        ],
-        'genericargs': [
-            include('noargs'),
-            (r'"', String, 'dqstring'),
-            (r"'", String, 'sqstring'),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
-            (r'(\d+\.\d*|\.\d+)', Number.Float),
-            (r'-?\d+', Number.Integer),
-            ('[,.~!%^&*+=|?:<>/-]', Operator),
-            ('[{}()\[\]]', Punctuation),
-            (r'(eq|ne)\b', Operator.Word),
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
-            (r'\\\n', Text),
-        ],
-        'optionarg': [
-            include('whitespace'),
-            (_shortened_many(
-                "a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der",
-                "box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta",
-                "data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign",
-                "fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid",
-                "hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle",
-                "la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale",
-                "mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin",
-                "rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot",
-                "mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics",
-                "nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics",
-                "mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput",
-                "pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot",
-                "poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze",
-                "st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs",
-                "ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le",
-                "v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta",
-                "yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel",
-                "yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs",
-                "x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs",
-                "zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs",
-                "x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs",
-                "noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs",
-                "xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs",
-                "noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs",
-                "cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange",
-                "y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange",
-                "vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis",
-                "zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'),
-        ],
-        'bind': [
-            ('!', Keyword, '#pop'),
-            (_shortened('all$windows'), Name.Builtin),
-            include('genericargs'),
-        ],
-        'quit': [
-            (r'gnuplot\b', Keyword),
-            include('noargs'),
-        ],
-        'fit': [
-            (r'via\b', Name.Builtin),
-            include('plot'),
-        ],
-        'if': [
-            (r'\)', Punctuation, '#pop'),
-            include('genericargs'),
-        ],
-        'pause': [
-            (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
-            (_shortened('key$press'), Name.Builtin),
-            include('genericargs'),
-        ],
-        'plot': [
-            (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
-                             'mat$rix', 's$mooth', 'thru$', 't$itle',
-                             'not$itle', 'u$sing', 'w$ith'),
-             Name.Builtin),
-            include('genericargs'),
-        ],
-        'save': [
-            (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
-             Name.Builtin),
-            include('genericargs'),
-        ],
-    }
-
-
class PovrayLexer(RegexLexer):
    """
    For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.

    Highlights POV-Ray scene-description files: ``#``-directives, the large
    set of scene keywords, object primitives, operators and numbers.

    *New in Pygments 0.11.*
    """
    name = 'POVRay'
    aliases = ['pov']
    filenames = ['*.pov', '*.inc']
    mimetypes = ['text/x-povray']

    tokens = {
        'root': [
            # C-style block comment; [\w\W] matches anything including newlines.
            (r'/\*[\w\W]*?\*/', Comment.Multiline),
            # Line comment (requires a trailing newline).
            (r'//.*\n', Comment.Single),
            # Double-quoted string; (?s) lets an escape sequence span lines.
            (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
            # Language directives (#declare, #macro, #while, ...), shown as
            # preprocessor tokens.
            (r'#(debug|default|else|end|error|fclose|fopen|ifdef|ifndef|'
             r'include|range|read|render|statistics|switch|undef|version|'
             r'warning|while|write|define|macro|local|declare)\b',
             Comment.Preproc),
            # Scene-description keywords: modifiers, patterns, functions and
            # global settings.
            (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
             r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
             r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
             r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
             r'attenuating|average|background|black_hole|blue|blur_samples|'
             r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
             r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
             r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
             r'color|color_map|colour|colour_map|component|composite|concat|'
             r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
             r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
             r'debug|declare|default|degrees|dents|diffuse|direction|'
             r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
             r'else|emitting|end|error|error_bound|exp|exponent|'
             r'fade_distance|fade_power|falloff|falloff_angle|false|'
             r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
             r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
             r'global_settings|glowing|gradient|granite|gray_threshold|'
             r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
             r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
             r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
             r'linear|linear_spline|linear_sweep|location|log|looks_like|'
             r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
             r'matrix|max|max_intersections|max_iteration|max_trace_level|'
             r'max_value|metallic|min|minimum_reuse|mod|mortar|'
             r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
             r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
             r'orthographic|panoramic|pattern1|pattern2|pattern3|'
             r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
             r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
             r'precision|pwr|quadratic_spline|quaternion|quick_color|'
             r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
             r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
             r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
             r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
             r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
             r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
             r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
             r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
             r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
             r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
             r'tightness|tile2|tiles|track|transform|translate|transmit|'
             r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
             r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
             r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
             r'vnormalize|volume_object|volume_rendered|vol_with_light|'
             r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
             r'wood|wrinkles|yes)\b', Keyword),
            # Object primitives and CSG operations.
            (r'(bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
             r'disc|height_field|intersection|julia_fractal|lathe|'
             r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
             r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
             r'text|torus|triangle|union)\b', Name.Builtin),
            # TODO: <=, etc
            (r'[\[\](){}<>;,]', Punctuation),
            (r'[-+*/=]', Operator),
            # Vector-component shorthands (x, y, z, u, v).
            (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
            # Floats (with or without leading digits), then plain integers.
            (r'[0-9]+\.[0-9]*', Number.Float),
            (r'\.[0-9]+', Number.Float),
            (r'[0-9]+', Number.Integer),
            (r'\s+', Text),
        ]
    }
-
-
-class AppleScriptLexer(RegexLexer):
-    """
-    For `AppleScript source code
-    <http://developer.apple.com/documentation/AppleScript/
-    Conceptual/AppleScriptLangGuide>`_,
-    including `AppleScript Studio
-    <http://developer.apple.com/documentation/AppleScript/
-    Reference/StudioReference>`_.
-    Contributed by Andreas Amann <aamann at mac.com>.
-    """
-
-    name = 'AppleScript'
-    aliases = ['applescript']
-    filenames = ['*.applescript']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    Identifiers = r'[a-zA-Z]\w*'
-    Literals = ['AppleScript', 'current application', 'false', 'linefeed',
-                'missing value', 'pi','quote', 'result', 'return', 'space',
-                'tab', 'text item delimiters', 'true', 'version']
-    Classes = ['alias ', 'application ', 'boolean ', 'class ', 'constant ',
-               'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
-               'real ', 'record ', 'reference ', 'RGB color ', 'script ',
-               'text ', 'unit types', '(?:Unicode )?text', 'string']
-    BuiltIn = ['attachment', 'attribute run', 'character', 'day', 'month',
-               'paragraph', 'word', 'year']
-    HandlerParams = ['about', 'above', 'against', 'apart from', 'around',
-                     'aside from', 'at', 'below', 'beneath', 'beside',
-                     'between', 'for', 'given', 'instead of', 'on', 'onto',
-                     'out of', 'over', 'since']
-    Commands = ['ASCII (character|number)', 'activate', 'beep', 'choose URL',
-                'choose application', 'choose color', 'choose file( name)?',
-                'choose folder', 'choose from list',
-                'choose remote application', 'clipboard info',
-                'close( access)?', 'copy', 'count', 'current date', 'delay',
-                'delete', 'display (alert|dialog)', 'do shell script',
-                'duplicate', 'exists', 'get eof', 'get volume settings',
-                'info for', 'launch', 'list (disks|folder)', 'load script',
-                'log', 'make', 'mount volume', 'new', 'offset',
-                'open( (for access|location))?', 'path to', 'print', 'quit',
-                'random number', 'read', 'round', 'run( script)?',
-                'say', 'scripting components',
-                'set (eof|the clipboard to|volume)', 'store script',
-                'summarize', 'system attribute', 'system info',
-                'the clipboard', 'time to GMT', 'write', 'quoted form']
-    References = ['(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
-                  'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
-                  'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
-                  'before', 'behind', 'every', 'front', 'index', 'last',
-                  'middle', 'some', 'that', 'through', 'thru', 'where', 'whose']
-    Operators = ["and", "or", "is equal", "equals", "(is )?equal to", "is not",
-                 "isn't", "isn't equal( to)?", "is not equal( to)?",
-                 "doesn't equal", "does not equal", "(is )?greater than",
-                 "comes after", "is not less than or equal( to)?",
-                 "isn't less than or equal( to)?", "(is )?less than",
-                 "comes before", "is not greater than or equal( to)?",
-                 "isn't greater than or equal( to)?",
-                 "(is  )?greater than or equal( to)?", "is not less than",
-                 "isn't less than", "does not come before",
-                 "doesn't come before", "(is )?less than or equal( to)?",
-                 "is not greater than", "isn't greater than",
-                 "does not come after", "doesn't come after", "starts? with",
-                 "begins? with", "ends? with", "contains?", "does not contain",
-                 "doesn't contain", "is in", "is contained by", "is not in",
-                 "is not contained by", "isn't contained by", "div", "mod",
-                 "not", "(a  )?(ref( to)?|reference to)", "is", "does"]
-    Control = ['considering', 'else', 'error', 'exit', 'from', 'if',
-               'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
-               'try', 'until', 'using terms from', 'while', 'whith',
-               'with timeout( of)?', 'with transaction', 'by', 'continue',
-               'end', 'its?', 'me', 'my', 'return', 'of' , 'as']
-    Declarations = ['global', 'local', 'prop(erty)?', 'set', 'get']
-    Reserved = ['but', 'put', 'returning', 'the']
-    StudioClasses = ['action cell', 'alert reply', 'application', 'box',
-                     'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
-                     'clip view', 'color well', 'color-panel',
-                     'combo box( item)?', 'control',
-                     'data( (cell|column|item|row|source))?', 'default entry',
-                     'dialog reply', 'document', 'drag info', 'drawer',
-                     'event', 'font(-panel)?', 'formatter',
-                     'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
-                     'movie( view)?', 'open-panel', 'outline view', 'panel',
-                     'pasteboard', 'plugin', 'popup button',
-                     'progress indicator', 'responder', 'save-panel',
-                     'scroll view', 'secure text field( cell)?', 'slider',
-                     'sound', 'split view', 'stepper', 'tab view( item)?',
-                     'table( (column|header cell|header view|view))',
-                     'text( (field( cell)?|view))?', 'toolbar( item)?',
-                     'user-defaults', 'view', 'window']
-    StudioEvents = ['accept outline drop', 'accept table drop', 'action',
-                    'activated', 'alert ended', 'awake from nib', 'became key',
-                    'became main', 'begin editing', 'bounds changed',
-                    'cell value', 'cell value changed', 'change cell value',
-                    'change item value', 'changed', 'child of item',
-                    'choose menu item', 'clicked', 'clicked toolbar item',
-                    'closed', 'column clicked', 'column moved',
-                    'column resized', 'conclude drop', 'data representation',
-                    'deminiaturized', 'dialog ended', 'document nib name',
-                    'double clicked', 'drag( (entered|exited|updated))?',
-                    'drop', 'end editing', 'exposed', 'idle', 'item expandable',
-                    'item value', 'item value changed', 'items changed',
-                    'keyboard down', 'keyboard up', 'launched',
-                    'load data representation', 'miniaturized', 'mouse down',
-                    'mouse dragged', 'mouse entered', 'mouse exited',
-                    'mouse moved', 'mouse up', 'moved',
-                    'number of browser rows', 'number of items',
-                    'number of rows', 'open untitled', 'opened', 'panel ended',
-                    'parameters updated', 'plugin loaded', 'prepare drop',
-                    'prepare outline drag', 'prepare outline drop',
-                    'prepare table drag', 'prepare table drop',
-                    'read from file', 'resigned active', 'resigned key',
-                    'resigned main', 'resized( sub views)?',
-                    'right mouse down', 'right mouse dragged',
-                    'right mouse up', 'rows changed', 'scroll wheel',
-                    'selected tab view item', 'selection changed',
-                    'selection changing', 'should begin editing',
-                    'should close', 'should collapse item',
-                    'should end editing', 'should expand item',
-                    'should open( untitled)?',
-                    'should quit( after last window closed)?',
-                    'should select column', 'should select item',
-                    'should select row', 'should select tab view item',
-                    'should selection change', 'should zoom', 'shown',
-                    'update menu item', 'update parameters',
-                    'update toolbar item', 'was hidden', 'was miniaturized',
-                    'will become active', 'will close', 'will dismiss',
-                    'will display browser cell', 'will display cell',
-                    'will display item cell', 'will display outline cell',
-                    'will finish launching', 'will hide', 'will miniaturize',
-                    'will move', 'will open', 'will pop up', 'will quit',
-                    'will resign active', 'will resize( sub views)?',
-                    'will select tab view item', 'will show', 'will zoom',
-                    'write to file', 'zoomed']
-    StudioCommands = ['animate', 'append', 'call method', 'center',
-                      'close drawer', 'close panel', 'display',
-                      'display alert', 'display dialog', 'display panel', 'go',
-                      'hide', 'highlight', 'increment', 'item for',
-                      'load image', 'load movie', 'load nib', 'load panel',
-                      'load sound', 'localized string', 'lock focus', 'log',
-                      'open drawer', 'path for', 'pause', 'perform action',
-                      'play', 'register', 'resume', 'scroll', 'select( all)?',
-                      'show', 'size to fit', 'start', 'step back',
-                      'step forward', 'stop', 'synchronize', 'unlock focus',
-                      'update']
-    StudioProperties = ['accepts arrow key', 'action method', 'active',
-                        'alignment', 'allowed identifiers',
-                        'allows branch selection', 'allows column reordering',
-                        'allows column resizing', 'allows column selection',
-                        'allows customization',
-                        'allows editing text attributes',
-                        'allows empty selection', 'allows mixed state',
-                        'allows multiple selection', 'allows reordering',
-                        'allows undo', 'alpha( value)?', 'alternate image',
-                        'alternate increment value', 'alternate title',
-                        'animation delay', 'associated file name',
-                        'associated object', 'auto completes', 'auto display',
-                        'auto enables items', 'auto repeat',
-                        'auto resizes( outline column)?',
-                        'auto save expanded items', 'auto save name',
-                        'auto save table columns', 'auto saves configuration',
-                        'auto scroll', 'auto sizes all columns to fit',
-                        'auto sizes cells', 'background color', 'bezel state',
-                        'bezel style', 'bezeled', 'border rect', 'border type',
-                        'bordered', 'bounds( rotation)?', 'box type',
-                        'button returned', 'button type',
-                        'can choose directories', 'can choose files',
-                        'can draw', 'can hide',
-                        'cell( (background color|size|type))?', 'characters',
-                        'class', 'click count', 'clicked( data)? column',
-                        'clicked data item', 'clicked( data)? row',
-                        'closeable', 'collating', 'color( (mode|panel))',
-                        'command key down', 'configuration',
-                        'content(s| (size|view( margins)?))?', 'context',
-                        'continuous', 'control key down', 'control size',
-                        'control tint', 'control view',
-                        'controller visible', 'coordinate system',
-                        'copies( on scroll)?', 'corner view', 'current cell',
-                        'current column', 'current( field)?  editor',
-                        'current( menu)? item', 'current row',
-                        'current tab view item', 'data source',
-                        'default identifiers', 'delta (x|y|z)',
-                        'destination window', 'directory', 'display mode',
-                        'displayed cell', 'document( (edited|rect|view))?',
-                        'double value', 'dragged column', 'dragged distance',
-                        'dragged items', 'draws( cell)? background',
-                        'draws grid', 'dynamically scrolls', 'echos bullets',
-                        'edge', 'editable', 'edited( data)? column',
-                        'edited data item', 'edited( data)? row', 'enabled',
-                        'enclosing scroll view', 'ending page',
-                        'error handling', 'event number', 'event type',
-                        'excluded from windows menu', 'executable path',
-                        'expanded', 'fax number', 'field editor', 'file kind',
-                        'file name', 'file type', 'first responder',
-                        'first visible column', 'flipped', 'floating',
-                        'font( panel)?', 'formatter', 'frameworks path',
-                        'frontmost', 'gave up', 'grid color', 'has data items',
-                        'has horizontal ruler', 'has horizontal scroller',
-                        'has parent data item', 'has resize indicator',
-                        'has shadow', 'has sub menu', 'has vertical ruler',
-                        'has vertical scroller', 'header cell', 'header view',
-                        'hidden', 'hides when deactivated', 'highlights by',
-                        'horizontal line scroll', 'horizontal page scroll',
-                        'horizontal ruler view', 'horizontally resizable',
-                        'icon image', 'id', 'identifier',
-                        'ignores multiple clicks',
-                        'image( (alignment|dims when disabled|frame style|'
-                            'scaling))?',
-                        'imports graphics', 'increment value',
-                        'indentation per level', 'indeterminate', 'index',
-                        'integer value', 'intercell spacing', 'item height',
-                        'key( (code|equivalent( modifier)?|window))?',
-                        'knob thickness', 'label', 'last( visible)? column',
-                        'leading offset', 'leaf', 'level', 'line scroll',
-                        'loaded', 'localized sort', 'location', 'loop mode',
-                        'main( (bunde|menu|window))?', 'marker follows cell',
-                        'matrix mode', 'maximum( content)? size',
-                        'maximum visible columns',
-                        'menu( form representation)?', 'miniaturizable',
-                        'miniaturized', 'minimized image', 'minimized title',
-                        'minimum column width', 'minimum( content)? size',
-                        'modal', 'modified', 'mouse down state',
-                        'movie( (controller|file|rect))?', 'muted', 'name',
-                        'needs display', 'next state', 'next text',
-                        'number of tick marks', 'only tick mark values',
-                        'opaque', 'open panel', 'option key down',
-                        'outline table column', 'page scroll', 'pages across',
-                        'pages down', 'palette label', 'pane splitter',
-                        'parent data item', 'parent window', 'pasteboard',
-                        'path( (names|separator))?', 'playing',
-                        'plays every frame', 'plays selection only', 'position',
-                        'preferred edge', 'preferred type', 'pressure',
-                        'previous text', 'prompt', 'properties',
-                        'prototype cell', 'pulls down', 'rate',
-                        'released when closed', 'repeated',
-                        'requested print time', 'required file type',
-                        'resizable', 'resized column', 'resource path',
-                        'returns records', 'reuses columns', 'rich text',
-                        'roll over', 'row height', 'rulers visible',
-                        'save panel', 'scripts path', 'scrollable',
-                        'selectable( identifiers)?', 'selected cell',
-                        'selected( data)? columns?', 'selected data items?',
-                        'selected( data)? rows?', 'selected item identifier',
-                        'selection by rect', 'send action on arrow key',
-                        'sends action when done editing', 'separates columns',
-                        'separator item', 'sequence number', 'services menu',
-                        'shared frameworks path', 'shared support path',
-                        'sheet', 'shift key down', 'shows alpha',
-                        'shows state by', 'size( mode)?',
-                        'smart insert delete enabled', 'sort case sensitivity',
-                        'sort column', 'sort order', 'sort type',
-                        'sorted( data rows)?', 'sound', 'source( mask)?',
-                        'spell checking enabled', 'starting page', 'state',
-                        'string value', 'sub menu', 'super menu', 'super view',
-                        'tab key traverses cells', 'tab state', 'tab type',
-                        'tab view', 'table view', 'tag', 'target( printer)?',
-                        'text color', 'text container insert',
-                        'text container origin', 'text returned',
-                        'tick mark position', 'time stamp',
-                        'title(d| (cell|font|height|position|rect))?',
-                        'tool tip', 'toolbar', 'trailing offset', 'transparent',
-                        'treat packages as directories', 'truncated labels',
-                        'types', 'unmodified characters', 'update views',
-                        'use sort indicator', 'user defaults',
-                        'uses data source', 'uses ruler',
-                        'uses threaded animation',
-                        'uses title from previous column', 'value wraps',
-                        'version',
-                        'vertical( (line scroll|page scroll|ruler view))?',
-                        'vertically resizable', 'view',
-                        'visible( document rect)?', 'volume', 'width', 'window',
-                        'windows menu', 'wraps', 'zoomable', 'zoomed']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (ur'¬\n', String.Escape),
-            (r"'s\s+", Text), # This is a possessive, consider moving
-            (r'(--|#).*?$', Comment),
-            (r'\(\*', Comment.Multiline, 'comment'),
-            (r'[\(\){}!,.:]', Punctuation),
-            (ur'(«)([^»]+)(»)',
-             bygroups(Text, Name.Builtin, Text)),
-            (r'\b((?:considering|ignoring)\s*)'
-             r'(application responses|case|diacriticals|hyphens|'
-             r'numeric strings|punctuation|white space)',
-             bygroups(Keyword, Name.Builtin)),
-            (ur'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
-            (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
-            (r'^(\s*(?:on|end)\s+)'
-             r'(%s)' % '|'.join(StudioEvents[::-1]),
-             bygroups(Keyword, Name.Function)),
-            (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
-            (r'\b(as )(%s)\b' % '|'.join(Classes),
-             bygroups(Keyword, Name.Class)),
-            (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
-            (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(Control), Keyword),
-            (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
-            (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
-            (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
-            (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r'\b(%s)\b' % Identifiers, Name.Variable),
-            (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
-            (r'[-+]?\d+', Number.Integer),
-        ],
-        'comment': [
-            ('\(\*', Comment.Multiline, '#push'),
-            ('\*\)', Comment.Multiline, '#pop'),
-            ('[^*(]+', Comment.Multiline),
-            ('[*(]', Comment.Multiline),
-        ],
-    }
-
-
class ModelicaLexer(RegexLexer):
    """
    For `Modelica <http://www.modelica.org/>`_ source code.

    *New in Pygments 1.1.*
    """
    name = 'Modelica'
    aliases = ['modelica']
    filenames = ['*.mo']
    mimetypes = ['text/x-modelica']

    # NOTE(review): Modelica identifiers/keywords are case-sensitive, so
    # IGNORECASE looks questionable here — confirm it is intentional.
    flags = re.IGNORECASE | re.DOTALL

    tokens = {
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text), # line continuation
            # Line comment, then C-style block comment (both may span lines
            # via continuations because of DOTALL).
            (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
        ],
        'statements': [
            (r'"', String, 'string'),
            # Scientific-notation floats first so the exponent is consumed.
            (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+)', Number.Float),
            (r'\d+[Ll]?', Number.Integer),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\]{},.;]', Punctuation),
            (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin),
            # Dotted component references, including quoted identifiers.
            (r"([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*')"
             r"(\.([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*'))+", Name.Class),
            (r"('[\w\+\-\*\/\^]+'|\w+)", Name),
        ],
        'root': [
            # Order matters: keywords/functions/operators are tried before
            # the generic 'statements' fallbacks.
            include('whitespace'),
            include('keywords'),
            include('functions'),
            include('operators'),
            include('classes'),
            # Embedded HTML documentation strings.
            (r'("<html>|<html>)', Name.Tag, 'html-content'),
            include('statements'),
        ],
        'keywords': [
            (r'(algorithm|annotation|break|connect|constant|constrainedby|'
            r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
            r'end|equation|exit|expandable|extends|'
            r'external|false|final|flow|for|if|import|impure|in|initial\sequation|'
            r'inner|input|loop|nondiscrete|outer|output|parameter|partial|'
            r'protected|public|pure|redeclare|replaceable|stream|time|then|true|'
            r'when|while|within)\b', Keyword),
        ],
        'functions': [
            (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|'
             r'cross|div|exp|floor|getInstanceName|log|log10|mod|rem|'
             r'semiLinear|sign|sin|sinh|size|spatialDistribution|sqrt|tan|'
             r'tanh|zeros)\b', Name.Function),
        ],
        'operators': [
            # NOTE(review): 'spatialDistribution' also appears in 'functions';
            # since 'functions' is included first in 'root', that rule wins.
            (r'(actualStream|and|assert|cardinality|change|Clock|delay|der|edge|'
             r'hold|homotopy|initial|inStream|noEvent|not|or|pre|previous|reinit|'
             r'return|sample|smooth|spatialDistribution|subSample|terminal|'
             r'terminate)\b', Name.Builtin),
        ],
        'classes': [
            (r'(block|class|connector|function|model|package|'
             r'record|type)(\s+)([A-Za-z_]+)',
             bygroups(Keyword, Text, Name.Class))
        ],
        'string': [
            (r'"', String, '#pop'),
            # Escape sequences: simple escapes, hex and octal codes.
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})',
             String.Escape),
            (r'[^\\"\n]+', String), # all other characters
            (r'\\\n', String), # line continuation
            (r'\\', String), # stray backslash
        ],
        'html-content': [
            # Delegate everything up to </html> to the HTML lexer.
            (r'<\s*/\s*html\s*>', Name.Tag, '#pop'),
            (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)),
        ]
    }
-
-
-class RebolLexer(RegexLexer):
-    """
-    A `REBOL <http://www.rebol.com/>`_ lexer.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'REBOL'
-    aliases = ['rebol']
-    filenames = ['*.r', '*.r3']
-    mimetypes = ['text/x-rebol']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    re.IGNORECASE
-
-    escape_re = r'(?:\^\([0-9a-fA-F]{1,4}\)*)'
-
-    def word_callback(lexer, match):
-        word = match.group()
-
-        if re.match(".*:$", word):
-            yield match.start(), Generic.Subheading, word
-        elif re.match(
-            r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
-            r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
-            r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
-            r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
-            r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
-            r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
-            r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
-            r'while|compress|decompress|secure|open|close|read|read-io|'
-            r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
-            r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
-            r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
-            r'browse|launch|stats|get-modes|set-modes|to-local-file|'
-            r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
-            r'hide|draw|show|size-text|textinfo|offset-to-caret|'
-            r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
-            r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
-            r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
-            r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
-            r'rsa-encrypt)$', word):
-            yield match.start(), Name.Builtin, word
-        elif re.match(
-            r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
-            r'minimum|maximum|negate|complement|absolute|random|head|tail|'
-            r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
-            r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
-            r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
-            r'copy)$', word):
-            yield match.start(), Name.Function, word
-        elif re.match(
-            r'(error|source|input|license|help|install|echo|Usage|with|func|'
-            r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
-            r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
-            r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
-            r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
-            r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
-            r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
-            r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
-            r'write-user|save-user|set-user-name|protect-system|parse-xml|'
-            r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
-            r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
-            r'request-dir|center-face|do-events|net-error|decode-url|'
-            r'parse-header|parse-header-date|parse-email-addrs|import-email|'
-            r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
-            r'find-key-face|do-face|viewtop|confine|find-window|'
-            r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
-            r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
-            r'read-thru|load-thru|do-thru|launch-thru|load-image|'
-            r'request-download|do-face-alt|set-font|set-para|get-style|'
-            r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
-            r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
-            r'resize-face|load-stock|load-stock-block|notify|request|flash|'
-            r'request-color|request-pass|request-text|request-list|'
-            r'request-date|request-file|dbug|editor|link-relative-path|'
-            r'emailer|parse-error)$', word):
-            yield match.start(), Keyword.Namespace, word
-        elif re.match(
-            r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
-            r'return|exit|break)$', word):
-            yield match.start(), Name.Exception, word
-        elif re.match('REBOL$', word):
-            yield match.start(), Generic.Heading, word
-        elif re.match("to-.*", word):
-            yield match.start(), Keyword, word
-        elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
-                      word):
-            yield match.start(), Operator, word
-        elif re.match(".*\?$", word):
-            yield match.start(), Keyword, word
-        elif re.match(".*\!$", word):
-            yield match.start(), Keyword.Type, word
-        elif re.match("'.*", word):
-            yield match.start(), Name.Variable.Instance, word # lit-word
-        elif re.match("#.*", word):
-            yield match.start(), Name.Label, word # issue
-        elif re.match("%.*", word):
-            yield match.start(), Name.Decorator, word # file
-        else:
-            yield match.start(), Name.Variable, word
-
-    tokens = {
-        'root': [
-            (r'REBOL', Generic.Strong, 'script'),
-            (r'R', Comment),
-            (r'[^R]+', Comment),
-        ],
-        'script': [
-            (r'\s+', Text),
-            (r'#"', String.Char, 'char'),
-            (r'#{[0-9a-fA-F]*}', Number.Hex),
-            (r'2#{', Number.Hex, 'bin2'),
-            (r'64#{[0-9a-zA-Z+/=\s]*}', Number.Hex),
-            (r'"', String, 'string'),
-            (r'{', String, 'string2'),
-            (r';#+.*\n', Comment.Special),
-            (r';\*+.*\n', Comment.Preproc),
-            (r';.*\n', Comment),
-            (r'%"', Name.Decorator, 'stringFile'),
-            (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
-            (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
-            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
-            (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
-            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
-            (r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?'
-             r'([\.\d+]?([+-]?\d+:\d+)?)?)?', String.Other), # date
-            (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
-            (r'\d+[xX]\d+', Keyword.Constant), # pair
-            (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
-            (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
-            (r'[+-]?\d+(\'\d+)?', Number),
-            (r'[\[\]\(\)]', Generic.Strong),
-            (r'[a-zA-Z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
-            (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
-            (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
-            (r'comment\s', Comment, 'comment'),
-            (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
-            (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
-            (r'([^(\^{^")\s]+)', Text),
-        ],
-        'string': [
-            (r'[^(\^")]+', String),
-            (escape_re, String.Escape),
-            (r'[\(|\)]+', String),
-            (r'\^.', String.Escape),
-            (r'"', String, '#pop'),
-        ],
-        'string2': [
-            (r'[^(\^{^})]+', String),
-            (escape_re, String.Escape),
-            (r'[\(|\)]+', String),
-            (r'\^.', String.Escape),
-            (r'{', String, '#push'),
-            (r'}', String, '#pop'),
-        ],
-        'stringFile': [
-            (r'[^(\^")]+', Name.Decorator),
-            (escape_re, Name.Decorator),
-            (r'\^.', Name.Decorator),
-            (r'"', Name.Decorator, '#pop'),
-        ],
-        'char': [
-            (escape_re + '"', String.Char, '#pop'),
-            (r'\^."', String.Char, '#pop'),
-            (r'."', String.Char, '#pop'),
-        ],
-        'tag': [
-            (escape_re, Name.Tag),
-            (r'"', Name.Tag, 'tagString'),
-            (r'[^(<>\r\n")]+', Name.Tag),
-            (r'>', Name.Tag, '#pop'),
-        ],
-        'tagString': [
-            (r'[^(\^")]+', Name.Tag),
-            (escape_re, Name.Tag),
-            (r'[\(|\)]+', Name.Tag),
-            (r'\^.', Name.Tag),
-            (r'"', Name.Tag, '#pop'),
-        ],
-        'tuple': [
-            (r'(\d+\.)+', Keyword.Constant),
-            (r'\d+', Keyword.Constant, '#pop'),
-        ],
-        'bin2': [
-            (r'\s+', Number.Hex),
-            (r'([0-1]\s*){8}', Number.Hex),
-            (r'}', Number.Hex, '#pop'),
-        ],
-        'comment': [
-            (r'"', Comment, 'commentString1'),
-            (r'{', Comment, 'commentString2'),
-            (r'\[', Comment, 'commentBlock'),
-            (r'[^(\s{\"\[]+', Comment, '#pop'),
-        ],
-        'commentString1': [
-            (r'[^(\^")]+', Comment),
-            (escape_re, Comment),
-            (r'[\(|\)]+', Comment),
-            (r'\^.', Comment),
-            (r'"', Comment, '#pop'),
-        ],
-        'commentString2': [
-            (r'[^(\^{^})]+', Comment),
-            (escape_re, Comment),
-            (r'[\(|\)]+', Comment),
-            (r'\^.', Comment),
-            (r'{', Comment, '#push'),
-            (r'}', Comment, '#pop'),
-        ],
-        'commentBlock': [
-            (r'\[', Comment, '#push'),
-            (r'\]', Comment, '#pop'),
-            (r'[^(\[\])]+', Comment),
-        ],
-    }
-
-
-class ABAPLexer(RegexLexer):
-    """
-    Lexer for ABAP, SAP's integrated language.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'ABAP'
-    aliases = ['abap']
-    filenames = ['*.abap']
-    mimetypes = ['text/x-abap']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    tokens = {
-        'common': [
-            (r'\s+', Text),
-            (r'^\*.*$', Comment.Single),
-            (r'\".*?\n', Comment.Single),
-            ],
-        'variable-names': [
-            (r'<[\S_]+>', Name.Variable),
-            (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
-            ],
-        'root': [
-            include('common'),
-            #function calls
-            (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
-                bygroups(Keyword, Text, Name.Function)),
-            (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
-             r'TRANSACTION|TRANSFORMATION))\b',
-                Keyword),
-            (r'(FORM|PERFORM)(\s+)(\w+)',
-                bygroups(Keyword, Text, Name.Function)),
-            (r'(PERFORM)(\s+)(\()(\w+)(\))',
-                bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation )),
-            (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
-                bygroups(Keyword, Text, Name.Function, Text, Keyword)),
-
-            # method implementation
-            (r'(METHOD)(\s+)([\w~]+)',
-                bygroups(Keyword, Text, Name.Function)),
-            # method calls
-            (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
-                bygroups(Text, Name.Variable, Operator, Name.Function)),
-            # call methodnames returning style
-            (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
-            # keywords with dashes in them.
-            # these need to be first, because for instance the -ID part
-            # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
-            # first in the list of keywords.
-            (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
-             r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
-             r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
-             r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
-             r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
-             r'INTERFACE-POOL|INVERTED-DATE|'
-             r'LOAD-OF-PROGRAM|LOG-POINT|'
-             r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
-             r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
-             r'OUTPUT-LENGTH|PRINT-CONTROL|'
-             r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
-             r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
-             r'TYPE-POOL|TYPE-POOLS'
-             r')\b', Keyword),
-
-             # keyword kombinations
-            (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
-             r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
-             r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
-             r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
-             r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
-             r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
-             r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
-                      r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
-                      r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
-             r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
-                      r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
-                      r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
-                      r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
-                      r'TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|'
-             r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
-             r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
-             r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
-                            r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
-             r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
-             r'FREE\s(MEMORY|OBJECT)?|'
-             r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
-                          r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
-             r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
-             r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
-                                     r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
-             r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
-                                     r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
-                                     r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
-                                     r'SKIP|ULINE)|'
-             r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
-                        r'TO LIST-PROCESSING|TO TRANSACTION)'
-             r'(ENDING|STARTING)\s+AT|'
-             r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
-             r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
-             r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
-             r'(BEGIN|END)\s+OF|'
-             r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
-             r'COMPARING(\s+ALL\s+FIELDS)?|'
-             r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
-             r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
-             r'END-OF-(DEFINITION|PAGE|SELECTION)|'
-             r'WITH\s+FRAME(\s+TITLE)|'
-
-             # simple kombinations
-             r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
-             r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
-             r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
-             r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
-             r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
-             r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
-             r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
-
-            # single word keywords.
-            (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
-             r'ASSIGN(ING)?|AT(\s+FIRST)?|'
-             r'BACK|BLOCK|BREAK-POINT|'
-             r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
-             r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
-             r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
-             r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
-             r'DETAIL|DIRECTORY|DIVIDE|DO|'
-             r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
-             r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
-             r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
-             r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
-             r'HIDE|'
-             r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
-             r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
-             r'LENGTH|LINES|LOAD|LOCAL|'
-             r'JOIN|'
-             r'KEY|'
-             r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
-             r'NODES|'
-             r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
-             r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
-             r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
-             r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
-             r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
-             r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
-             r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
-             r'ULINE|UNDER|UNPACK|UPDATE|USING|'
-             r'VALUE|VALUES|VIA|'
-             r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
-
-             # builtins
-            (r'(abs|acos|asin|atan|'
-             r'boolc|boolx|bit_set|'
-             r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
-             r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
-             r'count|count_any_of|count_any_not_of|'
-             r'dbmaxlen|distance|'
-             r'escape|exp|'
-             r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
-             r'insert|'
-             r'lines|log|log10|'
-             r'match|matches|'
-             r'nmax|nmin|numofchar|'
-             r'repeat|replace|rescale|reverse|round|'
-             r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
-             r'substring|substring_after|substring_from|substring_before|substring_to|'
-             r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
-             r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
-            (r'&[0-9]', Name),
-            (r'[0-9]+', Number.Integer),
-
-            # operators which look like variable names before
-            # parsing variable names.
-            (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
-             r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
-             r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
-
-            include('variable-names'),
-
-            # standard oparators after variable names,
-            # because < and > are part of field symbols.
-            (r'[?*<>=\-+]', Operator),
-            (r"'(''|[^'])*'", String.Single),
-            (r'[/;:()\[\],\.]', Punctuation)
-        ],
-    }
-
-
-class NewspeakLexer(RegexLexer):
-    """
-    For `Newspeak <http://newspeaklanguage.org/>` syntax.
-    """
-    name = 'Newspeak'
-    filenames = ['*.ns2']
-    aliases = ['newspeak', ]
-    mimetypes = ['text/x-newspeak']
-
-    tokens = {
-       'root' : [
-           (r'\b(Newsqueak2)\b',Keyword.Declaration),
-           (r"'[^']*'",String),
-           (r'\b(class)(\s+)([a-zA-Z0-9_]+)(\s*)',
-            bygroups(Keyword.Declaration,Text,Name.Class,Text)),
-           (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
-            Keyword),
-           (r'([a-zA-Z0-9_]+\:)(\s*)([a-zA-Z_]\w+)',
-            bygroups(Name.Function,Text,Name.Variable)),
-           (r'([a-zA-Z0-9_]+)(\s*)(=)',
-            bygroups(Name.Attribute,Text,Operator)),
-           (r'<[a-zA-Z0-9_]+>', Comment.Special),
-           include('expressionstat'),
-           include('whitespace')
-        ],
-
-       'expressionstat': [
-          (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-          (r'\d+', Number.Integer),
-          (r':\w+',Name.Variable),
-          (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
-          (r'\w+:', Name.Function),
-          (r'\w+', Name.Variable),
-          (r'\(|\)', Punctuation),
-          (r'\[|\]', Punctuation),
-          (r'\{|\}', Punctuation),
-
-          (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
-          (r'\.|;', Punctuation),
-          include('whitespace'),
-          include('literals'),
-       ],
-       'literals': [
-         (r'\$.', String),
-         (r"'[^']*'", String),
-         (r"#'[^']*'", String.Symbol),
-         (r"#\w+:?", String.Symbol),
-         (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
-
-       ],
-       'whitespace' : [
-         (r'\s+', Text),
-         (r'"[^"]*"', Comment)
-       ]
-    }
-
-
-class GherkinLexer(RegexLexer):
-    """
-    For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
-
-    *New in Pygments 1.2.*
-    """
-    name = 'Gherkin'
-    aliases = ['Cucumber', 'cucumber', 'Gherkin', 'gherkin']
-    filenames = ['*.feature']
-    mimetypes = ['text/x-gherkin']
-
-    feature_keywords         = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
-    feature_element_keywords = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|S [...]
-    examples_keywords        = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
-    step_keywords            = ur'^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna [...]
-
-    tokens = {
-        'comments': [
-            (r'#.*$', Comment),
-          ],
-        'feature_elements' : [
-            (step_keywords, Keyword, "step_content_stack"),
-            include('comments'),
-            (r"(\s|.)", Name.Function),
-          ],
-        'feature_elements_on_stack' : [
-            (step_keywords, Keyword, "#pop:2"),
-            include('comments'),
-            (r"(\s|.)", Name.Function),
-          ],
-        'examples_table': [
-            (r"\s+\|", Keyword, 'examples_table_header'),
-            include('comments'),
-            (r"(\s|.)", Name.Function),
-          ],
-        'examples_table_header': [
-            (r"\s+\|\s*$", Keyword, "#pop:2"),
-            include('comments'),
-            (r"\s*\|", Keyword),
-            (r"[^\|]", Name.Variable),
-          ],
-        'scenario_sections_on_stack': [
-            (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"),
-          ],
-        'narrative': [
-            include('scenario_sections_on_stack'),
-            (r"(\s|.)", Name.Function),
-          ],
-        'table_vars': [
-            (r'(<[^>]+>)', Name.Variable),
-          ],
-        'numbers': [
-            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
-          ],
-        'string': [
-            include('table_vars'),
-            (r'(\s|.)', String),
-          ],
-        'py_string': [
-            (r'"""', Keyword, "#pop"),
-            include('string'),
-          ],
-          'step_content_root':[
-            (r"$", Keyword, "#pop"),
-            include('step_content'),
-          ],
-          'step_content_stack':[
-            (r"$", Keyword, "#pop:2"),
-            include('step_content'),
-          ],
-          'step_content':[
-            (r'"', Name.Function, "double_string"),
-            include('table_vars'),
-            include('numbers'),
-            include('comments'),
-            (r'(\s|.)', Name.Function),
-          ],
-          'table_content': [
-            (r"\s+\|\s*$", Keyword, "#pop"),
-            include('comments'),
-            (r"\s*\|", Keyword),
-            include('string'),
-          ],
-        'double_string': [
-            (r'"', Name.Function, "#pop"),
-            include('string'),
-          ],
-        'root': [
-            (r'\n', Name.Function),
-            include('comments'),
-            (r'"""', Keyword, "py_string"),
-            (r'\s+\|', Keyword, 'table_content'),
-            (r'"', Name.Function, "double_string"),
-            include('table_vars'),
-            include('numbers'),
-            (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
-            (step_keywords, bygroups(Name.Function, Keyword),
-             'step_content_root'),
-            (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
-             'narrative'),
-            (feature_element_keywords,
-             bygroups(Name.Function, Keyword, Keyword, Name.Function),
-             'feature_elements'),
-            (examples_keywords,
-             bygroups(Name.Function, Keyword, Keyword, Name.Function),
-             'examples_table'),
-            (r'(\s|.)', Name.Function),
-        ]
-    }
-
-class AsymptoteLexer(RegexLexer):
-    """
-    For `Asymptote <http://asymptote.sf.net/>`_ source code.
-
-    *New in Pygments 1.2.*
-    """
-    name = 'Asymptote'
-    aliases = ['asy', 'asymptote']
-    filenames = ['*.asy']
-    mimetypes = ['text/x-asymptote']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
-    tokens = {
-        'whitespace': [
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
-            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
-        ],
-        'statements': [
-            # simple string (TeX friendly)
-            (r'"(\\\\|\\"|[^"])*"', String),
-            # C style string (with character escapes)
-            (r"'", String, 'string'),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.]', Punctuation),
-            (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
-            (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
-             r'return|break|continue|struct|typedef|new|access|import|'
-             r'unravel|from|include|quote|static|public|private|restricted|'
-             r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
-            # Since an asy-type-name can be also an asy-function-name,
-            # in the following we test if the string "  [a-zA-Z]" follows
-            # the Keyword.Type.
-            # Of course it is not perfect !
-            (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
-             r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
-             r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
-             r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
-             r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
-             r'path3|pen|picture|point|position|projection|real|revolution|'
-             r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
-             r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
-             r'transformation|tree|triangle|trilinear|triple|vector|'
-             r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
-            # Now the asy-type-name which are not asy-function-name
-            # except yours !
-            # Perhaps useless
-            (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
-             r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
-             r'picture|position|real|revolution|slice|splitface|ticksgridT|'
-             r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
-            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            ],
-        'root': [
-            include('whitespace'),
-            # functions
-            (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|\*))'    # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
-             r'(\s*\([^;]*?\))'                      # signature
-             r'(' + _ws + r')({)',
-             bygroups(using(this), Name.Function, using(this), using(this),
-                      Punctuation),
-             'function'),
-            # function declarations
-            (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|\*))'    # return arguments
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
-             r'(\s*\([^;]*?\))'                      # signature
-             r'(' + _ws + r')(;)',
-             bygroups(using(this), Name.Function, using(this), using(this),
-                      Punctuation)),
-            ('', Text, 'statement'),
-        ],
-        'statement' : [
-            include('whitespace'),
-            include('statements'),
-            ('[{}]', Punctuation),
-            (';', Punctuation, '#pop'),
-        ],
-        'function': [
-            include('whitespace'),
-            include('statements'),
-            (';', Punctuation),
-            ('{', Punctuation, '#push'),
-            ('}', Punctuation, '#pop'),
-        ],
-        'string': [
-            (r"'", String, '#pop'),
-            (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'\n', String),
-            (r"[^\\'\n]+", String), # all other characters
-            (r'\\\n', String),
-            (r'\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-            ]
-        }
-
-    def get_tokens_unprocessed(self, text):
-        from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
-        for index, token, value in \
-               RegexLexer.get_tokens_unprocessed(self, text):
-           if token is Name and value in ASYFUNCNAME:
-               token = Name.Function
-           elif token is Name and value in ASYVARNAME:
-               token = Name.Variable
-           yield index, token, value
-
-
-class PostScriptLexer(RegexLexer):
-    """
-    Lexer for PostScript files.
-
-    The PostScript Language Reference published by Adobe at
-    <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
-    is the authority for this.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'PostScript'
-    aliases = ['postscript']
-    filenames = ['*.ps', '*.eps']
-    mimetypes = ['application/postscript']
-
-    delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
-    delimiter_end = r'(?=[%s])' % delimiter
-
-    valid_name_chars = r'[^%s]' % delimiter
-    valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
-    tokens = {
-        'root': [
-            # All comment types
-            (r'^%!.+\n', Comment.Preproc),
-            (r'%%.*\n', Comment.Special),
-            (r'(^%.*\n){2,}', Comment.Multiline),
-            (r'%.*\n', Comment.Single),
-
-            # String literals are awkward; enter separate state.
-            (r'\(', String, 'stringliteral'),
-
-            (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
-
-            # Numbers
-            (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
-            # Slight abuse: use Oct to signify any explicit base system
-            (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
-             r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
-            (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
-             + delimiter_end, Number.Float),
-            (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
-            # References
-            (r'\/%s' % valid_name, Name.Variable),
-
-            # Names
-            (valid_name, Name.Function),      # Anything else is executed
-
-            # These keywords taken from
-            # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
-            # Is there an authoritative list anywhere that doesn't involve
-            # trawling documentation?
-
-            (r'(false|true)' + delimiter_end, Keyword.Constant),
-
-            # Conditionals / flow control
-            (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
-             + delimiter_end, Keyword.Reserved),
-
-            ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
-             'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
-             'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
-             'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
-             'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
-             'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
-             'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
-             'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
-             'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
-             'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
-             'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
-             'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
-             'syntaxerror|transform|translate|truncate|typecheck|undefined|'
-             'undefinedfilename|undefinedresult)' + delimiter_end,
-             Name.Builtin),
-
-            (r'\s+', Text),
-        ],
-
-        'stringliteral': [
-            (r'[^\(\)\\]+', String),
-            (r'\\', String.Escape, 'escape'),
-            (r'\(', String, '#push'),
-            (r'\)', String, '#pop'),
-        ],
-
-        'escape': [
-            (r'([0-8]{3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'),
-        ],
-    }
-
-
-class AutohotkeyLexer(RegexLexer):
-    """
-    For `autohotkey <http://www.autohotkey.com/>`_ source code.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'autohotkey'
-    aliases = ['ahk']
-    filenames = ['*.ahk', '*.ahkl']
-    mimetypes = ['text/x-autohotkey']
-
-    tokens = {
-        'root': [
-            (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline),
-                             'incomment'),
-            (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
-            (r'\s+;.*?$', Comment.Singleline),
-            (r'^;.*?$', Comment.Singleline),
-            (r'[]{}(),;[]', Punctuation),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            (r'\%[a-zA-Z_#@$][a-zA-Z0-9_#@$]*\%', Name.Variable),
-            (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
-            include('commands'),
-            include('labels'),
-            include('builtInFunctions'),
-            include('builtInVariables'),
-            (r'"', String, combined('stringescape', 'dqs')),
-            include('numbers'),
-            (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
-            (r'\\|\'', Text),
-            (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
-            include('garbage'),
-        ],
-        'incomment': [
-            (r'^\s*\*/', Comment.Multiline, '#pop'),
-            (r'[^*/]', Comment.Multiline),
-            (r'[*/]', Comment.Multiline)
-        ],
-        'incontinuation': [
-            (r'^\s*\)', Generic, '#pop'),
-            (r'[^)]', Generic),
-            (r'[)]', Generic),
-        ],
-        'commands': [
-            (r'(?i)^(\s*)(global|local|static|'
-             r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
-             r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
-             r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
-             r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
-             r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
-             r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
-             r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
-             r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
-             r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
-             r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
-             r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
-             r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
-             r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
-             r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
-             r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
-             r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
-             r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
-             r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
-             r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
-             r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
-             r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
-             r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
-             r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
-             r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
-             r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
-             r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
-             r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
-             r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
-             r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
-             r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
-             r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
-             r'SetBatchLines|SetCapslockState|SetControlDelay|'
-             r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
-             r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
-             r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
-             r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
-             r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
-             r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
-             r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
-             r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
-             r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
-             r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
-             r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
-             r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
-             r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
-             r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
-             r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
-             r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
-             r'WinWait)\b', bygroups(Text, Name.Builtin)),
-        ],
-        'builtInFunctions': [
-            (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
-             r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
-             r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
-             r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
-             r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
-             r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
-             r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
-             r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
-             r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
-             r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
-             r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
-             r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
-             r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
-             r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
-             r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
-             r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
-             Name.Function),
-        ],
-        'builtInVariables': [
-            (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
-             r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
-             r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
-             r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
-             r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
-             r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
-             r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
-             r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
-             r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
-             r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
-             r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
-             r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
-             r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
-             r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
-             r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
-             r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
-             r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
-             r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
-             r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
-             r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
-             r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
-             r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
-             r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
-             r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
-             r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
-             r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
-             r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
-             r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
-             r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
-             r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
-             Name.Variable),
-        ],
-        'labels': [
-            # hotkeys and labels
-            # technically, hotkey names are limited to named keys and buttons
-            (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)),
-            (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
-        ],
-        'numbers': [
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'0\d+', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+', Number.Integer)
-        ],
-        'stringescape': [
-            (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
-        ],
-        'strings': [
-            (r'[^"\n]+', String),
-        ],
-        'dqs': [
-            (r'"', String, '#pop'),
-            include('strings')
-        ],
-        'garbage': [
-            (r'[^\S\n]', Text),
-            # (r'.', Text),      # no cheating
-        ],
-    }
-
-
-class MaqlLexer(RegexLexer):
-    """
-    Lexer for `GoodData MAQL
-    <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
-    scripts.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'MAQL'
-    aliases = ['maql']
-    filenames = ['*.maql']
-    mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
-
-    flags = re.IGNORECASE
-    tokens = {
-        'root': [
-            # IDENTITY
-            (r'IDENTIFIER\b', Name.Builtin),
-            # IDENTIFIER
-            (r'\{[^}]+\}', Name.Variable),
-            # NUMBER
-            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
-            # STRING
-            (r'"', Literal.String, 'string-literal'),
-            #  RELATION
-            (r'\<\>|\!\=', Operator),
-            (r'\=|\>\=|\>|\<\=|\<', Operator),
-            # :=
-            (r'\:\=', Operator),
-            # OBJECT
-            (r'\[[^]]+\]', Name.Variable.Class),
-            # keywords
-            (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
-             r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
-             r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
-             r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
-             r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
-             r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
-             r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
-             r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
-             r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
-             r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
-            # FUNCNAME
-            (r'[a-zA-Z]\w*\b', Name.Function),
-            # Comments
-            (r'#.*', Comment.Single),
-            # Punctuation
-            (r'[,;\(\)]', Token.Punctuation),
-            # Space is not significant
-            (r'\s+', Text)
-        ],
-        'string-literal': [
-            (r'\\[tnrfbae"\\]', String.Escape),
-            (r'"', Literal.String, '#pop'),
-            (r'[^\\"]+', Literal.String)
-        ],
-    }
-
-
-class GoodDataCLLexer(RegexLexer):
-    """
-    Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
-    script files.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'GoodData-CL'
-    aliases = ['gooddata-cl']
-    filenames = ['*.gdc']
-    mimetypes = ['text/x-gooddata-cl']
-
-    flags = re.IGNORECASE
-    tokens = {
-        'root': [
-            # Comments
-            (r'#.*', Comment.Single),
-            # Function call
-            (r'[a-zA-Z]\w*', Name.Function),
-            # Argument list
-            (r'\(', Token.Punctuation, 'args-list'),
-            # Punctuation
-            (r';', Token.Punctuation),
-            # Space is not significant
-            (r'\s+', Text)
-        ],
-        'args-list': [
-            (r'\)', Token.Punctuation, '#pop'),
-            (r',', Token.Punctuation),
-            (r'[a-zA-Z]\w*', Name.Variable),
-            (r'=', Operator),
-            (r'"', Literal.String, 'string-literal'),
-            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
-            # Space is not significant
-            (r'\s', Text)
-        ],
-        'string-literal': [
-            (r'\\[tnrfbae"\\]', String.Escape),
-            (r'"', Literal.String, '#pop'),
-            (r'[^\\"]+', Literal.String)
-        ]
-    }
-
-
-class ProtoBufLexer(RegexLexer):
-    """
-    Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
-    definition files.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'Protocol Buffer'
-    aliases = ['protobuf']
-    filenames = ['*.proto']
-
-    tokens = {
-        'root': [
-            (r'[ \t]+', Text),
-            (r'[,;{}\[\]\(\)]', Punctuation),
-            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
-            (r'\b(import|option|optional|required|repeated|default|packed|'
-             r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
-            (r'(int32|int64|uint32|uint64|sint32|sint64|'
-             r'fixed32|fixed64|sfixed32|sfixed64|'
-             r'float|double|bool|string|bytes)\b', Keyword.Type),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
-            (r'(message|extend)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'message'),
-            (r'(enum|group|service)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'type'),
-            (r'\".*\"', String),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'(\-?(inf|nan))', Number.Float),
-            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'0[0-7]+[LlUu]*', Number.Oct),
-            (r'\d+[LlUu]*', Number.Integer),
-            (r'[+-=]', Operator),
-            (r'([a-zA-Z_][a-zA-Z0-9_\.]*)([ \t]*)(=)',
-             bygroups(Name.Attribute, Text, Operator)),
-            ('[a-zA-Z_][a-zA-Z0-9_\.]*', Name),
-        ],
-        'package': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Namespace, '#pop')
-        ],
-        'message': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'type': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name, '#pop')
-        ],
-    }
-
-
-class HybrisLexer(RegexLexer):
-    """
-    For `Hybris <http://www.hybris-lang.org>`_ source code.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'Hybris'
-    aliases = ['hybris', 'hy']
-    filenames = ['*.hy', '*.hyb']
-    mimetypes = ['text/x-hybris', 'application/x-hybris']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            # method names
-            (r'^(\s*(?:function|method|operator\s+)+?)'
-             r'([a-zA-Z_][a-zA-Z0-9_]*)'
-             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
-            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
-             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
-            (r'(extends|private|protected|public|static|throws|function|method|'
-             r'operator)\b', Keyword.Declaration),
-            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
-             r'__INC_PATH__)\b', Keyword.Constant),
-            (r'(class|struct)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'class'),
-            (r'(import|include)(\s+)',
-             bygroups(Keyword.Namespace, Text), 'import'),
-            (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
-             r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
-             r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
-             r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
-             r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
-             r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
-             r'user_functions|dyn_functions|methods|call|call_method|mknod|'
-             r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
-             r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
-             r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
-             r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
-             r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
-             r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
-             r'send|close|print|println|printf|input|readline|serial_open|'
-             r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
-             r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
-             r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
-             r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
-             r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
-             r'remove|contains|join)\b', Name.Builtin),
-            (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
-             r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
-             r'Exception)\b', Keyword.Type),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
-            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Name.Attribute)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
-            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
-            (r'[0-9]+L?', Number.Integer),
-            (r'\n', Text),
-        ],
-        'class': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import': [
-            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
-        ],
-    }
-
-
-class AwkLexer(RegexLexer):
-    """
-    For Awk scripts.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Awk'
-    aliases = ['awk', 'gawk', 'mawk', 'nawk']
-    filenames = ['*.awk']
-    mimetypes = ['application/x-awk']
-
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'#.*$', Comment.Single)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'\B', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root': [
-            (r'^(?=\s|/)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|\|\||&&|in|\$|!?~|'
-             r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            (r'(break|continue|do|while|exit|for|if|'
-             r'return)\b', Keyword, 'slashstartsregex'),
-            (r'function\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
-             r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
-             r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
-             r'delete|system)\b', Keyword.Reserved),
-            (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|'
-             r'IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|'
-             r'SUBSEP)\b', Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
-
-
-class Cfengine3Lexer(RegexLexer):
-    """
-    Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'CFEngine3'
-    aliases = ['cfengine3', 'cf3']
-    filenames = ['*.cf']
-    mimetypes = []
-
-    tokens = {
-        'root': [
-            (r'#.*?\n', Comment),
-            (r'(body)(\s+)(\S+)(\s+)(control)',
-             bygroups(Keyword, Text, Keyword, Text, Keyword)),
-            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
-             bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
-             'arglist'),
-            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
-            (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
-             bygroups(Punctuation,Name.Variable,Punctuation,
-                      Text,Keyword.Type,Text,Operator,Text)),
-            (r'(\S+)(\s*)(=>)(\s*)',
-             bygroups(Keyword.Reserved,Text,Operator,Text)),
-            (r'"', String, 'string'),
-            (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
-            (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)),
-            (r'(\w+)(:)', bygroups(Keyword.Declaration,Punctuation)),
-            (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable),
-            (r'[(){},;]', Punctuation),
-            (r'=>', Operator),
-            (r'->', Operator),
-            (r'\d+\.\d+', Number.Float),
-            (r'\d+', Number.Integer),
-            (r'\w+', Name.Function),
-            (r'\s+', Text),
-        ],
-        'string': [
-            (r'\$[\{\(]', String.Interpol, 'interpol'),
-            (r'\\.', String.Escape),
-            (r'"', String, '#pop'),
-            (r'\n', String),
-            (r'.', String),
-        ],
-        'interpol': [
-            (r'\$[\{\(]', String.Interpol, '#push'),
-            (r'[\}\)]', String.Interpol, '#pop'),
-            (r'[^\$\{\(\)\}]+', String.Interpol),
-        ],
-        'arglist': [
-            (r'\)', Punctuation, '#pop'),
-            (r',', Punctuation),
-            (r'\w+', Name.Variable),
-            (r'\s+', Text),
-        ],
-    }
-
-
-class SnobolLexer(RegexLexer):
-    """
-    Lexer for the SNOBOL4 programming language.
-
-    Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
-    Does not require spaces around binary operators.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = "Snobol"
-    aliases = ["snobol"]
-    filenames = ['*.snobol']
-    mimetypes = ['text/x-snobol']
-
-    tokens = {
-        # root state, start of line
-        # comments, continuation lines, and directives start in column 1
-        # as do labels
-        'root': [
-            (r'\*.*\n', Comment),
-            (r'[\+\.] ', Punctuation, 'statement'),
-            (r'-.*\n', Comment),
-            (r'END\s*\n', Name.Label, 'heredoc'),
-            (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'),
-            (r'\s+', Text, 'statement'),
-        ],
-        # statement state, line after continuation or label
-        'statement': [
-            (r'\s*\n', Text, '#pop'),
-            (r'\s+', Text),
-            (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
-             r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
-             r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
-             r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
-             Name.Builtin),
-            (r'[A-Za-z][\w\.]*', Name),
-            # ASCII equivalents of original operators
-            # | for the EBCDIC equivalent, ! likewise
-            # \ for EBCDIC negation
-            (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator),
-            (r'"[^"]*"', String),
-            (r"'[^']*'", String),
-            # Accept SPITBOL syntax for real numbers
-            # as well as Macro SNOBOL4
-            (r'[0-9]+(?=[^\.EeDd])', Number.Integer),
-            (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
-            # Goto
-            (r':', Punctuation, 'goto'),
-            (r'[\(\)<>,;]', Punctuation),
-        ],
-        # Goto block
-        'goto': [
-            (r'\s*\n', Text, "#pop:2"),
-            (r'\s+', Text),
-            (r'F|S', Keyword),
-            (r'(\()([A-Za-z][\w.]*)(\))',
-             bygroups(Punctuation, Name.Label, Punctuation))
-        ],
-        # everything after the END statement is basically one
-        # big heredoc.
-        'heredoc': [
-            (r'.*\n', String.Heredoc)
-        ]
-    }
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
-    """
-    For UrbiScript source code.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'UrbiScript'
-    aliases = ['urbiscript']
-    filenames = ['*.u']
-    mimetypes = ['application/x-urbiscript']
-
-    flags = re.DOTALL
-
-    ## TODO
-    # - handle Experimental and deprecated tags with specific tokens
-    # - handle Angles and Durations with specific tokens
-
-    def blob_callback(lexer, match, ctx):
-        text_before_blob = match.group(1)
-        blob_start = match.group(2)
-        blob_size_str = match.group(3)
-        blob_size = int(blob_size_str)
-        yield match.start(), String, text_before_blob
-        ctx.pos += len(text_before_blob)
-
-        # if blob size doesn't match blob format (example : "\B(2)(aaa)")
-        # yield blob as a string
-        if ctx.text[match.end() + blob_size] != ")":
-            result = "\\B(" + blob_size_str + ")("
-            yield match.start(), String, result
-            ctx.pos += len(result)
-            return
-
-        # if blob is well formated, yield as Escape
-        blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
-        yield match.start(), String.Escape, blob_text
-        ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            # comments
-            (r'//.*?\n', Comment),
-            (r'/\*', Comment.Multiline, 'comment'),
-            (r'(?:every|for|loop|while)(?:;|&|\||,)',Keyword),
-            (r'(?:assert|at|break|case|catch|closure|compl|continue|'
-             r'default|else|enum|every|external|finally|for|freezeif|if|new|'
-             r'onleave|return|stopif|switch|this|throw|timeout|try|'
-             r'waituntil|whenever|while)\b', Keyword),
-            (r'(?:asm|auto|bool|char|const_cast|delete|double|dynamic_cast|'
-             r'explicit|export|extern|float|friend|goto|inline|int|'
-             r'long|mutable|namespace|register|reinterpret_cast|short|'
-             r'signed|sizeof|static_cast|struct|template|typedef|typeid|'
-             r'typename|union|unsigned|using|virtual|volatile|'
-             r'wchar_t)\b', Keyword.Reserved),
-            # deprecated keywords, use a meaningfull token when available
-            (r'(?:emit|foreach|internal|loopn|static)\b', Keyword),
-            # ignored keywords, use a meaningfull token when available
-            (r'(?:private|protected|public)\b', Keyword),
-            (r'(?:var|do|const|function|class)\b', Keyword.Declaration),
-            (r'(?:true|false|nil|void)\b', Keyword.Constant),
-            (r'(?:Barrier|Binary|Boolean|CallMessage|Channel|Code|'
-             r'Comparable|Container|Control|Date|Dictionary|Directory|'
-             r'Duration|Enumeration|Event|Exception|Executable|File|Finalizable|'
-             r'Float|FormatInfo|Formatter|Global|Group|Hash|InputStream|'
-             r'IoService|Job|Kernel|Lazy|List|Loadable|Lobby|Location|Logger|Math|'
-             r'Mutex|nil|Object|Orderable|OutputStream|Pair|Path|Pattern|Position|'
-             r'Primitive|Process|Profile|PseudoLazy|PubSub|RangeIterable|Regexp|'
-             r'Semaphore|Server|Singleton|Socket|StackFrame|Stream|String|System|'
-             r'Tag|Timeout|Traceable|TrajectoryGenerator|Triplet|Tuple'
-             r'|UObject|UValue|UVar)\b', Name.Builtin),
-            (r'(?:this)\b', Name.Builtin.Pseudo),
-            # don't match single | and &
-            (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
-            (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
-             Operator.Word),
-            (r'[{}\[\]()]+', Punctuation),
-            (r'(?:;|\||,|&|\?|!)+', Punctuation),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            # Float, Integer, Angle and Duration
-            (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
-             r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
-            # handle binary blob in strings
-            (r'"', String.Double, "string.double"),
-            (r"'", String.Single, "string.single"),
-        ],
-        'string.double': [
-            (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
-            (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
-        ],
-        'string.single': [
-            (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
-            (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
-        ],
-        # from http://pygments.org/docs/lexerdevelopment/#changing-states
-        'comment': [
-            (r'[^*/]', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline),
-        ]
-    }
-
-
-class OpenEdgeLexer(RegexLexer):
-    """
-    Lexer for `OpenEdge ABL (formerly Progress)
-    <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'OpenEdge ABL'
-    aliases = ['openedge', 'abl', 'progress']
-    filenames = ['*.p', '*.cls']
-    mimetypes = ['text/x-openedge', 'application/x-openedge']
-
-    types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
-             r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
-             r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
-             r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
-             r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
-
-    keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(' +
-                r'|'.join(OPENEDGEKEYWORDS) +
-                r')\s*($|(?=[^0-9a-z_\-]))')
-    tokens = {
-        'root': [
-            (r'/\*', Comment.Multiline, 'comment'),
-            (r'\{', Comment.Preproc, 'preprocessor'),
-            (r'\s*&.*', Comment.Preproc),
-            (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
-            (types, Keyword.Type),
-            (keywords, Name.Builtin),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'[0-9]+', Number.Integer),
-            (r'\s+', Text),
-            (r'[+*/=-]', Operator),
-            (r'[.:()]', Punctuation),
-            (r'.', Name.Variable), # Lazy catch-all
-        ],
-        'comment': [
-            (r'[^*/]', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ],
-        'preprocessor': [
-            (r'[^{}]', Comment.Preproc),
-            (r'{', Comment.Preproc, '#push'),
-            (r'}', Comment.Preproc, '#pop'),
-        ],
-    }
-
-
-class BroLexer(RegexLexer):
-    """
-    For `Bro <http://bro-ids.org/>`_ scripts.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'Bro'
-    aliases = ['bro']
-    filenames = ['*.bro']
-
-    _hex = r'[0-9a-fA-F_]+'
-    _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
-    _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
-    tokens = {
-        'root': [
-            # Whitespace
-            (r'^@.*?\n', Comment.Preproc),
-            (r'#.*?\n', Comment.Single),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text),
-            # Keywords
-            (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
-             r'|export|for|function|if|global|local|module|next'
-             r'|of|print|redef|return|schedule|type|when|while)\b', Keyword),
-            (r'(addr|any|bool|count|counter|double|file|int|interval|net'
-             r'|pattern|port|record|set|string|subnet|table|time|timer'
-             r'|vector)\b', Keyword.Type),
-            (r'(T|F)\b', Keyword.Constant),
-            (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
-             r'|default|disable_print_hook|raw_output|encrypt|group|log'
-             r'|mergeable|optional|persistent|priority|redef'
-             r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation,
-                 Keyword)),
-            (r'\s+module\b', Keyword.Namespace),
-            # Addresses, ports and networks
-            (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
-            (r'(\d+\.){3}\d+', Number),
-            (r'(' + _hex + r'){7}' + _hex, Number),
-            (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
-            (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
-            (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
-            # Hostnames
-            (_h + r'(\.' + _h + r')+', String),
-            # Numeric
-            (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
-            (r'0[xX]' + _hex, Number.Hex),
-            (_float, Number.Float),
-            (r'\d+', Number.Integer),
-            (r'/', String.Regex, 'regex'),
-            (r'"', String, 'string'),
-            # Operators
-            (r'[!%*/+:<=>?~|-]', Operator),
-            (r'([-+=&|]{2}|[+=!><-]=)', Operator),
-            (r'(in|match)\b', Operator.Word),
-            (r'[{}()\[\]$.,;]', Punctuation),
-            # Identfier
-            (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String),
-            (r'\\\n', String),
-            (r'\\', String)
-        ],
-        'regex': [
-            (r'/', String.Regex, '#pop'),
-            (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
-            (r'[^\\/\n]+', String.Regex),
-            (r'\\\n', String.Regex),
-            (r'\\', String.Regex)
-        ]
-    }
-
-
-class CbmBasicV2Lexer(RegexLexer):
-    """
-    For CBM BASIC V2 sources.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'CBM BASIC V2'
-    aliases = ['cbmbas']
-    filenames = ['*.bas']
-
-    flags = re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'rem.*\n', Comment.Single),
-            (r'\s+', Text),
-            (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
-             r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
-             r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
-            (r'data|restore|dim|let|def|fn', Keyword.Declaration),
-            (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
-             r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
-            (r'[-+*/^<>=]', Operator),
-            (r'not|and|or', Operator.Word),
-            (r'"[^"\n]*.', String),
-            (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
-            (r'[\(\),:;]', Punctuation),
-            (r'\w+[$%]?', Name),
-        ]
-    }
-
-    def analyse_text(self, text):
-        # if it starts with a line number, it shouldn't be a "modern" Basic
-        # like VB.net
-        if re.match(r'\d+', text):
-            return True
-
-
-class MscgenLexer(RegexLexer):
-    """
-    For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Mscgen'
-    aliases = ['mscgen', 'msc']
-    filenames = ['*.msc']
-
-    _var = r'([a-zA-Z0-9_]+|"(?:\\"|[^"])*")'
-
-    tokens = {
-        'root': [
-            (r'msc\b', Keyword.Type),
-            # Options
-            (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
-             r'|arcgradient|ARCGRADIENT)\b', Name.Property),
-            # Operators
-            (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
-            (r'(\.|-|\|){3}', Keyword),
-            (r'(?:-|=|\.|:){2}'
-             r'|<<=>>|<->|<=>|<<>>|<:>'
-             r'|->|=>>|>>|=>|:>|-x|-X'
-             r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
-            # Names
-            (r'\*', Name.Builtin),
-            (_var, Name.Variable),
-            # Other
-            (r'\[', Punctuation, 'attrs'),
-            (r'\{|\}|,|;', Punctuation),
-            include('comments')
-        ],
-        'attrs': [
-            (r'\]', Punctuation, '#pop'),
-            (_var + r'(\s*)(=)(\s*)' + _var,
-             bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
-                      String)),
-            (r',', Punctuation),
-            include('comments')
-        ],
-        'comments': [
-            (r'(?://|#).*?\n', Comment.Single),
-            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
-            (r'[ \t\r\n]+', Text.Whitespace)
-        ]
-    }
-
-
-def _rx_indent(level):
-    # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
-    # Edit this if you are in an environment where KconfigLexer gets expanded
-    # input (tabs expanded to spaces) and the expansion tab width is != 8,
-    # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
-    # Value range here is 2 <= {tab_width} <= 8.
-    tab_width = 8
-    # Regex matching a given indentation {level}, assuming that indentation is
-    # a multiple of {tab_width}. In other cases there might be problems.
-    return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level)
-
-
-class KconfigLexer(RegexLexer):
-    """
-    For Linux-style Kconfig files.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Kconfig'
-    aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
-    # Adjust this if new kconfig file names appear in your environment
-    filenames = ['Kconfig', '*Config.in*', 'external.in*',
-                 'standard-modules.in']
-    mimetypes = ['text/x-kconfig']
-    # No re.MULTILINE, indentation-aware help text needs line-by-line handling
-    flags = 0
-
-    def call_indent(level):
-        # If indentation >= {level} is detected, enter state 'indent{level}'
-        return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
-    def do_indent(level):
-        # Print paragraphs of indentation level >= {level} as String.Doc,
-        # ignoring blank lines. Then return to 'root' state.
-        return [
-            (_rx_indent(level), String.Doc),
-            (r'\s*\n', Text),
-            (r'', Generic, '#pop:2')
-        ]
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'#.*?\n', Comment.Single),
-            (r'(mainmenu|config|menuconfig|choice|endchoice|comment|menu|'
-             r'endmenu|visible if|if|endif|source|prompt|select|depends on|'
-             r'default|range|option)\b', Keyword),
-            (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
-            (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
-             Name.Builtin),
-            (r'[!=&|]', Operator),
-            (r'[()]', Punctuation),
-            (r'[0-9]+', Number.Integer),
-            (r"'(''|[^'])*'", String.Single),
-            (r'"(""|[^"])*"', String.Double),
-            (r'\S+', Text),
-        ],
-        # Help text is indented, multi-line and ends when a lower indentation
-        # level is detected.
-        'help': [
-            # Skip blank lines after help token, if any
-            (r'\s*\n', Text),
-            # Determine the first help line's indentation level heuristically(!).
-            # Attention: this is not perfect, but works for 99% of "normal"
-            # indentation schemes up to a max. indentation level of 7.
-            call_indent(7),
-            call_indent(6),
-            call_indent(5),
-            call_indent(4),
-            call_indent(3),
-            call_indent(2),
-            call_indent(1),
-            ('', Text, '#pop'),  # for incomplete help sections without text
-        ],
-        # Handle text for indentation levels 7 to 1
-        'indent7': do_indent(7),
-        'indent6': do_indent(6),
-        'indent5': do_indent(5),
-        'indent4': do_indent(4),
-        'indent3': do_indent(3),
-        'indent2': do_indent(2),
-        'indent1': do_indent(1),
-    }
-
-
-class VGLLexer(RegexLexer):
-    """
-    For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
-    source code.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'VGL'
-    aliases = ['vgl']
-    filenames = ['*.rpf']
-
-    flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'\{[^\}]*\}', Comment.Multiline),
-            (r'declare', Keyword.Constant),
-            (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
-             r'|create|on|line|with|global|routine|value|endroutine|constant'
-             r'|global|set|join|library|compile_option|file|exists|create|copy'
-             r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
-             Keyword),
-            (r'(true|false|null|empty|error|locked)', Keyword.Constant),
-            (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator),
-            (r'"[^"]*"', String),
-            (r'(\.)([a-z_\$][a-z0-9_\$]*)', bygroups(Operator, Name.Attribute)),
-            (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
-            (r'[a-z_\$][a-z0-9_\$]*', Name),
-            (r'[\r\n]+', Text),
-            (r'\s+', Text)
-        ]
-    }
-
-
-class SourcePawnLexer(RegexLexer):
-    """
-    For SourcePawn source code with preprocessor directives.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'SourcePawn'
-    aliases = ['sp']
-    filenames = ['*.sp']
-    mimetypes = ['text/x-sourcepawn']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
-    tokens = {
-        'root': [
-            # preprocessor directives: without whitespace
-            ('^#if\s+0', Comment.Preproc, 'if0'),
-            ('^#', Comment.Preproc, 'macro'),
-            # or with whitespace
-            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
-            ('^' + _ws + '#', Comment.Preproc, 'macro'),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
-            (r'[{}]', Punctuation),
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'0[0-7]+[LlUu]*', Number.Oct),
-            (r'\d+[LlUu]*', Number.Integer),
-            (r'\*/', Error),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.;]', Punctuation),
-            (r'(case|const|continue|native|'
-             r'default|else|enum|for|if|new|operator|'
-             r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
-            (r'(bool|Float)\b', Keyword.Type),
-            (r'(true|false)\b', Keyword.Constant),
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/\*(.|\n)*?\*/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
-            (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
-            (r'.*?\n', Comment),
-        ]
-    }
-
-    SM_TYPES = ['Action', 'bool', 'Float', 'Plugin', 'String', 'any',
-                'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
-                'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
-                'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
-                'ConVarBounds', 'QueryCookie', 'ReplySource',
-                'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
-                'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
-                'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
-                'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
-                'EventHook', 'FileType', 'FileTimeMode', 'PathType',
-                'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
-                'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
-                'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
-                'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
-                'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
-                'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
-                'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
-                'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
-                'TopMenuPosition', 'TopMenuObject', 'UserMsg']
-
-    def __init__(self, **options):
-        self.smhighlighting = get_bool_opt(options,
-                'sourcemod', True)
-
-        self._functions = []
-        if self.smhighlighting:
-            from pygments.lexers._sourcemodbuiltins import FUNCTIONS
-            self._functions.extend(FUNCTIONS)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name:
-                if self.smhighlighting:
-                    if value in self.SM_TYPES:
-                        token = Keyword.Type
-                    elif value in self._functions:
-                        token = Name.Builtin
-            yield index, token, value
-
-
-class PuppetLexer(RegexLexer):
-    """
-    For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Puppet'
-    aliases = ['puppet']
-    filenames = ['*.pp']
-
-    tokens = {
-        'root': [
-            include('comments'),
-            include('keywords'),
-            include('names'),
-            include('numbers'),
-            include('operators'),
-            include('strings'),
-
-            (r'[]{}:(),;[]', Punctuation),
-            (r'[^\S\n]+', Text),
-        ],
-
-        'comments': [
-            (r'\s*#.*$', Comment),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-        ],
-
-        'operators': [
-            (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
-            (r'(in|and|or|not)\b', Operator.Word),
-        ],
-
-        'names': [
-            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute),
-            (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
-                                               String, Punctuation)),
-            (r'\$\S+', Name.Variable),
-        ],
-
-        'numbers': [
-            # Copypasta from the Python lexer
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
-            (r'0[0-7]+j?', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+j?', Number.Integer)
-        ],
-
-        'keywords': [
-            # Left out 'group' and 'require'
-            # Since they're often used as attributes
-            (r'(?i)(absent|alert|alias|audit|augeas|before|case|check|class|'
-             r'computer|configured|contained|create_resources|crit|cron|debug|'
-             r'default|define|defined|directory|else|elsif|emerg|err|exec|'
-             r'extlookup|fail|false|file|filebucket|fqdn_rand|generate|host|if|'
-             r'import|include|info|inherits|inline_template|installed|'
-             r'interface|k5login|latest|link|loglevel|macauthorization|'
-             r'mailalias|maillist|mcx|md5|mount|mounted|nagios_command|'
-             r'nagios_contact|nagios_contactgroup|nagios_host|'
-             r'nagios_hostdependency|nagios_hostescalation|nagios_hostextinfo|'
-             r'nagios_hostgroup|nagios_service|nagios_servicedependency|'
-             r'nagios_serviceescalation|nagios_serviceextinfo|'
-             r'nagios_servicegroup|nagios_timeperiod|node|noop|notice|notify|'
-             r'package|present|purged|realize|regsubst|resources|role|router|'
-             r'running|schedule|scheduled_task|search|selboolean|selmodule|'
-             r'service|sha1|shellquote|split|sprintf|ssh_authorized_key|sshkey|'
-             r'stage|stopped|subscribe|tag|tagged|template|tidy|true|undef|'
-             r'unmounted|user|versioncmp|vlan|warning|yumrepo|zfs|zone|'
-             r'zpool)\b', Keyword),
-        ],
-
-        'strings': [
-            (r'"([^"])*"', String),
-            (r'\'([^\'])*\'', String),
-        ],
-
-    }
-
-
-class NSISLexer(RegexLexer):
-    """
-    For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'NSIS'
-    aliases = ['nsis', 'nsi', 'nsh']
-    filenames = ['*.nsi', '*.nsh']
-    mimetypes = ['text/x-nsis']
-
-    flags = re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'[;\#].*\n', Comment),
-            (r"'.*'", String.Single),
-            (r'"', String.Double, 'str_double'),
-            (r'`', String.Backtick, 'str_backtick'),
-            include('macro'),
-            include('interpol'),
-            include('basic'),
-            (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
-            (r'/[a-z_]\w*', Name.Attribute),
-            ('.', Text),
-        ],
-        'basic': [
-            (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
-             bygroups(Text, Keyword, Text, Name.Function)),
-            (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
-             bygroups(Keyword.Namespace, Punctuation, Name.Function)),
-            (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
-            (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator),
-            (r'[|+-]', Operator),
-            (r'\\', Punctuation),
-            (r'\b(Abort|Add(?:BrandingImage|Size)|'
-             r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
-             r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
-             r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
-             r'ComponentText|CopyFiles|CRCCheck|'
-             r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
-             r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
-             r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
-             r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
-             r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
-             r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
-             r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
-             r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
-             r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
-             r'InstDirError|LabelAddress|TempFileName)|'
-             r'Goto|HideWindow|Icon|'
-             r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
-             r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
-             r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
-             r'IsWindow|LangString(?:UP)?|'
-             r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
-             r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
-             r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
-             r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
-             r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
-             r'Return|RMDir|SearchPath|Section(?:Divider|End|'
-             r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
-             r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
-             r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
-             r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
-             r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
-             r'Silent|StaticBkColor)|'
-             r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
-             r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
-             r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
-             r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
-             r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
-             r'XPStyle)\b', Keyword),
-            (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
-             r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
-             r'HK(CC|CR|CU|DD|LM|PD|U)|'
-             r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
-             r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
-             r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
-             r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
-             r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
-             r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
-             r'YESNO(?:CANCEL)?)|SET|SHCTX|'
-             r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
-             r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
-             r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
-             r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
-             r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
-             r'true|try|user|zlib)\b', Name.Constant),
-        ],
-        'macro': [
-            (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
-             r'delfilefile|echo(?:message)?|else|endif|error|execute|'
-             r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
-             r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
-             r'warning)\b', Comment.Preproc),
-        ],
-        'interpol': [
-            (r'\$(R?[0-9])', Name.Builtin.Pseudo),    # registers
-            (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
-            r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
-            r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
-            r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
-            r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
-            r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
-             Name.Builtin),
-            (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
-            (r'\$[a-z_]\w*', Name.Variable),
-        ],
-        'str_double': [
-            (r'"', String, '#pop'),
-            (r'\$(\\[nrt"]|\$)', String.Escape),
-            include('interpol'),
-            (r'.', String.Double),
-        ],
-        'str_backtick': [
-            (r'`', String, '#pop'),
-            (r'\$(\\[nrt"]|\$)', String.Escape),
-            include('interpol'),
-            (r'.', String.Double),
-        ],
-    }
-
-
-class RPMSpecLexer(RegexLexer):
-    """
-    For RPM *.spec files
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'RPMSpec'
-    aliases = ['spec']
-    filenames = ['*.spec']
-    mimetypes = ['text/x-rpm-spec']
-
-    _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
-                   'post[a-z]*|trigger[a-z]*|files)')
-
-    tokens = {
-        'root': [
-            (r'#.*\n', Comment),
-            include('basic'),
-        ],
-        'description': [
-            (r'^(%' + _directives + ')(.*)$',
-             bygroups(Name.Decorator, Text), '#pop'),
-            (r'\n', Text),
-            (r'.', Text),
-        ],
-        'changelog': [
-            (r'\*.*\n', Generic.Subheading),
-            (r'^(%' + _directives + ')(.*)$',
-             bygroups(Name.Decorator, Text), '#pop'),
-            (r'\n', Text),
-            (r'.', Text),
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            include('interpol'),
-            (r'.', String.Double),
-        ],
-        'basic': [
-            include('macro'),
-            (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
-             r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
-             r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Provides|Conflicts|'
-             r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
-             bygroups(Generic.Heading, Punctuation, using(this))),
-            (r'^%description', Name.Decorator, 'description'),
-            (r'^%changelog', Name.Decorator, 'changelog'),
-            (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
-            (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
-             r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
-             Keyword),
-            include('interpol'),
-            (r"'.*'", String.Single),
-            (r'"', String.Double, 'string'),
-            (r'.', Text),
-        ],
-        'macro': [
-            (r'%define.*\n', Comment.Preproc),
-            (r'%\{\!\?.*%define.*\}', Comment.Preproc),
-            (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
-             bygroups(Comment.Preproc, Text)),
-        ],
-        'interpol': [
-            (r'%\{?__[a-z_]+\}?', Name.Function),
-            (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
-            (r'%\{\?[A-Za-z0-9_]+\}', Name.Variable),
-            (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
-            (r'%\{[a-zA-Z][a-zA-Z0-9_]+\}', Keyword.Constant),
-        ]
-    }
-
-
-class AutoItLexer(RegexLexer):
-    """
-    For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
-
-    AutoIt is a freeware BASIC-like scripting language
-    designed for automating the Windows GUI and general scripting
-
-    *New in Pygments 1.6.*
-    """
-    name = 'AutoIt'
-    aliases = ['autoit', 'Autoit']
-    filenames = ['*.au3']
-    mimetypes = ['text/x-autoit']
-
-    # Keywords, functions, macros from au3.keywords.properties
-    # which can be found in AutoIt installed directory, e.g.
-    # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
-    keywords = """\
-    #include-once #include #endregion #forcedef #forceref #region
-    and byref case continueloop dim do else elseif endfunc endif
-    endselect exit exitloop for func global
-    if local next not or return select step
-    then to until wend while exit""".split()
-
-    functions = """\
-    abs acos adlibregister adlibunregister asc ascw asin assign atan
-    autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
-    binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
-    blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
-    consolewrite consolewriteerror controlclick controlcommand controldisable
-    controlenable controlfocus controlgetfocus controlgethandle controlgetpos
-    controlgettext controlhide controllistview controlmove controlsend
-    controlsettext controlshow controltreeview cos dec dircopy dircreate
-    dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
-    dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
-    dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
-    drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
-    drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
-    drivespacetotal drivestatus envget envset envupdate eval execute exp
-    filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
-    filedelete fileexists filefindfirstfile filefindnextfile fileflush
-    filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
-    filegetshortname filegetsize filegettime filegetversion fileinstall filemove
-    fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
-    filesavedialog fileselectfolder filesetattrib filesetpos filesettime
-    filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
-    guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
-    guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
-    guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
-    guictrlcreateinput guictrlcreatelabel guictrlcreatelist
-    guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
-    guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
-    guictrlcreatepic guictrlcreateprogress guictrlcreateradio
-    guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
-    guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
-    guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
-    guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
-    guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
-    guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
-    guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
-    guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
-    guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
-    guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
-    guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
-    httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
-    inetread inidelete iniread inireadsection inireadsectionnames
-    inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
-    isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
-    isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
-    mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
-    number objcreate objcreateinterface objevent objevent objget objname
-    onautoitexitregister onautoitexitunregister opt ping pixelchecksum
-    pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
-    processgetstats processlist processsetpriority processwait processwaitclose
-    progressoff progresson progressset ptr random regdelete regenumkey
-    regenumval regread regwrite round run runas runaswait runwait send
-    sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
-    sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
-    sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
-    string stringaddcr stringcompare stringformat stringfromasciiarray
-    stringinstr stringisalnum stringisalpha stringisascii stringisdigit
-    stringisfloat stringisint stringislower stringisspace stringisupper
-    stringisxdigit stringleft stringlen stringlower stringmid stringregexp
-    stringregexpreplace stringreplace stringright stringsplit stringstripcr
-    stringstripws stringtoasciiarray stringtobinary stringtrimleft
-    stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
-    tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
-    timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
-    trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
-    trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
-    traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
-    udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
-    winactivate winactive winclose winexists winflash wingetcaretpos
-    wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
-    wingetstate wingettext wingettitle winkill winlist winmenuselectitem
-    winminimizeall winminimizeallundo winmove winsetontop winsetstate
-    winsettitle winsettrans winwait winwaitactive winwaitclose
-    winwaitnotactive""".split()
-
-    macros = """\
-    @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
-    @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
-    @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
-    @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
-    @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
-    @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
-    @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
-    @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
-    @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
-    @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
-    @osversion @programfilesdir @programscommondir @programsdir @scriptdir
-    @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
-    @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
-    @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
-    @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
-    @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
-    @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
-    @windowsdir @workingdir @yday @year""".split()
-
-    tokens = {
-        'root': [
-            (r';.*\n', Comment.Single),
-            (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline),
-            (r'[\[\]{}(),;]', Punctuation),
-            (r'(and|or|not)\b', Operator.Word),
-            (r'[\$|@][a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
-            (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
-            include('commands'),
-            include('labels'),
-            include('builtInFunctions'),
-            include('builtInMarcros'),
-            (r'"', String, combined('stringescape', 'dqs')),
-            include('numbers'),
-            (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
-            (r'\\|\'', Text),
-            (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
-            (r'_\n', Text), # Line continuation
-            include('garbage'),
-        ],
-        'commands': [
-            (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
-            bygroups(Text, Name.Builtin)),
-        ],
-        'builtInFunctions': [
-            (r'(?i)(%s)\b' % '|'.join(functions),
-             Name.Function),
-        ],
-        'builtInMarcros': [
-            (r'(?i)(%s)\b' % '|'.join(macros),
-             Name.Variable.Global),
-        ],
-        'labels': [
-            # sendkeys
-            (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)),
-        ],
-        'numbers': [
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'0\d+', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+L', Number.Integer.Long),
-            (r'\d+', Number.Integer)
-        ],
-        'stringescape': [
-            (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
-        ],
-        'strings': [
-            (r'[^"\n]+', String),
-        ],
-        'dqs': [
-            (r'"', String, '#pop'),
-            include('strings')
-        ],
-        'garbage': [
-            (r'[^\S\n]', Text),
-        ],
-    }
diff --git a/python/ext-libs/pygments/lexers/parsers.py b/python/ext-libs/pygments/lexers/parsers.py
deleted file mode 100644
index c1ad710..0000000
--- a/python/ext-libs/pygments/lexers/parsers.py
+++ /dev/null
@@ -1,778 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.parsers
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for parser generators.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, \
-    include, bygroups, using
-from pygments.token import Punctuation, Other, Text, Comment, Operator, \
-     Keyword, Name, String, Number, Whitespace
-from pygments.lexers.compiled import JavaLexer, CLexer, CppLexer, \
-    ObjectiveCLexer, DLexer
-from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.agile import RubyLexer, PythonLexer, PerlLexer
-from pygments.lexers.web import ActionScriptLexer
-
-
-__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
-           'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
-           'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
-           'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
-           #'AntlrCLexer',
-           'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
-           'AntlrJavaLexer', "AntlrActionScriptLexer",
-           'TreetopLexer']
-
-
-class RagelLexer(RegexLexer):
-    """
-    A pure `Ragel <http://www.complang.org/ragel/>`_ lexer.  Use this for
-    fragments of Ragel.  For ``.rl`` files, use RagelEmbeddedLexer instead
-    (or one of the language-specific subclasses).
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel'
-    aliases = ['ragel']
-    filenames = []
-
-    tokens = {
-        'whitespace': [
-            (r'\s+', Whitespace)
-        ],
-        'comments': [
-            (r'\#.*$', Comment),
-        ],
-        'keywords': [
-            (r'(access|action|alphtype)\b', Keyword),
-            (r'(getkey|write|machine|include)\b', Keyword),
-            (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
-            (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
-        ],
-        'numbers': [
-            (r'0x[0-9A-Fa-f]+', Number.Hex),
-            (r'[+-]?[0-9]+', Number.Integer),
-        ],
-        'literals': [
-            (r'"(\\\\|\\"|[^"])*"', String), # double quote string
-            (r"'(\\\\|\\'|[^'])*'", String), # single quote string
-            (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
-            (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
-        ],
-        'identifiers': [
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
-        ],
-        'operators': [
-            (r',', Operator), # Join
-            (r'\||&|--?', Operator), # Union, Intersection and Subtraction
-            (r'\.|<:|:>>?', Operator), # Concatention
-            (r':', Operator), # Label
-            (r'->', Operator), # Epsilon Transition
-            (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
-            (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
-            (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
-            (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
-            (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
-            (r'>|@|\$|%', Operator), # Transition Actions and Priorities
-            (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition
-            (r'!|\^', Operator), # Negation
-            (r'\(|\)', Operator), # Grouping
-        ],
-        'root': [
-            include('literals'),
-            include('whitespace'),
-            include('comments'),
-            include('keywords'),
-            include('numbers'),
-            include('identifiers'),
-            include('operators'),
-            (r'{', Punctuation, 'host'),
-            (r'=', Operator),
-            (r';', Punctuation),
-        ],
-        'host': [
-            (r'(' + r'|'.join(( # keep host code in largest possible chunks
-                r'[^{}\'"/#]+', # exclude unsafe characters
-                r'[^\\][\\][{}]', # allow escaped { or }
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\"|[^"])*"', # double quote string
-                r"'(\\\\|\\'|[^'])*'", # single quote string
-                r'//.*$\n?', # single line comment
-                r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-                r'\#.*$\n?', # ruby comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\/|[^/])*/',
-
-                # / is safe now that we've handled regex and javadoc comments
-                r'/',
-            )) + r')+', Other),
-
-            (r'{', Punctuation, '#push'),
-            (r'}', Punctuation, '#pop'),
-        ],
-    }
-
-
-class RagelEmbeddedLexer(RegexLexer):
-    """
-    A lexer for `Ragel`_ embedded in a host language file.
-
-    This will only highlight Ragel statements. If you want host language
-    highlighting then call the language-specific Ragel lexer.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Embedded Ragel'
-    aliases = ['ragel-em']
-    filenames = ['*.rl']
-
-    tokens = {
-        'root': [
-            (r'(' + r'|'.join(( # keep host code in largest possible chunks
-                r'[^%\'"/#]+', # exclude unsafe characters
-                r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\"|[^"])*"', # double quote string
-                r"'(\\\\|\\'|[^'])*'", # single quote string
-                r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-                r'//.*$\n?', # single line comment
-                r'\#.*$\n?', # ruby/ragel comment
-                r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
-
-                # / is safe now that we've handled regex and javadoc comments
-                r'/',
-            )) + r')+', Other),
-
-            # Single Line FSM.
-            # Please don't put a quoted newline in a single line FSM.
-            # That's just mean. It will break this.
-            (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
-                                                     using(RagelLexer),
-                                                     Punctuation, Text)),
-
-            # Multi Line FSM.
-            (r'(%%%%|%%){', Punctuation, 'multi-line-fsm'),
-        ],
-        'multi-line-fsm': [
-            (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
-                r'(' + r'|'.join((
-                    r'[^}\'"\[/#]', # exclude unsafe characters
-                    r'}(?=[^%]|$)', # } is okay as long as it's not followed by %
-                    r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
-                    r'[^\\][\\][{}]', # ...and } is okay if it's escaped
-
-                    # allow / if it's preceded with one of these symbols
-                    # (ragel EOF actions)
-                    r'(>|\$|%|<|@|<>)/',
-
-                    # specifically allow regex followed immediately by *
-                    # so it doesn't get mistaken for a comment
-                    r'/(?!\*)(\\\\|\\/|[^/])*/\*',
-
-                    # allow / as long as it's not followed by another / or by a *
-                    r'/(?=[^/\*]|$)',
-
-                    # We want to match as many of these as we can in one block.
-                    # Not sure if we need the + sign here,
-                    # does it help performance?
-                    )) + r')+',
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\"|[^"])*"', # double quote string
-                r"'(\\\\|\\'|[^'])*'", # single quote string
-                r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
-                r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-                r'//.*$\n?', # single line comment
-                r'\#.*$\n?', # ruby/ragel comment
-            )) + r')+', using(RagelLexer)),
-
-            (r'}%%', Punctuation, '#pop'),
-        ]
-    }
-
-    def analyse_text(text):
-        return '@LANG: indep' in text or 0.1
-
-
-class RagelRubyLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in a Ruby host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in Ruby Host'
-    aliases = ['ragel-ruby', 'ragel-rb']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
-                                              **options)
-
-    def analyse_text(text):
-        return '@LANG: ruby' in text
-
-
-class RagelCLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in a C host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in C Host'
-    aliases = ['ragel-c']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelCLexer, self).__init__(CLexer, RagelEmbeddedLexer,
-                                          **options)
-
-    def analyse_text(text):
-        return '@LANG: c' in text
-
-
-class RagelDLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in a D host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in D Host'
-    aliases = ['ragel-d']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: d' in text
-
-
-class RagelCppLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in a CPP host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in CPP Host'
-    aliases = ['ragel-cpp']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: c++' in text
-
-
-class RagelObjectiveCLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in an Objective C host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in Objective C Host'
-    aliases = ['ragel-objc']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelObjectiveCLexer, self).__init__(ObjectiveCLexer,
-                                                   RagelEmbeddedLexer,
-                                                   **options)
-
-    def analyse_text(text):
-        return '@LANG: objc' in text
-
-
-class RagelJavaLexer(DelegatingLexer):
-    """
-    A lexer for `Ragel`_ in a Java host file.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Ragel in Java Host'
-    aliases = ['ragel-java']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super(RagelJavaLexer, self).__init__(JavaLexer, RagelEmbeddedLexer,
-                                             **options)
-
-    def analyse_text(text):
-        return '@LANG: java' in text
-
-
-class AntlrLexer(RegexLexer):
-    """
-    Generic `ANTLR`_ Lexer.
-    Should not be called directly, instead
-    use DelegatingLexer for your target language.
-
-    *New in Pygments 1.1.*
-
-    .. _ANTLR: http://www.antlr.org/
-    """
-
-    name = 'ANTLR'
-    aliases = ['antlr']
-    filenames = []
-
-    _id =          r'[A-Za-z][A-Za-z_0-9]*'
-    _TOKEN_REF =   r'[A-Z][A-Za-z_0-9]*'
-    _RULE_REF =    r'[a-z][A-Za-z_0-9]*'
-    _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
-    _INT = r'[0-9]+'
-
-    tokens = {
-        'whitespace': [
-            (r'\s+', Whitespace),
-        ],
-        'comments': [
-            (r'//.*$', Comment),
-            (r'/\*(.|\n)*?\*/', Comment),
-        ],
-        'root': [
-            include('whitespace'),
-            include('comments'),
-
-            (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
-             bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
-                      Punctuation)),
-            # optionsSpec
-            (r'options\b', Keyword, 'options'),
-            # tokensSpec
-            (r'tokens\b', Keyword, 'tokens'),
-            # attrScope
-            (r'(scope)(\s*)(' + _id + ')(\s*)({)',
-             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
-                      Punctuation), 'action'),
-            # exception
-            (r'(catch|finally)\b', Keyword, 'exception'),
-            # action
-            (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)({)',
-             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
-                      Name.Label, Whitespace, Punctuation), 'action'),
-            # rule
-            (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', \
-             bygroups(Keyword, Whitespace, Name.Label, Punctuation),
-             ('rule-alts', 'rule-prelims')),
-        ],
-        'exception': [
-            (r'\n', Whitespace, '#pop'),
-            (r'\s', Whitespace),
-            include('comments'),
-
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-        ],
-        'rule-prelims': [
-            include('whitespace'),
-            include('comments'),
-
-            (r'returns\b', Keyword),
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-            # throwsSpec
-            (r'(throws)(\s+)(' + _id + ')',
-             bygroups(Keyword, Whitespace, Name.Label)),
-            (r'(,)(\s*)(' + _id + ')',
-             bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
-            # optionsSpec
-            (r'options\b', Keyword, 'options'),
-            # ruleScopeSpec - scope followed by target language code or name of action
-            # TODO finish implementing other possibilities for scope
-            # L173 ANTLRv3.g from ANTLR book
-            (r'(scope)(\s+)({)', bygroups(Keyword, Whitespace, Punctuation),
-            'action'),
-            (r'(scope)(\s+)(' + _id + ')(\s*)(;)',
-             bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
-            # ruleAction
-            (r'(@' + _id + ')(\s*)({)',
-             bygroups(Name.Label, Whitespace, Punctuation), 'action'),
-            # finished prelims, go to rule alts!
-            (r':', Punctuation, '#pop')
-        ],
-        'rule-alts': [
-            include('whitespace'),
-            include('comments'),
-
-            # These might need to go in a separate 'block' state triggered by (
-            (r'options\b', Keyword, 'options'),
-            (r':', Punctuation),
-
-            # literals
-            (r"'(\\\\|\\'|[^'])*'", String),
-            (r'"(\\\\|\\"|[^"])*"', String),
-            (r'<<([^>]|>[^>])>>', String),
-            # identifiers
-            # Tokens start with capital letter.
-            (r'\$?[A-Z_][A-Za-z_0-9]*', Name.Constant),
-            # Rules start with small letter.
-            (r'\$?[a-z_][A-Za-z_0-9]*', Name.Variable),
-            # operators
-            (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
-            (r',', Punctuation),
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-            (r';', Punctuation, '#pop')
-        ],
-        'tokens': [
-            include('whitespace'),
-            include('comments'),
-            (r'{', Punctuation),
-            (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
-             + ')?(\s*)(;)',
-             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
-                      String, Whitespace, Punctuation)),
-            (r'}', Punctuation, '#pop'),
-        ],
-        'options': [
-            include('whitespace'),
-            include('comments'),
-            (r'{', Punctuation),
-            (r'(' + _id + r')(\s*)(=)(\s*)(' +
-             '|'.join((_id, _STRING_LITERAL, _INT, '\*'))+ ')(\s*)(;)',
-             bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
-                      Text, Whitespace, Punctuation)),
-            (r'}', Punctuation, '#pop'),
-        ],
-        'action': [
-            (r'(' + r'|'.join(( # keep host code in largest possible chunks
-                r'[^\${}\'"/\\]+', # exclude unsafe characters
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\"|[^"])*"', # double quote string
-                r"'(\\\\|\\'|[^'])*'", # single quote string
-                r'//.*$\n?', # single line comment
-                r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\/|[^/])*/',
-
-                # backslashes are okay, as long as we are not backslashing a %
-                r'\\(?!%)',
-
-                # Now that we've handled regex and javadoc comments
-                # it's safe to let / through.
-                r'/',
-            )) + r')+', Other),
-            (r'(\\)(%)', bygroups(Punctuation, Other)),
-            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
-             bygroups(Name.Variable, Punctuation, Name.Property)),
-            (r'{', Punctuation, '#push'),
-            (r'}', Punctuation, '#pop'),
-        ],
-        'nested-arg-action': [
-            (r'(' + r'|'.join(( # keep host code in largest possible chunks.
-                r'[^\$\[\]\'"/]+', # exclude unsafe characters
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\"|[^"])*"', # double quote string
-                r"'(\\\\|\\'|[^'])*'", # single quote string
-                r'//.*$\n?', # single line comment
-                r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\/|[^/])*/',
-
-                # Now that we've handled regex and javadoc comments
-                # it's safe to let / through.
-                r'/',
-            )) + r')+', Other),
-
-
-            (r'\[', Punctuation, '#push'),
-            (r'\]', Punctuation, '#pop'),
-            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
-             bygroups(Name.Variable, Punctuation, Name.Property)),
-            (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
-        ]
-    }
-
-    def analyse_text(text):
-        return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
-# TH: I'm not aware of any language features of C++ that will cause
-# incorrect lexing of C files.  Antlr doesn't appear to make a distinction,
-# so just assume they're C++.  No idea how to make Objective C work in the
-# future.
-
-#class AntlrCLexer(DelegatingLexer):
-#    """
-#    ANTLR with C Target
-#
-#    *New in Pygments 1.1*
-#    """
-#
-#    name = 'ANTLR With C Target'
-#    aliases = ['antlr-c']
-#    filenames = ['*.G', '*.g']
-#
-#    def __init__(self, **options):
-#        super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options)
-#
-#    def analyse_text(text):
-#        return re.match(r'^\s*language\s*=\s*C\s*;', text)
-
-class AntlrCppLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with CPP Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With CPP Target'
-    aliases = ['antlr-cpp']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrCppLexer, self).__init__(CppLexer, AntlrLexer, **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
-
-
-class AntlrObjectiveCLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with Objective-C Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With ObjectiveC Target'
-    aliases = ['antlr-objc']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer,
-                                                   AntlrLexer, **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
-
-
-class AntlrCSharpLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with C# Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With C# Target'
-    aliases = ['antlr-csharp', 'antlr-c#']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrCSharpLexer, self).__init__(CSharpLexer, AntlrLexer,
-                                               **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
-
-
-class AntlrPythonLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with Python Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With Python Target'
-    aliases = ['antlr-python']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrPythonLexer, self).__init__(PythonLexer, AntlrLexer,
-                                               **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
-
-
-class AntlrJavaLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with Java Target
-
-    *New in Pygments 1.1*
-    """
-
-    name = 'ANTLR With Java Target'
-    aliases = ['antlr-java']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrJavaLexer, self).__init__(JavaLexer, AntlrLexer,
-                                             **options)
-
-    def analyse_text(text):
-        # Antlr language is Java by default
-        return AntlrLexer.analyse_text(text) and 0.9
-
-
-class AntlrRubyLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with Ruby Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With Ruby Target'
-    aliases = ['antlr-ruby', 'antlr-rb']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrRubyLexer, self).__init__(RubyLexer, AntlrLexer,
-                                             **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
-
-
-class AntlrPerlLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with Perl Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With Perl Target'
-    aliases = ['antlr-perl']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrPerlLexer, self).__init__(PerlLexer, AntlrLexer,
-                                             **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
-
-
-class AntlrActionScriptLexer(DelegatingLexer):
-    """
-    `ANTLR`_ with ActionScript Target
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'ANTLR With ActionScript Target'
-    aliases = ['antlr-as', 'antlr-actionscript']
-    filenames = ['*.G', '*.g']
-
-    def __init__(self, **options):
-        super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
-                                                     AntlrLexer, **options)
-
-    def analyse_text(text):
-        return AntlrLexer.analyse_text(text) and \
-               re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
-
-class TreetopBaseLexer(RegexLexer):
-    """
-    A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-    Not for direct use; use TreetopLexer instead.
-
-    *New in Pygments 1.6.*
-    """
-
-    tokens = {
-        'root': [
-            include('space'),
-            (r'require[ \t]+[^\n\r]+[\n\r]', Other),
-            (r'module\b', Keyword.Namespace, 'module'),
-            (r'grammar\b', Keyword, 'grammar'),
-        ],
-        'module': [
-            include('space'),
-            include('end'),
-            (r'module\b', Keyword, '#push'),
-            (r'grammar\b', Keyword, 'grammar'),
-            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Namespace),
-        ],
-        'grammar': [
-            include('space'),
-            include('end'),
-            (r'rule\b', Keyword, 'rule'),
-            (r'include\b', Keyword, 'include'),
-            (r'[A-Z][A-Za-z_0-9]*', Name),
-        ],
-        'include': [
-            include('space'),
-            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Class, '#pop'),
-        ],
-        'rule': [
-            include('space'),
-            include('end'),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'([A-Za-z_][A-Za-z_0-9]*)(:)', bygroups(Name.Label, Punctuation)),
-            (r'[A-Za-z_][A-Za-z_0-9]*', Name),
-            (r'[()]', Punctuation),
-            (r'[?+*/&!~]', Operator),
-            (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
-            (r'([0-9]*)(\.\.)([0-9]*)',
-             bygroups(Number.Integer, Operator, Number.Integer)),
-            (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
-            (r'{', Punctuation, 'inline_module'),
-            (r'\.', String.Regex),
-        ],
-        'inline_module': [
-            (r'{', Other, 'ruby'),
-            (r'}', Punctuation, '#pop'),
-            (r'[^{}]+', Other),
-        ],
-        'ruby': [
-            (r'{', Other, '#push'),
-            (r'}', Other, '#pop'),
-            (r'[^{}]+', Other),
-        ],
-        'space': [
-            (r'[ \t\n\r]+', Whitespace),
-            (r'#[^\n]*', Comment.Single),
-        ],
-        'end': [
-            (r'end\b', Keyword, '#pop'),
-        ],
-    }
-
-class TreetopLexer(DelegatingLexer):
-    """
-    A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Treetop'
-    aliases = ['treetop']
-    filenames = ['*.treetop', '*.tt']
-
-    def __init__(self, **options):
-        super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
diff --git a/python/ext-libs/pygments/lexers/shell.py b/python/ext-libs/pygments/lexers/shell.py
deleted file mode 100644
index b95faf9..0000000
--- a/python/ext-libs/pygments/lexers/shell.py
+++ /dev/null
@@ -1,410 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.shell
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for various shells.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
-from pygments.token import Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
-
-
-__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
-           'PowerShellLexer', 'ShellSessionLexer']
-
-line_re  = re.compile('.*?\n')
-
-
-class BashLexer(RegexLexer):
-    """
-    Lexer for (ba|k|)sh shell scripts.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'Bash'
-    aliases = ['bash', 'sh', 'ksh']
-    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
-                 '.bashrc', 'bashrc', '.bash_*', 'bash_*']
-    mimetypes = ['application/x-sh', 'application/x-shellscript']
-
-    tokens = {
-        'root': [
-            include('basic'),
-            (r'\$\(\(', Keyword, 'math'),
-            (r'\$\(', Keyword, 'paren'),
-            (r'\${#?', Keyword, 'curly'),
-            (r'`', String.Backtick, 'backticks'),
-            include('data'),
-        ],
-        'basic': [
-            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
-             r'select|continue|until|esac|elif)\s*\b',
-             Keyword),
-            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
-             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
-             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
-             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
-             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
-             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
-             Name.Builtin),
-            (r'#.*\n', Comment),
-            (r'\\[\w\W]', String.Escape),
-            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
-            (r'[\[\]{}()=]', Operator),
-            (r'<<<', Operator),  # here-string
-            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
-            (r'&&|\|\|', Operator),
-        ],
-        'data': [
-            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
-            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-            (r';', Text),
-            (r'\s+', Text),
-            (r'[^=\s\[\]{}()$"\'`\\<]+', Text),
-            (r'\d+(?= |\Z)', Number),
-            (r'\$#?(\w+|.)', Name.Variable),
-            (r'<', Text),
-        ],
-        'curly': [
-            (r'}', Keyword, '#pop'),
-            (r':-', Keyword),
-            (r'[a-zA-Z0-9_]+', Name.Variable),
-            (r'[^}:"\'`$]+', Punctuation),
-            (r':', Punctuation),
-            include('root'),
-        ],
-        'paren': [
-            (r'\)', Keyword, '#pop'),
-            include('root'),
-        ],
-        'math': [
-            (r'\)\)', Keyword, '#pop'),
-            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
-            (r'\d+', Number),
-            include('root'),
-        ],
-        'backticks': [
-            (r'`', String.Backtick, '#pop'),
-            include('root'),
-        ],
-    }
-
-    def analyse_text(text):
-        return shebang_matches(text, r'(ba|z|)sh')
-
-
-class BashSessionLexer(Lexer):
-    """
-    Lexer for simplistic shell sessions.
-
-    *New in Pygments 1.1.*
-    """
-
-    name = 'Bash Session'
-    aliases = ['console']
-    filenames = ['*.sh-session']
-    mimetypes = ['application/x-shell-session']
-
-    def get_tokens_unprocessed(self, text):
-        bashlexer = BashLexer(**self.options)
-
-        pos = 0
-        curcode = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-            m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
-                          r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)' , line)
-            if m:
-                # To support output lexers (say diff output), the output
-                # needs to be broken by prompts whenever the output lexer
-                # changes.
-                if not insertions:
-                    pos = match.start()
-
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, m.group(1))]))
-                curcode += m.group(2)
-            elif line.startswith('>'):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:1])]))
-                curcode += line[1:]
-            else:
-                if insertions:
-                    toks = bashlexer.get_tokens_unprocessed(curcode)
-                    for i, t, v in do_insertions(insertions, toks):
-                        yield pos+i, t, v
-                yield match.start(), Generic.Output, line
-                insertions = []
-                curcode = ''
-        if insertions:
-            for i, t, v in do_insertions(insertions,
-                                         bashlexer.get_tokens_unprocessed(curcode)):
-                yield pos+i, t, v
-
-
-class ShellSessionLexer(Lexer):
-    """
-    Lexer for shell sessions that works with different command prompts
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Shell Session'
-    aliases = ['shell-session']
-    filenames = ['*.shell-session']
-    mimetypes = ['application/x-sh-session']
-
-    def get_tokens_unprocessed(self, text):
-        bashlexer = BashLexer(**self.options)
-
-        pos = 0
-        curcode = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-            m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
-            if m:
-                # To support output lexers (say diff output), the output
-                # needs to be broken by prompts whenever the output lexer
-                # changes.
-                if not insertions:
-                    pos = match.start()
-
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, m.group(1))]))
-                curcode += m.group(2)
-            else:
-                if insertions:
-                    toks = bashlexer.get_tokens_unprocessed(curcode)
-                    for i, t, v in do_insertions(insertions, toks):
-                        yield pos+i, t, v
-                yield match.start(), Generic.Output, line
-                insertions = []
-                curcode = ''
-        if insertions:
-            for i, t, v in do_insertions(insertions,
-                                         bashlexer.get_tokens_unprocessed(curcode)):
-                yield pos+i, t, v
-
-
-class BatchLexer(RegexLexer):
-    """
-    Lexer for the DOS/Windows Batch file format.
-
-    *New in Pygments 0.7.*
-    """
-    name = 'Batchfile'
-    aliases = ['bat']
-    filenames = ['*.bat', '*.cmd']
-    mimetypes = ['application/x-dos-batch']
-
-    flags = re.MULTILINE | re.IGNORECASE
-
-    tokens = {
-        'root': [
-            # Lines can start with @ to prevent echo
-            (r'^\s*@', Punctuation),
-            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
-            (r'".*?"', String.Double),
-            (r"'.*?'", String.Single),
-            # If made more specific, make sure you still allow expansions
-            # like %~$VAR:zlt
-            (r'%%?[~$:\w]+%?', Name.Variable),
-            (r'::.*', Comment), # Technically :: only works at BOL
-            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
-            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
-            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
-            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
-             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
-             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
-            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
-            include('basic'),
-            (r'.', Text),
-        ],
-        'echo': [
-            # Escapes only valid within echo args?
-            (r'\^\^|\^<|\^>|\^\|', String.Escape),
-            (r'\n', Text, '#pop'),
-            include('basic'),
-            (r'[^\'"^]+', Text),
-        ],
-        'basic': [
-            (r'".*?"', String.Double),
-            (r"'.*?'", String.Single),
-            (r'`.*?`', String.Backtick),
-            (r'-?\d+', Number),
-            (r',', Punctuation),
-            (r'=', Operator),
-            (r'/\S+', Name),
-            (r':\w+', Name.Label),
-            (r'\w:\w+', Text),
-            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
-        ],
-    }
-
-
-class TcshLexer(RegexLexer):
-    """
-    Lexer for tcsh scripts.
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'Tcsh'
-    aliases = ['tcsh', 'csh']
-    filenames = ['*.tcsh', '*.csh']
-    mimetypes = ['application/x-csh']
-
-    tokens = {
-        'root': [
-            include('basic'),
-            (r'\$\(', Keyword, 'paren'),
-            (r'\${#?', Keyword, 'curly'),
-            (r'`', String.Backtick, 'backticks'),
-            include('data'),
-        ],
-        'basic': [
-            (r'\b(if|endif|else|while|then|foreach|case|default|'
-             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
-             Keyword),
-            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
-             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
-             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
-             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
-             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
-             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
-             r'source|stop|suspend|source|suspend|telltc|time|'
-             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
-             r'ver|wait|warp|watchlog|where|which)\s*\b',
-             Name.Builtin),
-            (r'#.*\n', Comment),
-            (r'\\[\w\W]', String.Escape),
-            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
-            (r'[\[\]{}()=]+', Operator),
-            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
-        ],
-        'data': [
-            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
-            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-            (r'\s+', Text),
-            (r'[^=\s\[\]{}()$"\'`\\]+', Text),
-            (r'\d+(?= |\Z)', Number),
-            (r'\$#?(\w+|.)', Name.Variable),
-        ],
-        'curly': [
-            (r'}', Keyword, '#pop'),
-            (r':-', Keyword),
-            (r'[a-zA-Z0-9_]+', Name.Variable),
-            (r'[^}:"\'`$]+', Punctuation),
-            (r':', Punctuation),
-            include('root'),
-        ],
-        'paren': [
-            (r'\)', Keyword, '#pop'),
-            include('root'),
-        ],
-        'backticks': [
-            (r'`', String.Backtick, '#pop'),
-            include('root'),
-        ],
-    }
-
-
-class PowerShellLexer(RegexLexer):
-    """
-    For Windows PowerShell code.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'PowerShell'
-    aliases = ['powershell', 'posh', 'ps1']
-    filenames = ['*.ps1']
-    mimetypes = ['text/x-powershell']
-
-    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
-
-    keywords = (
-        'while validateset validaterange validatepattern validatelength '
-        'validatecount until trap switch return ref process param parameter in '
-        'if global: function foreach for finally filter end elseif else '
-        'dynamicparam do default continue cmdletbinding break begin alias \\? '
-        '% #script #private #local #global mandatory parametersetname position '
-        'valuefrompipeline valuefrompipelinebypropertyname '
-        'valuefromremainingarguments helpmessage try catch').split()
-
-    operators = (
-        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
-        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
-        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
-        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
-        'lt match ne not notcontains notlike notmatch or regex replace '
-        'wildcard').split()
-
-    verbs = (
-        'write where wait use update unregister undo trace test tee take '
-        'suspend stop start split sort skip show set send select scroll resume '
-        'restore restart resolve resize reset rename remove register receive '
-        'read push pop ping out new move measure limit join invoke import '
-        'group get format foreach export expand exit enter enable disconnect '
-        'disable debug cxnew copy convertto convertfrom convert connect '
-        'complete compare clear checkpoint aggregate add').split()
-
-    commenthelp = (
-        'component description example externalhelp forwardhelpcategory '
-        'forwardhelptargetname functionality inputs link '
-        'notes outputs parameter remotehelprunspace role synopsis').split()
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
-             bygroups(Comment, String.Doc, Comment)),
-            (r'#[^\n]*?$', Comment),
-            (r'(<|<)#', Comment.Multiline, 'multline'),
-            (r'@"\n.*?\n"@', String.Heredoc),
-            (r"@'\n.*?\n'@", String.Heredoc),
-            # escaped syntax
-            (r'`[\'"$@-]', Punctuation),
-            (r'"', String.Double, 'string'),
-            (r"'([^']|'')*'", String.Single),
-            (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
-             Name.Variable),
-            (r'(%s)\b' % '|'.join(keywords), Keyword),
-            (r'-(%s)\b' % '|'.join(operators), Operator),
-            (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
-            (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant),  # .net [type]s
-            (r'-[a-z_][a-z0-9_]*', Name),
-            (r'\w+', Name),
-            (r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
-        ],
-        'multline': [
-            (r'[^#&.]+', Comment.Multiline),
-            (r'#(>|>)', Comment.Multiline, '#pop'),
-            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
-            (r'[#&.]', Comment.Multiline),
-        ],
-        'string': [
-            (r'[^$`"]+', String.Double),
-            (r'\$\(', String.Interpol, 'interpol'),
-            (r'`"|""', String.Double),
-            (r'[`$]', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'interpol': [
-            (r'[^$)]+', String.Interpol),
-            (r'\$\(', String.Interpol, '#push'),
-            (r'\)', String.Interpol, '#pop'),
-        ]
-    }
diff --git a/python/ext-libs/pygments/lexers/special.py b/python/ext-libs/pygments/lexers/special.py
deleted file mode 100644
index 9b3cd50..0000000
--- a/python/ext-libs/pygments/lexers/special.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.special
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Special lexers.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-import cStringIO
-
-from pygments.lexer import Lexer
-from pygments.token import Token, Error, Text
-from pygments.util import get_choice_opt, b
-
-
-__all__ = ['TextLexer', 'RawTokenLexer']
-
-
-class TextLexer(Lexer):
-    """
-    "Null" lexer, doesn't highlight anything.
-    """
-    name = 'Text only'
-    aliases = ['text']
-    filenames = ['*.txt']
-    mimetypes = ['text/plain']
-
-    def get_tokens_unprocessed(self, text):
-        yield 0, Text, text
-
-
-_ttype_cache = {}
-
-line_re = re.compile(b('.*?\n'))
-
-class RawTokenLexer(Lexer):
-    """
-    Recreate a token stream formatted with the `RawTokenFormatter`.  This
-    lexer raises exceptions during parsing if the token stream in the
-    file is malformed.
-
-    Additional options accepted:
-
-    `compress`
-        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
-        the given compression algorithm before lexing (default: ``""``).
-    """
-    name = 'Raw token data'
-    aliases = ['raw']
-    filenames = []
-    mimetypes = ['application/x-pygments-tokens']
-
-    def __init__(self, **options):
-        self.compress = get_choice_opt(options, 'compress',
-                                       ['', 'none', 'gz', 'bz2'], '')
-        Lexer.__init__(self, **options)
-
-    def get_tokens(self, text):
-        if isinstance(text, unicode):
-            # raw token stream never has any non-ASCII characters
-            text = text.encode('ascii')
-        if self.compress == 'gz':
-            import gzip
-            gzipfile = gzip.GzipFile('', 'rb', 9, cStringIO.StringIO(text))
-            text = gzipfile.read()
-        elif self.compress == 'bz2':
-            import bz2
-            text = bz2.decompress(text)
-
-        # do not call Lexer.get_tokens() because we do not want Unicode
-        # decoding to occur, and stripping is not optional.
-        text = text.strip(b('\n')) + b('\n')
-        for i, t, v in self.get_tokens_unprocessed(text):
-            yield t, v
-
-    def get_tokens_unprocessed(self, text):
-        length = 0
-        for match in line_re.finditer(text):
-            try:
-                ttypestr, val = match.group().split(b('\t'), 1)
-            except ValueError:
-                val = match.group().decode(self.encoding)
-                ttype = Error
-            else:
-                ttype = _ttype_cache.get(ttypestr)
-                if not ttype:
-                    ttype = Token
-                    ttypes = ttypestr.split('.')[1:]
-                    for ttype_ in ttypes:
-                        if not ttype_ or not ttype_[0].isupper():
-                            raise ValueError('malformed token name')
-                        ttype = getattr(ttype, ttype_)
-                    _ttype_cache[ttypestr] = ttype
-                val = val[2:-2].decode('unicode-escape')
-            yield length, ttype, val
-            length += len(val)
diff --git a/python/ext-libs/pygments/lexers/sql.py b/python/ext-libs/pygments/lexers/sql.py
deleted file mode 100644
index dcfd8fa..0000000
--- a/python/ext-libs/pygments/lexers/sql.py
+++ /dev/null
@@ -1,559 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.sql
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for various SQL dialects and related interactive sessions.
-
-    Postgres specific lexers:
-
-    `PostgresLexer`
-        A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
-        lexer are:
-
-        - keywords and data types list parsed from the PG docs (run the
-          `_postgres_builtins` module to update them);
-        - Content of $-strings parsed using a specific lexer, e.g. the content
-          of a PL/Python function is parsed using the Python lexer;
-        - parse PG specific constructs: E-strings, $-strings, U&-strings,
-          different operators and punctuation.
-
-    `PlPgsqlLexer`
-        A lexer for the PL/pgSQL language. Adds a few specific construct on
-        top of the PG SQL lexer (such as <<label>>).
-
-    `PostgresConsoleLexer`
-        A lexer to highlight an interactive psql session:
-
-        - identifies the prompt and does its best to detect the end of command
-          in multiline statement where not all the lines are prefixed by a
-          prompt, telling them apart from the output;
-        - highlights errors in the output and notification levels;
-        - handles psql backslash commands.
-
-    The ``tests/examplefiles`` contains a few test files with data to be
-    parsed by these lexers.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
-from pygments.token import Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.lexers import get_lexer_by_name, ClassNotFound
-
-from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
-     PSEUDO_TYPES, PLPGSQL_KEYWORDS
-
-
-__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
-           'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
-
-line_re  = re.compile('.*?\n')
-
-language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
-def language_callback(lexer, match):
-    """Parse the content of a $-string using a lexer
-
-    The lexer is chosen looking for a nearby LANGUAGE.
-    """
-    l = None
-    m = language_re.match(lexer.text[match.end():match.end()+100])
-    if m is not None:
-        l = lexer._get_lexer(m.group(1))
-    else:
-        m = list(language_re.finditer(
-            lexer.text[max(0, match.start()-100):match.start()]))
-        if m:
-            l = lexer._get_lexer(m[-1].group(1))
-
-    if l:
-        yield (match.start(1), String, match.group(1))
-        for x in l.get_tokens_unprocessed(match.group(2)):
-            yield x
-        yield (match.start(3), String, match.group(3))
-
-    else:
-        yield (match.start(), String, match.group())
-
-
-class PostgresBase(object):
-    """Base class for Postgres-related lexers.
-
-    This is implemented as a mixin to avoid the Lexer metaclass kicking in.
-    this way the different lexer don't have a common Lexer ancestor. If they
-    had, _tokens could be created on this ancestor and not updated for the
-    other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
-    seem to suggest that regexp lexers are not really subclassable.
-    """
-    def get_tokens_unprocessed(self, text, *args):
-        # Have a copy of the entire text to be used by `language_callback`.
-        self.text = text
-        for x in super(PostgresBase, self).get_tokens_unprocessed(
-                text, *args):
-            yield x
-
-    def _get_lexer(self, lang):
-        if lang.lower() == 'sql':
-            return get_lexer_by_name('postgresql', **self.options)
-
-        tries = [ lang ]
-        if lang.startswith('pl'):
-            tries.append(lang[2:])
-        if lang.endswith('u'):
-            tries.append(lang[:-1])
-        if lang.startswith('pl') and lang.endswith('u'):
-            tries.append(lang[2:-1])
-
-        for l in tries:
-            try:
-                return get_lexer_by_name(l, **self.options)
-            except ClassNotFound:
-                pass
-        else:
-            # TODO: better logging
-            # print >>sys.stderr, "language not found:", lang
-            return None
-
-
-class PostgresLexer(PostgresBase, RegexLexer):
-    """
-    Lexer for the PostgreSQL dialect of SQL.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'PostgreSQL SQL dialect'
-    aliases = ['postgresql', 'postgres']
-    mimetypes = ['text/x-postgresql']
-
-    flags = re.IGNORECASE
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'--.*?\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'(' + '|'.join([s.replace(" ", "\s+")
-                for s in DATATYPES + PSEUDO_TYPES])
-                  + r')\b', Name.Builtin),
-            (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
-            (r'[+*/<>=~!@#%^&|`?-]+', Operator),
-            (r'::', Operator),  # cast
-            (r'\$\d+', Name.Variable),
-            (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
-            (r'[0-9]+', Number.Integer),
-            (r"(E|U&)?'(''|[^'])*'", String.Single),
-            (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
-            (r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-
-            # psql variable in SQL
-            (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
-
-            (r'[;:()\[\]\{\},\.]', Punctuation),
-        ],
-        'multiline-comments': [
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[^/\*]+', Comment.Multiline),
-            (r'[/*]', Comment.Multiline)
-        ],
-    }
-
-
-class PlPgsqlLexer(PostgresBase, RegexLexer):
-    """
-    Handle the extra syntax in Pl/pgSQL language.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'PL/pgSQL'
-    aliases = ['plpgsql']
-    mimetypes = ['text/x-plpgsql']
-
-    flags = re.IGNORECASE
-    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
-
-    # extend the keywords list
-    for i, pattern in enumerate(tokens['root']):
-        if pattern[1] == Keyword:
-            tokens['root'][i] = (
-                r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
-                Keyword)
-            del i
-            break
-    else:
-        assert 0, "SQL keywords not found"
-
-    # Add specific PL/pgSQL rules (before the SQL ones)
-    tokens['root'][:0] = [
-        (r'\%[a-z][a-z0-9_]*\b', Name.Builtin),     # actually, a datatype
-        (r':=', Operator),
-        (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
-        (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo),   # #variable_conflict
-    ]
-
-
-class PsqlRegexLexer(PostgresBase, RegexLexer):
-    """
-    Extend the PostgresLexer adding support specific for psql commands.
-
-    This is not a complete psql lexer yet as it lacks prompt support
-    and output rendering.
-    """
-
-    name = 'PostgreSQL console - regexp based lexer'
-    aliases = []    # not public
-
-    flags = re.IGNORECASE
-    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
-
-    tokens['root'].append(
-        (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
-    tokens['psql-command'] = [
-        (r'\n', Text, 'root'),
-        (r'\s+', Text),
-        (r'\\[^\s]+', Keyword.Pseudo),
-        (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
-        (r"'(''|[^'])*'", String.Single),
-        (r"`([^`])*`", String.Backtick),
-        (r"[^\s]+", String.Symbol),
-    ]
-
-re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
-re_psql_command = re.compile(r'\s*\\')
-re_end_command = re.compile(r';\s*(--.*?)?$')
-re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
-re_error = re.compile(r'(ERROR|FATAL):')
-re_message = re.compile(
-    r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
-    r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
-
-class lookahead(object):
-    """Wrap an iterator and allow pushing back an item."""
-    def __init__(self, x):
-        self.iter = iter(x)
-        self._nextitem = None
-    def __iter__(self):
-        return self
-    def send(self, i):
-        self._nextitem = i
-        return i
-    def next(self):
-        if self._nextitem is not None:
-            ni = self._nextitem
-            self._nextitem = None
-            return ni
-        return self.iter.next()
-
-
-class PostgresConsoleLexer(Lexer):
-    """
-    Lexer for psql sessions.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'PostgreSQL console (psql)'
-    aliases = ['psql', 'postgresql-console', 'postgres-console']
-    mimetypes = ['text/x-postgresql-psql']
-
-    def get_tokens_unprocessed(self, data):
-        sql = PsqlRegexLexer(**self.options)
-
-        lines = lookahead(line_re.findall(data))
-
-        # prompt-output cycle
-        while 1:
-
-            # consume the lines of the command: start with an optional prompt
-            # and continue until the end of command is detected
-            curcode = ''
-            insertions = []
-            while 1:
-                try:
-                    line = lines.next()
-                except StopIteration:
-                    # allow the emission of partially collected items
-                    # the repl loop will be broken below
-                    break
-
-                # Identify a shell prompt in case of psql commandline example
-                if line.startswith('$') and not curcode:
-                    lexer = get_lexer_by_name('console', **self.options)
-                    for x in lexer.get_tokens_unprocessed(line):
-                        yield x
-                    break
-
-                # Identify a psql prompt
-                mprompt = re_prompt.match(line)
-                if mprompt is not None:
-                    insertions.append((len(curcode),
-                                       [(0, Generic.Prompt, mprompt.group())]))
-                    curcode += line[len(mprompt.group()):]
-                else:
-                    curcode += line
-
-                # Check if this is the end of the command
-                # TODO: better handle multiline comments at the end with
-                # a lexer with an external state?
-                if re_psql_command.match(curcode) \
-                or re_end_command.search(curcode):
-                    break
-
-            # Emit the combined stream of command and prompt(s)
-            for item in do_insertions(insertions,
-                    sql.get_tokens_unprocessed(curcode)):
-                yield item
-
-            # Emit the output lines
-            out_token = Generic.Output
-            while 1:
-                line = lines.next()
-                mprompt = re_prompt.match(line)
-                if mprompt is not None:
-                    # push the line back to have it processed by the prompt
-                    lines.send(line)
-                    break
-
-                mmsg = re_message.match(line)
-                if mmsg is not None:
-                    if mmsg.group(1).startswith("ERROR") \
-                    or mmsg.group(1).startswith("FATAL"):
-                        out_token = Generic.Error
-                    yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
-                    yield (mmsg.start(2), out_token, mmsg.group(2))
-                else:
-                    yield (0, out_token, line)
-
-
-class SqlLexer(RegexLexer):
-    """
-    Lexer for Structured Query Language. Currently, this lexer does
-    not recognize any special syntax except ANSI SQL.
-    """
-
-    name = 'SQL'
-    aliases = ['sql']
-    filenames = ['*.sql']
-    mimetypes = ['text/x-sql']
-
-    flags = re.IGNORECASE
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'--.*?\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
-             r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
-             r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
-             r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
-             r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
-             r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
-             r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
-             r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
-             r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
-             r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
-             r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
-             r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
-             r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
-             r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
-             r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
-             r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
-             r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
-             r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
-             r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
-             r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
-             r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
-             r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
-             r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
-             r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
-             r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
-             r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
-             r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
-             r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
-             r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
-             r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
-             r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
-             r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
-             r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
-             r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
-             r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
-             r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
-             r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
-             r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
-             r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
-             r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
-             r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
-             r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
-             r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
-             r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
-             r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
-             r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
-             r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
-             r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
-             r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
-             r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
-             r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
-             r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
-             r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
-             r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
-             r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
-             r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
-             r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
-             r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
-             r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
-             r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
-             r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
-             r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
-             r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
-             r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
-             r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
-             r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
-             r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
-             r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
-             r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
-             r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
-             r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
-             r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
-             r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
-             r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
-             r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
-            (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
-             r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
-             r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
-             Name.Builtin),
-            (r'[+*/<>=~!@#%^&|`?-]', Operator),
-            (r'[0-9]+', Number.Integer),
-            # TODO: Backslash escapes?
-            (r"'(''|[^'])*'", String.Single),
-            (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'[;:()\[\],\.]', Punctuation)
-        ],
-        'multiline-comments': [
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[^/\*]+', Comment.Multiline),
-            (r'[/*]', Comment.Multiline)
-        ]
-    }
-
-
-class MySqlLexer(RegexLexer):
-    """
-    Special lexer for MySQL.
-    """
-
-    name = 'MySQL'
-    aliases = ['mysql']
-    mimetypes = ['text/x-mysql']
-
-    flags = re.IGNORECASE
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(#|--\s+).*?\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'[0-9]+', Number.Integer),
-            (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
-            # TODO: add backslash escapes
-            (r"'(''|[^'])*'", String.Single),
-            (r'"(""|[^"])*"', String.Double),
-            (r"`(``|[^`])*`", String.Symbol),
-            (r'[+*/<>=~!@#%^&|`?-]', Operator),
-            (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
-             r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
-             r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
-             r'precision|real|numeric|dec|decimal|timestamp|year|char|'
-             r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
-             bygroups(Keyword.Type, Text, Punctuation)),
-            (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
-             r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
-             r'character|check|collate|column|condition|constraint|continue|'
-             r'convert|create|cross|current_date|current_time|'
-             r'current_timestamp|current_user|cursor|database|databases|'
-             r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
-             r'declare|default|delayed|delete|desc|describe|deterministic|'
-             r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
-             r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
-             r'|for|force|foreign|from|fulltext|grant|group|having|'
-             r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
-             r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
-             r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
-             r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
-             r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
-             r'minute_microsecond|minute_second|mod|modifies|natural|'
-             r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
-             r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
-             r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
-             r'replace|require|restrict|return|revoke|right|rlike|schema|'
-             r'schemas|second_microsecond|select|sensitive|separator|set|'
-             r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
-             r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
-             r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
-             r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
-             r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
-             r'varying|when|where|while|with|write|x509|xor|year_month|'
-             r'zerofill)\b', Keyword),
-            # TODO: this list is not complete
-            (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
-            (r'(true|false|null)', Name.Constant),
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-            (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
-            (r'[;:()\[\],\.]', Punctuation)
-        ],
-        'multiline-comments': [
-            (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[^/\*]+', Comment.Multiline),
-            (r'[/*]', Comment.Multiline)
-        ]
-    }
-
-
-class SqliteConsoleLexer(Lexer):
-    """
-    Lexer for example sessions using sqlite3.
-
-    *New in Pygments 0.11.*
-    """
-
-    name = 'sqlite3con'
-    aliases = ['sqlite3']
-    filenames = ['*.sqlite3-console']
-    mimetypes = ['text/x-sqlite3-console']
-
-    def get_tokens_unprocessed(self, data):
-        sql = SqlLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(data):
-            line = match.group()
-            if line.startswith('sqlite> ') or line.startswith('   ...> '):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:8])]))
-                curcode += line[8:]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                              sql.get_tokens_unprocessed(curcode)):
-                        yield item
-                    curcode = ''
-                    insertions = []
-                if line.startswith('SQL error: '):
-                    yield (match.start(), Generic.Traceback, line)
-                else:
-                    yield (match.start(), Generic.Output, line)
-        if curcode:
-            for item in do_insertions(insertions,
-                                      sql.get_tokens_unprocessed(curcode)):
-                yield item
diff --git a/python/ext-libs/pygments/lexers/templates.py b/python/ext-libs/pygments/lexers/templates.py
deleted file mode 100644
index b3e70d0..0000000
--- a/python/ext-libs/pygments/lexers/templates.py
+++ /dev/null
@@ -1,1742 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.templates
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for various template engines' markup.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.web import \
-     PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
-from pygments.lexers.agile import PythonLexer, PerlLexer
-from pygments.lexers.compiled import JavaLexer
-from pygments.lexers.jvm import TeaLangLexer
-from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
-     include, using, this
-from pygments.token import Error, Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
-from pygments.util import html_doctype_matches, looks_like_xml
-
-__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
-           'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
-           'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
-           'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
-           'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
-           'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
-           'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
-           'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
-           'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
-           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
-           'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
-           'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
-           'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
-           'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
-           'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
-           'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer',
-           'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer']
-
-
-class ErbLexer(Lexer):
-    """
-    Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
-    lexer.
-
-    Just highlights ruby code between the preprocessor directives, other data
-    is left untouched by the lexer.
-
-    All options are also forwarded to the `RubyLexer`.
-    """
-
-    name = 'ERB'
-    aliases = ['erb']
-    mimetypes = ['application/x-ruby-templating']
-
-    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
-
-    def __init__(self, **options):
-        from pygments.lexers.agile import RubyLexer
-        self.ruby_lexer = RubyLexer(**options)
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        """
-        Since ERB doesn't allow "<%" and other tags inside of ruby
-        blocks we have to use a split approach here that fails for
-        that too.
-        """
-        tokens = self._block_re.split(text)
-        tokens.reverse()
-        state = idx = 0
-        try:
-            while True:
-                # text
-                if state == 0:
-                    val = tokens.pop()
-                    yield idx, Other, val
-                    idx += len(val)
-                    state = 1
-                # block starts
-                elif state == 1:
-                    tag = tokens.pop()
-                    # literals
-                    if tag in ('<%%', '%%>'):
-                        yield idx, Other, tag
-                        idx += 3
-                        state = 0
-                    # comment
-                    elif tag == '<%#':
-                        yield idx, Comment.Preproc, tag
-                        val = tokens.pop()
-                        yield idx + 3, Comment, val
-                        idx += 3 + len(val)
-                        state = 2
-                    # blocks or output
-                    elif tag in ('<%', '<%=', '<%-'):
-                        yield idx, Comment.Preproc, tag
-                        idx += len(tag)
-                        data = tokens.pop()
-                        r_idx = 0
-                        for r_idx, r_token, r_value in \
-                            self.ruby_lexer.get_tokens_unprocessed(data):
-                            yield r_idx + idx, r_token, r_value
-                        idx += len(data)
-                        state = 2
-                    elif tag in ('%>', '-%>'):
-                        yield idx, Error, tag
-                        idx += len(tag)
-                        state = 0
-                    # % raw ruby statements
-                    else:
-                        yield idx, Comment.Preproc, tag[0]
-                        r_idx = 0
-                        for r_idx, r_token, r_value in \
-                            self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
-                            yield idx + 1 + r_idx, r_token, r_value
-                        idx += len(tag)
-                        state = 0
-                # block ends
-                elif state == 2:
-                    tag = tokens.pop()
-                    if tag not in ('%>', '-%>'):
-                        yield idx, Other, tag
-                    else:
-                        yield idx, Comment.Preproc, tag
-                    idx += len(tag)
-                    state = 0
-        except IndexError:
-            return
-
-    def analyse_text(text):
-        if '<%' in text and '%>' in text:
-            return 0.4
-
-
-class SmartyLexer(RegexLexer):
-    """
-    Generic `Smarty <http://smarty.php.net/>`_ template lexer.
-
-    Just highlights smarty code between the preprocessor directives, other
-    data is left untouched by the lexer.
-    """
-
-    name = 'Smarty'
-    aliases = ['smarty']
-    filenames = ['*.tpl']
-    mimetypes = ['application/x-smarty']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            (r'[^{]+', Other),
-            (r'(\{)(\*.*?\*)(\})',
-             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
-            (r'(\{php\})(.*?)(\{/php\})',
-             bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
-                      Comment.Preproc)),
-            (r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)',
-             bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
-            (r'\{', Comment.Preproc, 'smarty')
-        ],
-        'smarty': [
-            (r'\s+', Text),
-            (r'\}', Comment.Preproc, '#pop'),
-            (r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
-            (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
-            (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
-             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
-        ]
-    }
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search('\{if\s+.*?\}.*?\{/if\}', text):
-            rv += 0.15
-        if re.search('\{include\s+file=.*?\}', text):
-            rv += 0.15
-        if re.search('\{foreach\s+.*?\}.*?\{/foreach\}', text):
-            rv += 0.15
-        if re.search('\{\$.*?\}', text):
-            rv += 0.01
-        return rv
-
-
-class VelocityLexer(RegexLexer):
-    """
-    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
-
-    Just highlights velocity directives and variable references, other
-    data is left untouched by the lexer.
-    """
-
-    name = 'Velocity'
-    aliases = ['velocity']
-    filenames = ['*.vm','*.fhtml']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
-    tokens = {
-        'root': [
-            (r'[^{#$]+', Other),
-            (r'(#)(\*.*?\*)(#)',
-             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
-            (r'(##)(.*?$)',
-             bygroups(Comment.Preproc, Comment)),
-            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
-             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
-             'directiveparams'),
-            (r'(#\{?)(' + identifier + r')(\}|\b)',
-             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
-            (r'\$\{?', Punctuation, 'variable')
-        ],
-        'variable': [
-            (identifier, Name.Variable),
-            (r'\(', Punctuation, 'funcparams'),
-            (r'(\.)(' + identifier + r')',
-             bygroups(Punctuation, Name.Variable), '#push'),
-            (r'\}', Punctuation, '#pop'),
-            (r'', Other, '#pop')
-        ],
-        'directiveparams': [
-            (r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
-             Operator),
-            (r'\[', Operator, 'rangeoperator'),
-            (r'\b' + identifier + r'\b', Name.Function),
-            include('funcparams')
-        ],
-        'rangeoperator': [
-            (r'\.\.', Operator),
-            include('funcparams'),
-            (r'\]', Operator, '#pop')
-        ],
-        'funcparams': [
-            (r'\$\{?', Punctuation, 'variable'),
-            (r'\s+', Text),
-            (r',', Punctuation),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-            (r"\b[0-9]+\b", Number),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'\(', Punctuation, '#push'),
-            (r'\)', Punctuation, '#pop')
-        ]
-    }
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
-            rv += 0.25
-        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
-            rv += 0.15
-        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
-            rv += 0.15
-        if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?'
-                     r'(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
-            rv += 0.01
-        return rv
-
-
-class VelocityHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `VelocityLexer` that highlights unlexer data
-    with the `HtmlLexer`.
-
-    """
-
-    name = 'HTML+Velocity'
-    aliases = ['html+velocity']
-    alias_filenames = ['*.html','*.fhtml']
-    mimetypes = ['text/html+velocity']
-
-    def __init__(self, **options):
-        super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
-                                              **options)
-
-
-class VelocityXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `VelocityLexer` that highlights unlexer data
-    with the `XmlLexer`.
-
-    """
-
-    name = 'XML+Velocity'
-    aliases = ['xml+velocity']
-    alias_filenames = ['*.xml','*.vm']
-    mimetypes = ['application/xml+velocity']
-
-    def __init__(self, **options):
-        super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
-                                               **options)
-
-    def analyse_text(text):
-        rv = VelocityLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.5
-        return rv
-
-
-class DjangoLexer(RegexLexer):
-    """
-    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
-    and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.
-
-    It just highlights django/jinja code between the preprocessor directives,
-    other data is left untouched by the lexer.
-    """
-
-    name = 'Django/Jinja'
-    aliases = ['django', 'jinja']
-    mimetypes = ['application/x-django-templating', 'application/x-jinja']
-
-    flags = re.M | re.S
-
-    tokens = {
-        'root': [
-            (r'[^{]+', Other),
-            (r'\{\{', Comment.Preproc, 'var'),
-            # jinja/django comments
-            (r'\{[*#].*?[*#]\}', Comment),
-            # django comments
-            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
-             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
-             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
-                      Comment, Comment.Preproc, Text, Keyword, Text,
-                      Comment.Preproc)),
-            # raw jinja blocks
-            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
-             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
-             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
-                      Text, Comment.Preproc, Text, Keyword, Text,
-                      Comment.Preproc)),
-            # filter blocks
-            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
-             'block'),
-            (r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Comment.Preproc, Text, Keyword), 'block'),
-            (r'\{', Other)
-        ],
-        'varnames': [
-            (r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Text, Name.Function)),
-            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
-            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
-            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
-             r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
-             Keyword),
-            (r'(loop|block|super|forloop)\b', Name.Builtin),
-            (r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
-            (r'\.[a-zA-Z0-9_]+', Name.Variable),
-            (r':?"(\\\\|\\"|[^"])*"', String.Double),
-            (r":?'(\\\\|\\'|[^'])*'", String.Single),
-            (r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
-            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
-             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-        ],
-        'var': [
-            (r'\s+', Text),
-            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
-            include('varnames')
-        ],
-        'block': [
-            (r'\s+', Text),
-            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
-            include('varnames'),
-            (r'.', Punctuation)
-        ]
-    }
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search(r'\{%\s*(block|extends)', text) is not None:
-            rv += 0.4
-        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
-            rv += 0.1
-        if re.search(r'\{\{.*?\}\}', text) is not None:
-            rv += 0.1
-        return rv
-
-
-class MyghtyLexer(RegexLexer):
-    """
-    Generic `myghty templates`_ lexer. Code that isn't Myghty
-    markup is yielded as `Token.Other`.
-
-    *New in Pygments 0.6.*
-
-    .. _myghty templates: http://www.myghty.org/
-    """
-
-    name = 'Myghty'
-    aliases = ['myghty']
-    filenames = ['*.myt', 'autodelegate']
-    mimetypes = ['application/x-myghty']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
-                      using(this), Name.Tag)),
-            (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, Name.Function, Name.Tag,
-                      using(PythonLexer), Name.Tag)),
-            (r'(<&[^|])(.*?)(,.*?)?(&>)',
-             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
-            (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
-             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
-            (r'</&>', Name.Tag),
-            (r'(<%!?)(.*?)(%>)(?s)',
-             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
-            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
-            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
-             bygroups(Name.Tag, using(PythonLexer), Other)),
-            (r"""(?sx)
-                 (.+?)               # anything, followed by:
-                 (?:
-                  (?<=\n)(?=[%#]) |  # an eval or comment line
-                  (?=</?[%&]) |      # a substitution or block or
-                                     # call start or end
-                                     # - don't consume
-                  (\\\n) |           # an escaped newline
-                  \Z                 # end of string
-                 )""", bygroups(Other, Operator)),
-        ]
-    }
-
-
-class MyghtyHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `MyghtyLexer` that highlights unlexer data
-    with the `HtmlLexer`.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'HTML+Myghty'
-    aliases = ['html+myghty']
-    mimetypes = ['text/html+myghty']
-
-    def __init__(self, **options):
-        super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
-                                              **options)
-
-
-class MyghtyXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `MyghtyLexer` that highlights unlexer data
-    with the `XmlLexer`.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'XML+Myghty'
-    aliases = ['xml+myghty']
-    mimetypes = ['application/xml+myghty']
-
-    def __init__(self, **options):
-        super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
-                                             **options)
-
-
-class MyghtyJavascriptLexer(DelegatingLexer):
-    """
-    Subclass of the `MyghtyLexer` that highlights unlexer data
-    with the `JavascriptLexer`.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'JavaScript+Myghty'
-    aliases = ['js+myghty', 'javascript+myghty']
-    mimetypes = ['application/x-javascript+myghty',
-                 'text/x-javascript+myghty',
-                 'text/javascript+mygthy']
-
-    def __init__(self, **options):
-        super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
-                                                    MyghtyLexer, **options)
-
-
-class MyghtyCssLexer(DelegatingLexer):
-    """
-    Subclass of the `MyghtyLexer` that highlights unlexer data
-    with the `CssLexer`.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'CSS+Myghty'
-    aliases = ['css+myghty']
-    mimetypes = ['text/css+myghty']
-
-    def __init__(self, **options):
-        super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
-                                             **options)
-
-
-class MasonLexer(RegexLexer):
-    """
-    Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
-    Mason markup is HTML.
-
-    .. _mason templates: http://www.masonhq.com/
-
-    *New in Pygments 1.4.*
-    """
-    name = 'Mason'
-    aliases = ['mason']
-    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
-    mimetypes = ['application/x-mason']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(<%doc>)(.*?)(</%doc>)(?s)',
-             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
-            (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
-                      using(this), Name.Tag)),
-            (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, Name.Function, Name.Tag,
-                      using(PerlLexer), Name.Tag)),
-            (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
-             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
-            (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
-             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
-            (r'</&>', Name.Tag),
-            (r'(<%!?)(.*?)(%>)(?s)',
-             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
-            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
-            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
-             bygroups(Name.Tag, using(PerlLexer), Other)),
-            (r"""(?sx)
-                 (.+?)               # anything, followed by:
-                 (?:
-                  (?<=\n)(?=[%#]) |  # an eval or comment line
-                  (?=</?[%&]) |      # a substitution or block or
-                                     # call start or end
-                                     # - don't consume
-                  (\\\n) |           # an escaped newline
-                  \Z                 # end of string
-                 )""", bygroups(using(HtmlLexer), Operator)),
-        ]
-    }
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search('<&', text) is not None:
-            rv = 1.0
-        return rv
-
-
-class MakoLexer(RegexLexer):
-    """
-    Generic `mako templates`_ lexer. Code that isn't Mako
-    markup is yielded as `Token.Other`.
-
-    *New in Pygments 0.7.*
-
-    .. _mako templates: http://www.makotemplates.org/
-    """
-
-    name = 'Mako'
-    aliases = ['mako']
-    filenames = ['*.mao']
-    mimetypes = ['application/x-mako']
-
-    tokens = {
-        'root': [
-            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Keyword, Other)),
-            (r'(\s*)(%)([^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
-            (r'(\s*)(##[^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Other)),
-            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
-            (r'(<%)([\w\.\:]+)',
-             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
-             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
-            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(<%(?:!?))(.*?)(%>)(?s)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'''(?sx)
-                (.+?)                # anything, followed by:
-                (?:
-                 (?<=\n)(?=%|\#\#) | # an eval or comment line
-                 (?=\#\*) |          # multiline comment
-                 (?=</?%) |          # a python block
-                                     # call start or end
-                 (?=\$\{) |          # a substitution
-                 (?<=\n)(?=\s*%) |
-                                     # - don't consume
-                 (\\\n) |            # an escaped newline
-                 \Z                  # end of string
-                )
-            ''', bygroups(Other, Operator)),
-            (r'\s+', Text),
-        ],
-        'ondeftags': [
-            (r'<%', Comment.Preproc),
-            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
-            include('tag'),
-        ],
-        'tag': [
-            (r'((?:\w+)\s*=)(\s*)(".*?")',
-             bygroups(Name.Attribute, Text, String)),
-            (r'/?\s*>', Comment.Preproc, '#pop'),
-            (r'\s+', Text),
-        ],
-        'attr': [
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
-        ],
-    }
-
-
-class MakoHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `MakoLexer` that highlights unlexed data
-    with the `HtmlLexer`.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'HTML+Mako'
-    aliases = ['html+mako']
-    mimetypes = ['text/html+mako']
-
-    def __init__(self, **options):
-        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
-                                              **options)
-
-class MakoXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `MakoLexer` that highlights unlexer data
-    with the `XmlLexer`.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'XML+Mako'
-    aliases = ['xml+mako']
-    mimetypes = ['application/xml+mako']
-
-    def __init__(self, **options):
-        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
-                                             **options)
-
-class MakoJavascriptLexer(DelegatingLexer):
-    """
-    Subclass of the `MakoLexer` that highlights unlexer data
-    with the `JavascriptLexer`.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'JavaScript+Mako'
-    aliases = ['js+mako', 'javascript+mako']
-    mimetypes = ['application/x-javascript+mako',
-                 'text/x-javascript+mako',
-                 'text/javascript+mako']
-
-    def __init__(self, **options):
-        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
-                                                    MakoLexer, **options)
-
-class MakoCssLexer(DelegatingLexer):
-    """
-    Subclass of the `MakoLexer` that highlights unlexer data
-    with the `CssLexer`.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'CSS+Mako'
-    aliases = ['css+mako']
-    mimetypes = ['text/css+mako']
-
-    def __init__(self, **options):
-        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
-                                             **options)
-
-
-# Genshi and Cheetah lexers courtesy of Matt Good.
-
-class CheetahPythonLexer(Lexer):
-    """
-    Lexer for handling Cheetah's special $ tokens in Python syntax.
-    """
-
-    def get_tokens_unprocessed(self, text):
-        pylexer = PythonLexer(**self.options)
-        for pos, type_, value in pylexer.get_tokens_unprocessed(text):
-            if type_ == Token.Error and value == '$':
-                type_ = Comment.Preproc
-            yield pos, type_, value
-
-
-class CheetahLexer(RegexLexer):
-    """
-    Generic `cheetah templates`_ lexer. Code that isn't Cheetah
-    markup is yielded as `Token.Other`.  This also works for
-    `spitfire templates`_ which use the same syntax.
-
-    .. _cheetah templates: http://www.cheetahtemplate.org/
-    .. _spitfire templates: http://code.google.com/p/spitfire/
-    """
-
-    name = 'Cheetah'
-    aliases = ['cheetah', 'spitfire']
-    filenames = ['*.tmpl', '*.spt']
-    mimetypes = ['application/x-cheetah', 'application/x-spitfire']
-
-    tokens = {
-        'root': [
-            (r'(##[^\n]*)$',
-             (bygroups(Comment))),
-            (r'#[*](.|\n)*?[*]#', Comment),
-            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
-            (r'#slurp$', Comment.Preproc),
-            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
-             (bygroups(Comment.Preproc, using(CheetahPythonLexer),
-                       Comment.Preproc))),
-            # TODO support other Python syntax like $foo['bar']
-            (r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])',
-             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
-            (r'(\$\{!?)(.*?)(\})(?s)',
-             bygroups(Comment.Preproc, using(CheetahPythonLexer),
-                      Comment.Preproc)),
-            (r'''(?sx)
-                (.+?)               # anything, followed by:
-                (?:
-                 (?=[#][#a-zA-Z]*) |   # an eval comment
-                 (?=\$[a-zA-Z_{]) | # a substitution
-                 \Z                 # end of string
-                )
-            ''', Other),
-            (r'\s+', Text),
-        ],
-    }
-
-
-class CheetahHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `CheetahLexer` that highlights unlexer data
-    with the `HtmlLexer`.
-    """
-
-    name = 'HTML+Cheetah'
-    aliases = ['html+cheetah', 'html+spitfire']
-    mimetypes = ['text/html+cheetah', 'text/html+spitfire']
-
-    def __init__(self, **options):
-        super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
-                                               **options)
-
-
-class CheetahXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `CheetahLexer` that highlights unlexer data
-    with the `XmlLexer`.
-    """
-
-    name = 'XML+Cheetah'
-    aliases = ['xml+cheetah', 'xml+spitfire']
-    mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
-
-    def __init__(self, **options):
-        super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
-                                              **options)
-
-
-class CheetahJavascriptLexer(DelegatingLexer):
-    """
-    Subclass of the `CheetahLexer` that highlights unlexer data
-    with the `JavascriptLexer`.
-    """
-
-    name = 'JavaScript+Cheetah'
-    aliases = ['js+cheetah', 'javascript+cheetah',
-               'js+spitfire', 'javascript+spitfire']
-    mimetypes = ['application/x-javascript+cheetah',
-                 'text/x-javascript+cheetah',
-                 'text/javascript+cheetah',
-                 'application/x-javascript+spitfire',
-                 'text/x-javascript+spitfire',
-                 'text/javascript+spitfire']
-
-    def __init__(self, **options):
-        super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
-                                                     CheetahLexer, **options)
-
-
-class GenshiTextLexer(RegexLexer):
-    """
-    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
-    templates.
-    """
-
-    name = 'Genshi Text'
-    aliases = ['genshitext']
-    mimetypes = ['application/x-genshi-text', 'text/x-genshi']
-
-    tokens = {
-        'root': [
-            (r'[^#\$\s]+', Other),
-            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
-            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
-            include('variable'),
-            (r'[#\$\s]', Other),
-        ],
-        'directive': [
-            (r'\n', Text, '#pop'),
-            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
-            (r'(choose|when|with)([^\S\n]+)(.*)',
-             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
-            (r'(choose|otherwise)\b', Keyword, '#pop'),
-            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
-        ],
-        'variable': [
-            (r'(?<!\$)(\$\{)(.+?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
-             Name.Variable),
-        ]
-    }
-
-
-class GenshiMarkupLexer(RegexLexer):
-    """
-    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
-    `GenshiLexer`.
-    """
-
-    flags = re.DOTALL
-
-    tokens = {
-        'root': [
-            (r'[^<\$]+', Other),
-            (r'(<\?python)(.*?)(\?>)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            # yield style and script blocks as Other
-            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
-            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
-            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
-            include('variable'),
-            (r'[<\$]', Other),
-        ],
-        'pytag': [
-            (r'\s+', Text),
-            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'),
-            (r'/?\s*>', Name.Tag, '#pop'),
-        ],
-        'pyattr': [
-            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
-            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
-            (r'[^\s>]+', String, '#pop'),
-        ],
-        'tag': [
-            (r'\s+', Text),
-            (r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'),
-            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
-            (r'/?\s*>', Name.Tag, '#pop'),
-        ],
-        'attr': [
-            ('"', String, 'attr-dstring'),
-            ("'", String, 'attr-sstring'),
-            (r'[^\s>]*', String, '#pop')
-        ],
-        'attr-dstring': [
-            ('"', String, '#pop'),
-            include('strings'),
-            ("'", String)
-        ],
-        'attr-sstring': [
-            ("'", String, '#pop'),
-            include('strings'),
-            ("'", String)
-        ],
-        'strings': [
-            ('[^"\'$]+', String),
-            include('variable')
-        ],
-        'variable': [
-            (r'(?<!\$)(\$\{)(.+?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
-             Name.Variable),
-        ]
-    }
-
-
-class HtmlGenshiLexer(DelegatingLexer):
-    """
-    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
-    `kid <http://kid-templating.org/>`_ kid HTML templates.
-    """
-
-    name = 'HTML+Genshi'
-    aliases = ['html+genshi', 'html+kid']
-    alias_filenames = ['*.html', '*.htm', '*.xhtml']
-    mimetypes = ['text/html+genshi']
-
-    def __init__(self, **options):
-        super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
-                                              **options)
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search('\$\{.*?\}', text) is not None:
-            rv += 0.2
-        if re.search('py:(.*?)=["\']', text) is not None:
-            rv += 0.2
-        return rv + HtmlLexer.analyse_text(text) - 0.01
-
-
-class GenshiLexer(DelegatingLexer):
-    """
-    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
-    `kid <http://kid-templating.org/>`_ kid XML templates.
-    """
-
-    name = 'Genshi'
-    aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
-    filenames = ['*.kid']
-    alias_filenames = ['*.xml']
-    mimetypes = ['application/x-genshi', 'application/x-kid']
-
-    def __init__(self, **options):
-        super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
-                                          **options)
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search('\$\{.*?\}', text) is not None:
-            rv += 0.2
-        if re.search('py:(.*?)=["\']', text) is not None:
-            rv += 0.2
-        return rv + XmlLexer.analyse_text(text) - 0.01
-
-
-class JavascriptGenshiLexer(DelegatingLexer):
-    """
-    A lexer that highlights javascript code in genshi text templates.
-    """
-
-    name = 'JavaScript+Genshi Text'
-    aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
-               'javascript+genshi']
-    alias_filenames = ['*.js']
-    mimetypes = ['application/x-javascript+genshi',
-                 'text/x-javascript+genshi',
-                 'text/javascript+genshi']
-
-    def __init__(self, **options):
-        super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
-                                                    GenshiTextLexer,
-                                                    **options)
-
-    def analyse_text(text):
-        return GenshiLexer.analyse_text(text) - 0.05
-
-
-class CssGenshiLexer(DelegatingLexer):
-    """
-    A lexer that highlights CSS definitions in genshi text templates.
-    """
-
-    name = 'CSS+Genshi Text'
-    aliases = ['css+genshitext', 'css+genshi']
-    alias_filenames = ['*.css']
-    mimetypes = ['text/css+genshi']
-
-    def __init__(self, **options):
-        super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
-                                             **options)
-
-    def analyse_text(text):
-        return GenshiLexer.analyse_text(text) - 0.05
-
-
-class RhtmlLexer(DelegatingLexer):
-    """
-    Subclass of the ERB lexer that highlights the unlexed data with the
-    html lexer.
-
-    Nested Javascript and CSS is highlighted too.
-    """
-
-    name = 'RHTML'
-    aliases = ['rhtml', 'html+erb', 'html+ruby']
-    filenames = ['*.rhtml']
-    alias_filenames = ['*.html', '*.htm', '*.xhtml']
-    mimetypes = ['text/html+ruby']
-
-    def __init__(self, **options):
-        super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
-
-    def analyse_text(text):
-        rv = ErbLexer.analyse_text(text) - 0.01
-        if html_doctype_matches(text):
-            # one more than the XmlErbLexer returns
-            rv += 0.5
-        return rv
-
-
-class XmlErbLexer(DelegatingLexer):
-    """
-    Subclass of `ErbLexer` which highlights data outside preprocessor
-    directives with the `XmlLexer`.
-    """
-
-    name = 'XML+Ruby'
-    aliases = ['xml+erb', 'xml+ruby']
-    alias_filenames = ['*.xml']
-    mimetypes = ['application/xml+ruby']
-
-    def __init__(self, **options):
-        super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
-
-    def analyse_text(text):
-        rv = ErbLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        return rv
-
-
-class CssErbLexer(DelegatingLexer):
-    """
-    Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
-    """
-
-    name = 'CSS+Ruby'
-    aliases = ['css+erb', 'css+ruby']
-    alias_filenames = ['*.css']
-    mimetypes = ['text/css+ruby']
-
-    def __init__(self, **options):
-        super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
-
-    def analyse_text(text):
-        return ErbLexer.analyse_text(text) - 0.05
-
-
-class JavascriptErbLexer(DelegatingLexer):
-    """
-    Subclass of `ErbLexer` which highlights unlexed data with the
-    `JavascriptLexer`.
-    """
-
-    name = 'JavaScript+Ruby'
-    aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
-    alias_filenames = ['*.js']
-    mimetypes = ['application/x-javascript+ruby',
-                 'text/x-javascript+ruby',
-                 'text/javascript+ruby']
-
-    def __init__(self, **options):
-        super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
-                                                 **options)
-
-    def analyse_text(text):
-        return ErbLexer.analyse_text(text) - 0.05
-
-
-class HtmlPhpLexer(DelegatingLexer):
-    """
-    Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
-
-    Nested Javascript and CSS is highlighted too.
-    """
-
-    name = 'HTML+PHP'
-    aliases = ['html+php']
-    filenames = ['*.phtml']
-    alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
-                       '*.php[345]']
-    mimetypes = ['application/x-php',
-                 'application/x-httpd-php', 'application/x-httpd-php3',
-                 'application/x-httpd-php4', 'application/x-httpd-php5']
-
-    def __init__(self, **options):
-        super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
-
-    def analyse_text(text):
-        rv = PhpLexer.analyse_text(text) - 0.01
-        if html_doctype_matches(text):
-            rv += 0.5
-        return rv
-
-
-class XmlPhpLexer(DelegatingLexer):
-    """
-    Subclass of `PhpLexer` that higlights unhandled data with the `XmlLexer`.
-    """
-
-    name = 'XML+PHP'
-    aliases = ['xml+php']
-    alias_filenames = ['*.xml', '*.php', '*.php[345]']
-    mimetypes = ['application/xml+php']
-
-    def __init__(self, **options):
-        super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
-
-    def analyse_text(text):
-        rv = PhpLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        return rv
-
-
-class CssPhpLexer(DelegatingLexer):
-    """
-    Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
-    """
-
-    name = 'CSS+PHP'
-    aliases = ['css+php']
-    alias_filenames = ['*.css']
-    mimetypes = ['text/css+php']
-
-    def __init__(self, **options):
-        super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
-
-    def analyse_text(text):
-        return PhpLexer.analyse_text(text) - 0.05
-
-
-class JavascriptPhpLexer(DelegatingLexer):
-    """
-    Subclass of `PhpLexer` which highlights unmatched data with the
-    `JavascriptLexer`.
-    """
-
-    name = 'JavaScript+PHP'
-    aliases = ['js+php', 'javascript+php']
-    alias_filenames = ['*.js']
-    mimetypes = ['application/x-javascript+php',
-                 'text/x-javascript+php',
-                 'text/javascript+php']
-
-    def __init__(self, **options):
-        super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
-                                                 **options)
-
-    def analyse_text(text):
-        return PhpLexer.analyse_text(text)
-
-
-class HtmlSmartyLexer(DelegatingLexer):
-    """
-    Subclass of the `SmartyLexer` that highighlights unlexed data with the
-    `HtmlLexer`.
-
-    Nested Javascript and CSS is highlighted too.
-    """
-
-    name = 'HTML+Smarty'
-    aliases = ['html+smarty']
-    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
-    mimetypes = ['text/html+smarty']
-
-    def __init__(self, **options):
-        super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
-
-    def analyse_text(text):
-        rv = SmartyLexer.analyse_text(text) - 0.01
-        if html_doctype_matches(text):
-            rv += 0.5
-        return rv
-
-
-class XmlSmartyLexer(DelegatingLexer):
-    """
-    Subclass of the `SmartyLexer` that highlights unlexed data with the
-    `XmlLexer`.
-    """
-
-    name = 'XML+Smarty'
-    aliases = ['xml+smarty']
-    alias_filenames = ['*.xml', '*.tpl']
-    mimetypes = ['application/xml+smarty']
-
-    def __init__(self, **options):
-        super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
-
-    def analyse_text(text):
-        rv = SmartyLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        return rv
-
-
-class CssSmartyLexer(DelegatingLexer):
-    """
-    Subclass of the `SmartyLexer` that highlights unlexed data with the
-    `CssLexer`.
-    """
-
-    name = 'CSS+Smarty'
-    aliases = ['css+smarty']
-    alias_filenames = ['*.css', '*.tpl']
-    mimetypes = ['text/css+smarty']
-
-    def __init__(self, **options):
-        super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
-
-    def analyse_text(text):
-        return SmartyLexer.analyse_text(text) - 0.05
-
-
-class JavascriptSmartyLexer(DelegatingLexer):
-    """
-    Subclass of the `SmartyLexer` that highlights unlexed data with the
-    `JavascriptLexer`.
-    """
-
-    name = 'JavaScript+Smarty'
-    aliases = ['js+smarty', 'javascript+smarty']
-    alias_filenames = ['*.js', '*.tpl']
-    mimetypes = ['application/x-javascript+smarty',
-                 'text/x-javascript+smarty',
-                 'text/javascript+smarty']
-
-    def __init__(self, **options):
-        super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
-                                                    **options)
-
-    def analyse_text(text):
-        return SmartyLexer.analyse_text(text) - 0.05
-
-
-class HtmlDjangoLexer(DelegatingLexer):
-    """
-    Subclass of the `DjangoLexer` that highighlights unlexed data with the
-    `HtmlLexer`.
-
-    Nested Javascript and CSS is highlighted too.
-    """
-
-    name = 'HTML+Django/Jinja'
-    aliases = ['html+django', 'html+jinja']
-    alias_filenames = ['*.html', '*.htm', '*.xhtml']
-    mimetypes = ['text/html+django', 'text/html+jinja']
-
-    def __init__(self, **options):
-        super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
-
-    def analyse_text(text):
-        rv = DjangoLexer.analyse_text(text) - 0.01
-        if html_doctype_matches(text):
-            rv += 0.5
-        return rv
-
-
-class XmlDjangoLexer(DelegatingLexer):
-    """
-    Subclass of the `DjangoLexer` that highlights unlexed data with the
-    `XmlLexer`.
-    """
-
-    name = 'XML+Django/Jinja'
-    aliases = ['xml+django', 'xml+jinja']
-    alias_filenames = ['*.xml']
-    mimetypes = ['application/xml+django', 'application/xml+jinja']
-
-    def __init__(self, **options):
-        super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
-
-    def analyse_text(text):
-        rv = DjangoLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        return rv
-
-
-class CssDjangoLexer(DelegatingLexer):
-    """
-    Subclass of the `DjangoLexer` that highlights unlexed data with the
-    `CssLexer`.
-    """
-
-    name = 'CSS+Django/Jinja'
-    aliases = ['css+django', 'css+jinja']
-    alias_filenames = ['*.css']
-    mimetypes = ['text/css+django', 'text/css+jinja']
-
-    def __init__(self, **options):
-        super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
-
-    def analyse_text(text):
-        return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JavascriptDjangoLexer(DelegatingLexer):
-    """
-    Subclass of the `DjangoLexer` that highlights unlexed data with the
-    `JavascriptLexer`.
-    """
-
-    name = 'JavaScript+Django/Jinja'
-    aliases = ['js+django', 'javascript+django',
-               'js+jinja', 'javascript+jinja']
-    alias_filenames = ['*.js']
-    mimetypes = ['application/x-javascript+django',
-                 'application/x-javascript+jinja',
-                 'text/x-javascript+django',
-                 'text/x-javascript+jinja',
-                 'text/javascript+django',
-                 'text/javascript+jinja']
-
-    def __init__(self, **options):
-        super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
-                                                    **options)
-
-    def analyse_text(text):
-        return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JspRootLexer(RegexLexer):
-    """
-    Base for the `JspLexer`. Yields `Token.Other` for area outside of
-    JSP tags.
-
-    *New in Pygments 0.7.*
-    """
-
-    tokens = {
-        'root': [
-            (r'<%\S?', Keyword, 'sec'),
-            # FIXME: I want to make these keywords but still parse attributes.
-            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
-             Keyword),
-            (r'[^<]+', Other),
-            (r'<', Other),
-        ],
-        'sec': [
-            (r'%>', Keyword, '#pop'),
-            # note: '\w\W' != '.' without DOTALL.
-            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
-        ],
-    }
-
-
-class JspLexer(DelegatingLexer):
-    """
-    Lexer for Java Server Pages.
-
-    *New in Pygments 0.7.*
-    """
-    name = 'Java Server Page'
-    aliases = ['jsp']
-    filenames = ['*.jsp']
-    mimetypes = ['application/x-jsp']
-
-    def __init__(self, **options):
-        super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
-
-    def analyse_text(text):
-        rv = JavaLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        if '<%' in text and '%>' in text:
-            rv += 0.1
-        return rv
-
-
-class EvoqueLexer(RegexLexer):
-    """
-    For files using the Evoque templating system.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'Evoque'
-    aliases = ['evoque']
-    filenames = ['*.evoque']
-    mimetypes = ['application/x-evoque']
-
-    flags = re.DOTALL
-
-    tokens = {
-        'root': [
-            (r'[^#$]+', Other),
-            (r'#\[', Comment.Multiline, 'comment'),
-            (r'\$\$', Other),
-            # svn keywords
-            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
-            # directives: begin, end
-            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
-             bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      String, Punctuation)),
-            # directives: evoque, overlay
-            # see doc for handling first name arg: /directives/evoque/
-            #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
-            # should be using(PythonLexer), not passed out as String
-            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
-             r'(.*?)((?(4)%)\})',
-             bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      String, using(PythonLexer), Punctuation)),
-            # directives: if, for, prefer, test
-            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
-             bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      using(PythonLexer), Punctuation)),
-            # directive clauses (no {} expression)
-            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
-            # expressions
-            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
-             bygroups(Punctuation, None, using(PythonLexer),
-                      Name.Builtin, None, None, Punctuation)),
-            (r'#', Other),
-        ],
-        'comment': [
-            (r'[^\]#]', Comment.Multiline),
-            (r'#\[', Comment.Multiline, '#push'),
-            (r'\]#', Comment.Multiline, '#pop'),
-            (r'[\]#]', Comment.Multiline)
-        ],
-    }
-
-class EvoqueHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `EvoqueLexer` that highlights unlexed data with the
-    `HtmlLexer`.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'HTML+Evoque'
-    aliases = ['html+evoque']
-    filenames = ['*.html']
-    mimetypes = ['text/html+evoque']
-
-    def __init__(self, **options):
-        super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
-                                              **options)
-
-class EvoqueXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `EvoqueLexer` that highlights unlexed data with the
-    `XmlLexer`.
-
-    *New in Pygments 1.1.*
-    """
-    name = 'XML+Evoque'
-    aliases = ['xml+evoque']
-    filenames = ['*.xml']
-    mimetypes = ['application/xml+evoque']
-
-    def __init__(self, **options):
-        super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
-                                             **options)
-
-class ColdfusionLexer(RegexLexer):
-    """
-    Coldfusion statements
-    """
-    name = 'cfstatement'
-    aliases = ['cfs']
-    filenames = []
-    mimetypes = []
-    flags = re.IGNORECASE | re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r'//.*', Comment),
-            (r'\+\+|--', Operator),
-            (r'[-+*/^&=!]', Operator),
-            (r'<=|>=|<|>', Operator),
-            (r'mod\b', Operator),
-            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
-            (r'\|\||&&', Operator),
-            (r'"', String.Double, 'string'),
-            # There is a special rule for allowing html in single quoted
-            # strings, evidently.
-            (r"'.*?'", String.Single),
-            (r'\d+', Number),
-            (r'(if|else|len|var|case|default|break|switch)\b', Keyword),
-            (r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable),
-            (r'[()\[\]{};:,.\\]', Punctuation),
-            (r'\s+', Text),
-        ],
-        'string': [
-            (r'""', String.Double),
-            (r'#.+?#', String.Interp),
-            (r'[^"#]+', String.Double),
-            (r'#', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-    }
-
-
-class ColdfusionMarkupLexer(RegexLexer):
-    """
-    Coldfusion markup only
-    """
-    name = 'Coldfusion'
-    aliases = ['cf']
-    filenames = []
-    mimetypes = []
-
-    tokens = {
-        'root': [
-            (r'[^<]+', Other),
-            include('tags'),
-            (r'<[^<>]*', Other),
-        ],
-        'tags': [
-            (r'(?s)<!---.*?--->', Comment.Multiline),
-            (r'(?s)<!--.*?-->', Comment),
-            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
-            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
-             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
-            # negative lookbehind is for strings with embedded >
-            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
-             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
-             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
-             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
-             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
-        ],
-        'cfoutput': [
-            (r'[^#<]+', Other),
-            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
-                                      Punctuation)),
-            #(r'<cfoutput.*?>', Name.Builtin, '#push'),
-            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
-            include('tags'),
-            (r'(?s)<[^<>]*', Other),
-            (r'#', Other),
-        ],
-    }
-
-
-class ColdfusionHtmlLexer(DelegatingLexer):
-    """
-    Coldfusion markup in html
-    """
-    name = 'Coldfusion HTML'
-    aliases = ['cfm']
-    filenames = ['*.cfm', '*.cfml', '*.cfc']
-    mimetypes = ['application/x-coldfusion']
-
-    def __init__(self, **options):
-        super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
-                                                  **options)
-
-
-class SspLexer(DelegatingLexer):
-    """
-    Lexer for Scalate Server Pages.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'Scalate Server Page'
-    aliases = ['ssp']
-    filenames = ['*.ssp']
-    mimetypes = ['application/x-ssp']
-
-    def __init__(self, **options):
-        super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search('val \w+\s*:', text):
-            rv += 0.6
-        if looks_like_xml(text):
-            rv += 0.2
-        if '<%' in text and '%>' in text:
-            rv += 0.1
-        return rv
-
-
-class TeaTemplateRootLexer(RegexLexer):
-    """
-    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
-    code blocks.
-
-    *New in Pygments 1.5.*
-    """
-
-    tokens = {
-        'root': [
-            (r'<%\S?', Keyword, 'sec'),
-            (r'[^<]+', Other),
-            (r'<', Other),
-            ],
-        'sec': [
-            (r'%>', Keyword, '#pop'),
-            # note: '\w\W' != '.' without DOTALL.
-            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
-            ],
-        }
-
-
-class TeaTemplateLexer(DelegatingLexer):
-    """
-    Lexer for `Tea Templates <http://teatrove.org/>`_.
-
-    *New in Pygments 1.5.*
-    """
-    name = 'Tea'
-    aliases = ['tea']
-    filenames = ['*.tea']
-    mimetypes = ['text/x-tea']
-
-    def __init__(self, **options):
-        super(TeaTemplateLexer, self).__init__(XmlLexer,
-                                               TeaTemplateRootLexer, **options)
-
-    def analyse_text(text):
-        rv = TeaLangLexer.analyse_text(text) - 0.01
-        if looks_like_xml(text):
-            rv += 0.4
-        if '<%' in text and '%>' in text:
-            rv += 0.1
-        return rv
-
-
-class LassoHtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `LassoLexer` which highlights unhandled data with the
-    `HtmlLexer`.
-
-    Nested JavaScript and CSS is also highlighted.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'HTML+Lasso'
-    aliases = ['html+lasso']
-    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
-                       '*.incl', '*.inc', '*.las']
-    mimetypes = ['text/html+lasso',
-                 'application/x-httpd-lasso',
-                 'application/x-httpd-lasso[89]']
-
-    def __init__(self, **options):
-        super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
-
-    def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
-        if re.search(r'<\w+>', text, re.I):
-            rv += 0.2
-        if html_doctype_matches(text):
-            rv += 0.5
-        return rv
-
-
-class LassoXmlLexer(DelegatingLexer):
-    """
-    Subclass of the `LassoLexer` which highlights unhandled data with the
-    `XmlLexer`.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'XML+Lasso'
-    aliases = ['xml+lasso']
-    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
-                       '*.incl', '*.inc', '*.las']
-    mimetypes = ['application/xml+lasso']
-
-    def __init__(self, **options):
-        super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
-
-    def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
-        if looks_like_xml(text):
-            rv += 0.5
-        return rv
-
-
-class LassoCssLexer(DelegatingLexer):
-    """
-    Subclass of the `LassoLexer` which highlights unhandled data with the
-    `CssLexer`.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'CSS+Lasso'
-    aliases = ['css+lasso']
-    alias_filenames = ['*.css']
-    mimetypes = ['text/css+lasso']
-
-    def __init__(self, **options):
-        options['requiredelimiters'] = True
-        super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
-
-    def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
-        if re.search(r'\w+:.+;', text):
-            rv += 0.1
-        if 'padding:' in text:
-            rv += 0.1
-        return rv
-
-
-class LassoJavascriptLexer(DelegatingLexer):
-    """
-    Subclass of the `LassoLexer` which highlights unhandled data with the
-    `JavascriptLexer`.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'JavaScript+Lasso'
-    aliases = ['js+lasso', 'javascript+lasso']
-    alias_filenames = ['*.js']
-    mimetypes = ['application/x-javascript+lasso',
-                 'text/x-javascript+lasso',
-                 'text/javascript+lasso']
-
-    def __init__(self, **options):
-        options['requiredelimiters'] = True
-        super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
-                                                   **options)
-
-    def analyse_text(text):
-        rv = LassoLexer.analyse_text(text)
-        if 'function' in text:
-            rv += 0.2
-        return rv
diff --git a/python/ext-libs/pygments/lexers/text.py b/python/ext-libs/pygments/lexers/text.py
deleted file mode 100644
index 57b7549..0000000
--- a/python/ext-libs/pygments/lexers/text.py
+++ /dev/null
@@ -1,1843 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.text
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for non-source code file types.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-from bisect import bisect
-
-from pygments.lexer import Lexer, LexerContext, RegexLexer, ExtendedRegexLexer, \
-     bygroups, include, using, this, do_insertions
-from pygments.token import Punctuation, Text, Comment, Keyword, Name, String, \
-     Generic, Operator, Number, Whitespace, Literal
-from pygments.util import get_bool_opt, ClassNotFound
-from pygments.lexers.other import BashLexer
-
-__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
-           'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
-           'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
-           'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
-           'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
-           'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
-           'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer']
-
-
-class IniLexer(RegexLexer):
-    """
-    Lexer for configuration files in INI style.
-    """
-
-    name = 'INI'
-    aliases = ['ini', 'cfg']
-    filenames = ['*.ini', '*.cfg']
-    mimetypes = ['text/x-ini']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'[;#].*', Comment.Single),
-            (r'\[.*?\]$', Keyword),
-            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
-             bygroups(Name.Attribute, Text, Operator, Text, String))
-        ]
-    }
-
-    def analyse_text(text):
-        npos = text.find('\n')
-        if npos < 3:
-            return False
-        return text[0] == '[' and text[npos-1] == ']'
-
-
-class RegeditLexer(RegexLexer):
-    """
-    Lexer for `Windows Registry
-    <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
-    by regedit.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'reg'
-    aliases = ['registry']
-    filenames = ['*.reg']
-    mimetypes = ['text/x-windows-registry']
-
-    tokens = {
-        'root': [
-            (r'Windows Registry Editor.*', Text),
-            (r'\s+', Text),
-            (r'[;#].*', Comment.Single),
-            (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
-             bygroups(Keyword, Operator, Name.Builtin, Keyword)),
-            # String keys, which obey somewhat normal escaping
-            (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
-             bygroups(Name.Attribute, Text, Operator, Text),
-             'value'),
-            # Bare keys (includes @)
-            (r'(.*?)([ \t]*)(=)([ \t]*)',
-             bygroups(Name.Attribute, Text, Operator, Text),
-             'value'),
-        ],
-        'value': [
-            (r'-', Operator, '#pop'), # delete value
-            (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
-             bygroups(Name.Variable, Punctuation, Number), '#pop'),
-            # As far as I know, .reg files do not support line continuation.
-            (r'.*', String, '#pop'),
-        ]
-    }
-
-    def analyse_text(text):
-        return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
-    """
-    Lexer for configuration files in Java's properties format.
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'Properties'
-    aliases = ['properties']
-    filenames = ['*.properties']
-    mimetypes = ['text/x-java-properties']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(?:[;#]|//).*$', Comment),
-            (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
-             bygroups(Name.Attribute, Text, Operator, Text, String)),
-        ],
-    }
-
-
-class SourcesListLexer(RegexLexer):
-    """
-    Lexer that highlights debian sources.list files.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'Debian Sourcelist'
-    aliases = ['sourceslist', 'sources.list']
-    filenames = ['sources.list']
-    mimetype = ['application/x-debian-sourceslist']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'#.*?$', Comment),
-            (r'^(deb(?:-src)?)(\s+)',
-             bygroups(Keyword, Text), 'distribution')
-        ],
-        'distribution': [
-            (r'#.*?$', Comment, '#pop'),
-            (r'\$\(ARCH\)', Name.Variable),
-            (r'[^\s$[]+', String),
-            (r'\[', String.Other, 'escaped-distribution'),
-            (r'\$', String),
-            (r'\s+', Text, 'components')
-        ],
-        'escaped-distribution': [
-            (r'\]', String.Other, '#pop'),
-            (r'\$\(ARCH\)', Name.Variable),
-            (r'[^\]$]+', String.Other),
-            (r'\$', String.Other)
-        ],
-        'components': [
-            (r'#.*?$', Comment, '#pop:2'),
-            (r'$', Text, '#pop:2'),
-            (r'\s+', Text),
-            (r'\S+', Keyword.Pseudo),
-        ]
-    }
-
-    def analyse_text(text):
-        for line in text.split('\n'):
-            line = line.strip()
-            if not (line.startswith('#') or line.startswith('deb ') or
-                    line.startswith('deb-src ') or not line):
-                return False
-        return True
-
-
-class MakefileLexer(Lexer):
-    """
-    Lexer for BSD and GNU make extensions (lenient enough to handle both in
-    the same file even).
-
-    *Rewritten in Pygments 0.10.*
-    """
-
-    name = 'Makefile'
-    aliases = ['make', 'makefile', 'mf', 'bsdmake']
-    filenames = ['*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
-    mimetypes = ['text/x-makefile']
-
-    r_special = re.compile(r'^(?:'
-        # BSD Make
-        r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
-        # GNU Make
-        r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)')
-    r_comment = re.compile(r'^\s*@?#')
-
-    def get_tokens_unprocessed(self, text):
-        ins = []
-        lines = text.splitlines(True)
-        done = ''
-        lex = BaseMakefileLexer(**self.options)
-        backslashflag = False
-        for line in lines:
-            if self.r_special.match(line) or backslashflag:
-                ins.append((len(done), [(0, Comment.Preproc, line)]))
-                backslashflag = line.strip().endswith('\\')
-            elif self.r_comment.match(line):
-                ins.append((len(done), [(0, Comment, line)]))
-            else:
-                done += line
-        for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
-            yield item
-
-
-class BaseMakefileLexer(RegexLexer):
-    """
-    Lexer for simple Makefiles (no preprocessing).
-
-    *New in Pygments 0.10.*
-    """
-
-    name = 'Base Makefile'
-    aliases = ['basemake']
-    filenames = []
-    mimetypes = []
-
-    tokens = {
-        'root': [
-            (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
-            (r'\$\((?:.*\\\n|.*\n)+', using(BashLexer)),
-            (r'\s+', Text),
-            (r'#.*?\n', Comment),
-            (r'(export)(\s+)(?=[a-zA-Z0-9_${}\t -]+\n)',
-             bygroups(Keyword, Text), 'export'),
-            (r'export\s+', Keyword),
-            # assignment
-            (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
-             bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
-            # strings
-            (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
-            (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
-            # targets
-            (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
-             'block-header'),
-            # TODO: add paren handling (grr)
-        ],
-        'export': [
-            (r'[a-zA-Z0-9_${}-]+', Name.Variable),
-            (r'\n', Text, '#pop'),
-            (r'\s+', Text),
-        ],
-        'block-header': [
-            (r'[^,\\\n#]+', Number),
-            (r',', Punctuation),
-            (r'#.*?\n', Comment),
-            (r'\\\n', Text), # line continuation
-            (r'\\.', Text),
-            (r'(?:[\t ]+.*\n|\n)+', using(BashLexer), '#pop'),
-        ],
-    }
-
-
-class DiffLexer(RegexLexer):
-    """
-    Lexer for unified or context-style diffs or patches.
-    """
-
-    name = 'Diff'
-    aliases = ['diff', 'udiff']
-    filenames = ['*.diff', '*.patch']
-    mimetypes = ['text/x-diff', 'text/x-patch']
-
-    tokens = {
-        'root': [
-            (r' .*\n', Text),
-            (r'\+.*\n', Generic.Inserted),
-            (r'-.*\n', Generic.Deleted),
-            (r'!.*\n', Generic.Strong),
-            (r'@.*\n', Generic.Subheading),
-            (r'([Ii]ndex|diff).*\n', Generic.Heading),
-            (r'=.*\n', Generic.Heading),
-            (r'.*\n', Text),
-        ]
-    }
-
-    def analyse_text(text):
-        if text[:7] == 'Index: ':
-            return True
-        if text[:5] == 'diff ':
-            return True
-        if text[:4] == '--- ':
-            return 0.9
-
-
-DPATCH_KEYWORDS = ['hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
-    'replace']
-
-class DarcsPatchLexer(RegexLexer):
-    """
-    DarcsPatchLexer is a lexer for the various versions of the darcs patch
-    format.  Examples of this format are derived by commands such as
-    ``darcs annotate --patch`` and ``darcs send``.
-
-    *New in Pygments 0.10.*
-    """
-    name = 'Darcs Patch'
-    aliases = ['dpatch']
-    filenames = ['*.dpatch', '*.darcspatch']
-
-    tokens = {
-        'root': [
-            (r'<', Operator),
-            (r'>', Operator),
-            (r'{', Operator),
-            (r'}', Operator),
-            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
-             bygroups(Operator, Keyword, Name, Text, Name, Operator,
-                      Literal.Date, Text, Operator)),
-            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
-             bygroups(Operator, Keyword, Name, Text, Name, Operator,
-                      Literal.Date, Text), 'comment'),
-            (r'New patches:', Generic.Heading),
-            (r'Context:', Generic.Heading),
-            (r'Patch bundle hash:', Generic.Heading),
-            (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
-                bygroups(Text, Keyword, Text)),
-            (r'\+', Generic.Inserted, "insert"),
-            (r'-', Generic.Deleted, "delete"),
-            (r'.*\n', Text),
-        ],
-        'comment': [
-            (r'[^\]].*\n', Comment),
-            (r'\]', Operator, "#pop"),
-        ],
-        'specialText': [ # darcs add [_CODE_] special operators for clarity
-            (r'\n', Text, "#pop"), # line-based
-            (r'\[_[^_]*_]', Operator),
-        ],
-        'insert': [
-            include('specialText'),
-            (r'\[', Generic.Inserted),
-            (r'[^\n\[]+', Generic.Inserted),
-        ],
-        'delete': [
-            include('specialText'),
-            (r'\[', Generic.Deleted),
-            (r'[^\n\[]+', Generic.Deleted),
-        ],
-    }
-
-
-class IrcLogsLexer(RegexLexer):
-    """
-    Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
-    """
-
-    name = 'IRC logs'
-    aliases = ['irc']
-    filenames = ['*.weechatlog']
-    mimetypes = ['text/x-irclog']
-
-    flags = re.VERBOSE | re.MULTILINE
-    timestamp = r"""
-        (
-          # irssi / xchat and others
-          (?: \[|\()?                  # Opening bracket or paren for the timestamp
-            (?:                        # Timestamp
-                (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits
-                 [T ])?                # Date/time separator: T or space
-                (?: \d?\d [:.]?)+      # Time as :/.-separated groups of 1 or 2 digits
-            )
-          (?: \]|\))?\s+               # Closing bracket or paren for the timestamp
-        |
-          # weechat
-          \d{4}\s\w{3}\s\d{2}\s        # Date
-          \d{2}:\d{2}:\d{2}\s+         # Time + Whitespace
-        |
-          # xchat
-          \w{3}\s\d{2}\s               # Date
-          \d{2}:\d{2}:\d{2}\s+         # Time + Whitespace
-        )?
-    """
-    tokens = {
-        'root': [
-                # log start/end
-            (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
-            # hack
-            ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
-            # normal msgs
-            ("^" + timestamp + r"""
-                (\s*<.*?>\s*)          # Nick """,
-             bygroups(Comment.Preproc, Name.Tag), 'msg'),
-            # /me msgs
-            ("^" + timestamp + r"""
-                (\s*[*]\s+)            # Star
-                (\S+\s+.*?\n)          # Nick + rest of message """,
-             bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
-            # join/part msgs
-            ("^" + timestamp + r"""
-                (\s*(?:\*{3}|<?-[!@=P]?->?)\s*)  # Star(s) or symbols
-                (\S+\s+)                     # Nick + Space
-                (.*?\n)                         # Rest of message """,
-             bygroups(Comment.Preproc, Keyword, String, Comment)),
-            (r"^.*?\n", Text),
-        ],
-        'msg': [
-            (r"\S+:(?!//)", Name.Attribute),  # Prefix
-            (r".*\n", Text, '#pop'),
-        ],
-    }
-
-
-class BBCodeLexer(RegexLexer):
-    """
-    A lexer that highlights BBCode(-like) syntax.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'BBCode'
-    aliases = ['bbcode']
-    mimetypes = ['text/x-bbcode']
-
-    tokens = {
-        'root': [
-            (r'[^[]+', Text),
-            # tag/end tag begin
-            (r'\[/?\w+', Keyword, 'tag'),
-            # stray bracket
-            (r'\[', Text),
-        ],
-        'tag': [
-            (r'\s+', Text),
-            # attribute with value
-            (r'(\w+)(=)("?[^\s"\]]+"?)',
-             bygroups(Name.Attribute, Operator, String)),
-            # tag argument (a la [color=green])
-            (r'(=)("?[^\s"\]]+"?)',
-             bygroups(Operator, String)),
-            # tag end
-            (r'\]', Keyword, '#pop'),
-        ],
-    }
-
-
-class TexLexer(RegexLexer):
-    """
-    Lexer for the TeX and LaTeX typesetting languages.
-    """
-
-    name = 'TeX'
-    aliases = ['tex', 'latex']
-    filenames = ['*.tex', '*.aux', '*.toc']
-    mimetypes = ['text/x-tex', 'text/x-latex']
-
-    tokens = {
-        'general': [
-            (r'%.*?\n', Comment),
-            (r'[{}]', Name.Builtin),
-            (r'[&_^]', Name.Builtin),
-        ],
-        'root': [
-            (r'\\\[', String.Backtick, 'displaymath'),
-            (r'\\\(', String, 'inlinemath'),
-            (r'\$\$', String.Backtick, 'displaymath'),
-            (r'\$', String, 'inlinemath'),
-            (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
-            include('general'),
-            (r'[^\\$%&_^{}]+', Text),
-        ],
-        'math': [
-            (r'\\([a-zA-Z]+|.)', Name.Variable),
-            include('general'),
-            (r'[0-9]+', Number),
-            (r'[-=!+*/()\[\]]', Operator),
-            (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
-        ],
-        'inlinemath': [
-            (r'\\\)', String, '#pop'),
-            (r'\$', String, '#pop'),
-            include('math'),
-        ],
-        'displaymath': [
-            (r'\\\]', String, '#pop'),
-            (r'\$\$', String, '#pop'),
-            (r'\$', Name.Builtin),
-            include('math'),
-        ],
-        'command': [
-            (r'\[.*?\]', Name.Attribute),
-            (r'\*', Keyword),
-            (r'', Text, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        for start in ("\\documentclass", "\\input", "\\documentstyle",
-                      "\\relax"):
-            if text[:len(start)] == start:
-                return True
-
-
-class GroffLexer(RegexLexer):
-    """
-    Lexer for the (g)roff typesetting language, supporting groff
-    extensions. Mainly useful for highlighting manpage sources.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'Groff'
-    aliases = ['groff', 'nroff', 'man']
-    filenames = ['*.[1234567]', '*.man']
-    mimetypes = ['application/x-troff', 'text/troff']
-
-    tokens = {
-        'root': [
-            (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
-            (r'\.', Punctuation, 'request'),
-            # Regular characters, slurp till we find a backslash or newline
-            (r'[^\\\n]*', Text, 'textline'),
-        ],
-        'textline': [
-            include('escapes'),
-            (r'[^\\\n]+', Text),
-            (r'\n', Text, '#pop'),
-        ],
-        'escapes': [
-            # groff has many ways to write escapes.
-            (r'\\"[^\n]*', Comment),
-            (r'\\[fn]\w', String.Escape),
-            (r'\\\(.{2}', String.Escape),
-            (r'\\.\[.*\]', String.Escape),
-            (r'\\.', String.Escape),
-            (r'\\\n', Text, 'request'),
-        ],
-        'request': [
-            (r'\n', Text, '#pop'),
-            include('escapes'),
-            (r'"[^\n"]+"', String.Double),
-            (r'\d+', Number),
-            (r'\S+', String),
-            (r'\s+', Text),
-        ],
-    }
-
-    def analyse_text(text):
-        if text[:1] != '.':
-            return False
-        if text[:3] == '.\\"':
-            return True
-        if text[:4] == '.TH ':
-            return True
-        if text[1:3].isalnum() and text[3].isspace():
-            return 0.9
-
-
-class ApacheConfLexer(RegexLexer):
-    """
-    Lexer for configuration files following the Apache config file
-    format.
-
-    *New in Pygments 0.6.*
-    """
-
-    name = 'ApacheConf'
-    aliases = ['apacheconf', 'aconf', 'apache']
-    filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
-    mimetypes = ['text/x-apacheconf']
-    flags = re.MULTILINE | re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(#.*?)$', Comment),
-            (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
-             bygroups(Name.Tag, Text, String, Name.Tag)),
-            (r'([a-zA-Z][a-zA-Z0-9_]*)(\s+)',
-             bygroups(Name.Builtin, Text), 'value'),
-            (r'\.+', Text),
-        ],
-        'value': [
-            (r'$', Text, '#pop'),
-            (r'[^\S\n]+', Text),
-            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
-            (r'\d+', Number),
-            (r'/([a-zA-Z0-9][a-zA-Z0-9_./-]+)', String.Other),
-            (r'(on|off|none|any|all|double|email|dns|min|minimal|'
-             r'os|productonly|full|emerg|alert|crit|error|warn|'
-             r'notice|info|debug|registry|script|inetd|standalone|'
-             r'user|group)\b', Keyword),
-            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
-            (r'[^\s"]+', Text)
-        ]
-    }
-
-
-class MoinWikiLexer(RegexLexer):
-    """
-    For MoinMoin (and Trac) Wiki markup.
-
-    *New in Pygments 0.7.*
-    """
-
-    name = 'MoinMoin/Trac Wiki markup'
-    aliases = ['trac-wiki', 'moin']
-    filenames = []
-    mimetypes = ['text/x-trac-wiki']
-    flags = re.MULTILINE | re.IGNORECASE
-
-    tokens = {
-        'root': [
-            (r'^#.*$', Comment),
-            (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
-            # Titles
-            (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
-             bygroups(Generic.Heading, using(this), Generic.Heading, String)),
-            # Literal code blocks, with optional shebang
-            (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
-            (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
-            # Lists
-            (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
-            (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
-            # Other Formatting
-            (r'\[\[\w+.*?\]\]', Keyword), # Macro
-            (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
-             bygroups(Keyword, String, Keyword)), # Link
-            (r'^----+$', Keyword), # Horizontal rules
-            (r'[^\n\'\[{!_~^,|]+', Text),
-            (r'\n', Text),
-            (r'.', Text),
-        ],
-        'codeblock': [
-            (r'}}}', Name.Builtin, '#pop'),
-            # these blocks are allowed to be nested in Trac, but not MoinMoin
-            (r'{{{', Text, '#push'),
-            (r'[^{}]+', Comment.Preproc), # slurp boring text
-            (r'.', Comment.Preproc), # allow loose { or }
-        ],
-    }
-
-
-class RstLexer(RegexLexer):
-    """
-    For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
-
-    *New in Pygments 0.7.*
-
-    Additional options accepted:
-
-    `handlecodeblocks`
-        Highlight the contents of ``.. sourcecode:: langauge`` and
-        ``.. code:: language`` directives with a lexer for the given
-        language (default: ``True``). *New in Pygments 0.8.*
-    """
-    name = 'reStructuredText'
-    aliases = ['rst', 'rest', 'restructuredtext']
-    filenames = ['*.rst', '*.rest']
-    mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
-    flags = re.MULTILINE
-
-    def _handle_sourcecode(self, match):
-        from pygments.lexers import get_lexer_by_name
-
-        # section header
-        yield match.start(1), Punctuation, match.group(1)
-        yield match.start(2), Text, match.group(2)
-        yield match.start(3), Operator.Word, match.group(3)
-        yield match.start(4), Punctuation, match.group(4)
-        yield match.start(5), Text, match.group(5)
-        yield match.start(6), Keyword, match.group(6)
-        yield match.start(7), Text, match.group(7)
-
-        # lookup lexer if wanted and existing
-        lexer = None
-        if self.handlecodeblocks:
-            try:
-                lexer = get_lexer_by_name(match.group(6).strip())
-            except ClassNotFound:
-                pass
-        indention = match.group(8)
-        indention_size = len(indention)
-        code = (indention + match.group(9) + match.group(10) + match.group(11))
-
-        # no lexer for this language. handle it like it was a code block
-        if lexer is None:
-            yield match.start(8), String, code
-            return
-
-        # highlight the lines with the lexer.
-        ins = []
-        codelines = code.splitlines(True)
-        code = ''
-        for line in codelines:
-            if len(line) > indention_size:
-                ins.append((len(code), [(0, Text, line[:indention_size])]))
-                code += line[indention_size:]
-            else:
-                code += line
-        for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
-            yield item
-
-    # from docutils.parsers.rst.states
-    closers = u'\'")]}>\u2019\u201d\xbb!?'
-    unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
-    end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
-                         % (re.escape(unicode_delimiters),
-                            re.escape(closers)))
-
-    tokens = {
-        'root': [
-            # Heading with overline
-            (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
-             r'(.+)(\n)(\1)(\n)',
-             bygroups(Generic.Heading, Text, Generic.Heading,
-                      Text, Generic.Heading, Text)),
-            # Plain heading
-            (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
-             r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
-             bygroups(Generic.Heading, Text, Generic.Heading, Text)),
-            # Bulleted lists
-            (r'^(\s*)([-*+])( .+\n(?:\1  .+\n)*)',
-             bygroups(Text, Number, using(this, state='inline'))),
-            # Numbered lists
-            (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1  .+\n)*)',
-             bygroups(Text, Number, using(this, state='inline'))),
-            (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1  .+\n)*)',
-             bygroups(Text, Number, using(this, state='inline'))),
-            # Numbered, but keep words at BOL from becoming lists
-            (r'^(\s*)([A-Z]+\.)( .+\n(?:\1  .+\n)+)',
-             bygroups(Text, Number, using(this, state='inline'))),
-            (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1  .+\n)+)',
-             bygroups(Text, Number, using(this, state='inline'))),
-            # Line blocks
-            (r'^(\s*)(\|)( .+\n(?:\|  .+\n)*)',
-             bygroups(Text, Operator, using(this, state='inline'))),
-            # Sourcecode directives
-            (r'^( *\.\.)(\s*)((?:source)?code)(::)([ \t]*)([^\n]+)'
-             r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
-             _handle_sourcecode),
-            # A directive
-            (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
-             bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
-                      using(this, state='inline'))),
-            # A reference target
-            (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
-             bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
-            # A footnote/citation target
-            (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
-             bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
-            # A substitution def
-            (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
-             bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
-                      Punctuation, Text, using(this, state='inline'))),
-            # Comments
-            (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
-            # Field list
-            (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
-            (r'^( *)(:.*?:)([ \t]+)(.*?)$',
-             bygroups(Text, Name.Class, Text, Name.Function)),
-            # Definition list
-            (r'^([^ ].*(?<!::)\n)((?:(?: +.*)\n)+)',
-             bygroups(using(this, state='inline'), using(this, state='inline'))),
-            # Code blocks
-            (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
-             bygroups(String.Escape, Text, String, String, Text, String)),
-            include('inline'),
-        ],
-        'inline': [
-            (r'\\.', Text), # escape
-            (r'``', String, 'literal'), # code
-            (r'(`.+?)(<.+?>)(`__?)',  # reference with inline target
-             bygroups(String, String.Interpol, String)),
-            (r'`.+?`__?', String), # reference
-            (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
-             bygroups(Name.Variable, Name.Attribute)), # role
-            (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
-             bygroups(Name.Attribute, Name.Variable)), # role (content first)
-            (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
-            (r'\*.+?\*', Generic.Emph), # Emphasis
-            (r'\[.*?\]_', String), # Footnote or citation
-            (r'<.+?>', Name.Tag), # Hyperlink
-            (r'[^\\\n\[*`:]+', Text),
-            (r'.', Text),
-        ],
-        'literal': [
-            (r'[^`]+', String),
-            (r'``' + end_string_suffix, String, '#pop'),
-            (r'`', String),
-        ]
-    }
-
-    def __init__(self, **options):
-        self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
-        RegexLexer.__init__(self, **options)
-
-    def analyse_text(text):
-        if text[:2] == '..' and text[2:3] != '.':
-            return 0.3
-        p1 = text.find("\n")
-        p2 = text.find("\n", p1 + 1)
-        if (p2 > -1 and              # has two lines
-            p1 * 2 + 1 == p2 and     # they are the same length
-            text[p1+1] in '-=' and   # the next line both starts and ends with
-            text[p1+1] == text[p2-1]): # ...a sufficiently high header
-            return 0.5
-
-
-class VimLexer(RegexLexer):
-    """
-    Lexer for VimL script files.
-
-    *New in Pygments 0.8.*
-    """
-    name = 'VimL'
-    aliases = ['vim']
-    filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
-                 '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
-    mimetypes = ['text/x-vim']
-    flags = re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r'^\s*".*', Comment),
-
-            (r'[ \t]+', Text),
-            # TODO: regexes can have other delims
-            (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
-            (r'"(\\\\|\\"|[^\n"])*"', String.Double),
-            (r"'(\\\\|\\'|[^\n'])*'", String.Single),
-
-            # Who decided that doublequote was a good comment character??
-            (r'(?<=\s)"[^\-:.%#=*].*', Comment),
-            (r'-?\d+', Number),
-            (r'#[0-9a-f]{6}', Number.Hex),
-            (r'^:', Punctuation),
-            (r'[()<>+=!|,~-]', Punctuation), # Inexact list.  Looks decent.
-            (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
-             Keyword),
-            (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
-            (r'\b\w+\b', Name.Other), # These are postprocessed below
-            (r'.', Text),
-        ],
-    }
-    def __init__(self, **options):
-        from pygments.lexers._vimbuiltins import command, option, auto
-        self._cmd = command
-        self._opt = option
-        self._aut = auto
-
-        RegexLexer.__init__(self, **options)
-
-    def is_in(self, w, mapping):
-        r"""
-        It's kind of difficult to decide if something might be a keyword
-        in VimL because it allows you to abbreviate them.  In fact,
-        'ab[breviate]' is a good example.  :ab, :abbre, or :abbreviate are
-        valid ways to call it so rather than making really awful regexps
-        like::
-
-            \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
-        we match `\b\w+\b` and then call is_in() on those tokens.  See
-        `scripts/get_vimkw.py` for how the lists are extracted.
-        """
-        p = bisect(mapping, (w,))
-        if p > 0:
-            if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
-               mapping[p-1][1][:len(w)] == w: return True
-        if p < len(mapping):
-            return mapping[p][0] == w[:len(mapping[p][0])] and \
-                   mapping[p][1][:len(w)] == w
-        return False
-
-    def get_tokens_unprocessed(self, text):
-        # TODO: builtins are only subsequent tokens on lines
-        #       and 'keywords' only happen at the beginning except
-        #       for :au ones
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name.Other:
-                if self.is_in(value, self._cmd):
-                    yield index, Keyword, value
-                elif self.is_in(value, self._opt) or \
-                     self.is_in(value, self._aut):
-                    yield index, Name.Builtin, value
-                else:
-                    yield index, Text, value
-            else:
-                yield index, token, value
-
-
-class GettextLexer(RegexLexer):
-    """
-    Lexer for Gettext catalog files.
-
-    *New in Pygments 0.9.*
-    """
-    name = 'Gettext Catalog'
-    aliases = ['pot', 'po']
-    filenames = ['*.pot', '*.po']
-    mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
-    tokens = {
-        'root': [
-            (r'^#,\s.*?$', Keyword.Type),
-            (r'^#:\s.*?$', Keyword.Declaration),
-            #(r'^#$', Comment),
-            (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
-            (r'^(")([A-Za-z-]+:)(.*")$',
-             bygroups(String, Name.Property, String)),
-            (r'^".*"$', String),
-            (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
-             bygroups(Name.Variable, Text, String)),
-            (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
-             bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
-        ]
-    }
-
-
-class SquidConfLexer(RegexLexer):
-    """
-    Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
-
-    *New in Pygments 0.9.*
-    """
-
-    name = 'SquidConf'
-    aliases = ['squidconf', 'squid.conf', 'squid']
-    filenames = ['squid.conf']
-    mimetypes = ['text/x-squidconf']
-    flags = re.IGNORECASE
-
-    keywords = [
-        "access_log", "acl", "always_direct", "announce_host",
-        "announce_period", "announce_port", "announce_to", "anonymize_headers",
-        "append_domain", "as_whois_server", "auth_param_basic",
-        "authenticate_children", "authenticate_program", "authenticate_ttl",
-        "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
-        "cache_dir", "cache_dns_program", "cache_effective_group",
-        "cache_effective_user", "cache_host", "cache_host_acl",
-        "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
-        "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
-        "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
-        "cache_stoplist_pattern", "cache_store_log", "cache_swap",
-        "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
-        "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
-        "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
-        "delay_initial_bucket_level", "delay_parameters", "delay_pools",
-        "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
-        "dns_testnames", "emulate_httpd_log", "err_html_text",
-        "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
-        "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
-        "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
-        "header_replace", "hierarchy_stoplist", "high_response_time_warning",
-        "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
-        "http_anonymizer", "httpd_accel", "httpd_accel_host",
-        "httpd_accel_port", "httpd_accel_uses_host_header",
-        "httpd_accel_with_proxy", "http_port", "http_reply_access",
-        "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
-        "ident_lookup", "ident_lookup_access", "ident_timeout",
-        "incoming_http_average", "incoming_icp_average", "inside_firewall",
-        "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
-        "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
-        "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
-        "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
-        "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
-        "memory_pools_limit", "memory_replacement_policy", "mime_table",
-        "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
-        "minimum_object_size", "minimum_retry_timeout", "miss_access",
-        "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
-        "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
-        "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
-        "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
-        "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
-        "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
-        "quick_abort_pct", "range_offset_limit", "read_timeout",
-        "redirect_children", "redirect_program",
-        "redirect_rewrites_host_header", "reference_age", "reference_age",
-        "refresh_pattern", "reload_into_ims", "request_body_max_size",
-        "request_size", "request_timeout", "shutdown_lifetime",
-        "single_parent_bypass", "siteselect_timeout", "snmp_access",
-        "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
-        "store_avg_object_size", "store_objects_per_bucket",
-        "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
-        "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
-        "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
-        "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
-        "unlinkd_program", "uri_whitespace", "useragent_log",
-        "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
-    ]
-
-    opts = [
-        "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
-        "multicast-responder", "on", "off", "all", "deny", "allow", "via",
-        "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
-        "credentialsttl", "none", "disable", "offline_toggle", "diskd",
-    ]
-
-    actions = [
-        "shutdown", "info", "parameter", "server_list", "client_list",
-        r'squid\.conf',
-    ]
-
-    actions_stats = [
-        "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
-        "redirector", "io", "reply_headers", "filedescriptors", "netdb",
-    ]
-
-    actions_log = ["status", "enable", "disable", "clear"]
-
-    acls = [
-        "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
-        "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
-        "dst", "time", "dstdomain", "ident", "snmp_community",
-    ]
-
-    ip_re = (
-        r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
-        r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
-        r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
-        r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
-        r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
-        r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
-        r'[1-9]?\d)){3}))'
-    )
-
-    def makelistre(list):
-        return r'\b(?:' + '|'.join(list) + r')\b'
-
-    tokens = {
-        'root': [
-            (r'\s+', Whitespace),
-            (r'#', Comment, 'comment'),
-            (makelistre(keywords), Keyword),
-            (makelistre(opts), Name.Constant),
-            # Actions
-            (makelistre(actions), String),
-            (r'stats/'+makelistre(actions), String),
-            (r'log/'+makelistre(actions)+r'=', String),
-            (makelistre(acls), Keyword),
-            (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
-            (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
-            (r'\S+', Text),
-        ],
-        'comment': [
-            (r'\s*TAG:.*', String.Escape, '#pop'),
-            (r'.*', Comment, '#pop'),
-        ],
-    }
-
-
-class DebianControlLexer(RegexLexer):
-    """
-    Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
-    *New in Pygments 0.9.*
-    """
-    name = 'Debian Control file'
-    aliases = ['control']
-    filenames = ['control']
-
-    tokens = {
-        'root': [
-            (r'^(Description)', Keyword, 'description'),
-            (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
-            (r'^((Build-)?Depends)', Keyword, 'depends'),
-            (r'^((?:Python-)?Version)(:\s*)(\S+)$',
-             bygroups(Keyword, Text, Number)),
-            (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
-             bygroups(Keyword, Text, Number)),
-            (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
-             bygroups(Keyword, Text, Number)),
-            (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
-             bygroups(Keyword, Whitespace, String)),
-        ],
-        'maintainer': [
-            (r'<[^>]+>', Generic.Strong),
-            (r'<[^>]+>$', Generic.Strong, '#pop'),
-            (r',\n?', Text),
-            (r'.', Text),
-        ],
-        'description': [
-            (r'(.*)(Homepage)(: )(\S+)',
-             bygroups(Text, String, Name, Name.Class)),
-            (r':.*\n', Generic.Strong),
-            (r' .*\n', Text),
-            ('', Text, '#pop'),
-        ],
-        'depends': [
-            (r':\s*', Text),
-            (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
-            (r'\(', Text, 'depend_vers'),
-            (r',', Text),
-            (r'\|', Operator),
-            (r'[\s]+', Text),
-            (r'[}\)]\s*$', Text, '#pop'),
-            (r'}', Text),
-            (r'[^,]$', Name.Function, '#pop'),
-            (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
-            (r'\[.*?\]', Name.Entity),
-        ],
-        'depend_vers': [
-            (r'\),', Text, '#pop'),
-            (r'\)[^,]', Text, '#pop:2'),
-            (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number))
-        ]
-    }
-
-
-class YamlLexerContext(LexerContext):
-    """Indentation context for the YAML lexer."""
-
-    def __init__(self, *args, **kwds):
-        super(YamlLexerContext, self).__init__(*args, **kwds)
-        self.indent_stack = []
-        self.indent = -1
-        self.next_indent = 0
-        self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
-    """
-    Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
-    language.
-
-    *New in Pygments 0.11.*
-    """
-
-    name = 'YAML'
-    aliases = ['yaml']
-    filenames = ['*.yaml', '*.yml']
-    mimetypes = ['text/x-yaml']
-
-
-    def something(token_class):
-        """Do not produce empty tokens."""
-        def callback(lexer, match, context):
-            text = match.group()
-            if not text:
-                return
-            yield match.start(), token_class, text
-            context.pos = match.end()
-        return callback
-
-    def reset_indent(token_class):
-        """Reset the indentation levels."""
-        def callback(lexer, match, context):
-            text = match.group()
-            context.indent_stack = []
-            context.indent = -1
-            context.next_indent = 0
-            context.block_scalar_indent = None
-            yield match.start(), token_class, text
-            context.pos = match.end()
-        return callback
-
-    def save_indent(token_class, start=False):
-        """Save a possible indentation level."""
-        def callback(lexer, match, context):
-            text = match.group()
-            extra = ''
-            if start:
-                context.next_indent = len(text)
-                if context.next_indent < context.indent:
-                    while context.next_indent < context.indent:
-                        context.indent = context.indent_stack.pop()
-                    if context.next_indent > context.indent:
-                        extra = text[context.indent:]
-                        text = text[:context.indent]
-            else:
-                context.next_indent += len(text)
-            if text:
-                yield match.start(), token_class, text
-            if extra:
-                yield match.start()+len(text), token_class.Error, extra
-            context.pos = match.end()
-        return callback
-
-    def set_indent(token_class, implicit=False):
-        """Set the previously saved indentation level."""
-        def callback(lexer, match, context):
-            text = match.group()
-            if context.indent < context.next_indent:
-                context.indent_stack.append(context.indent)
-                context.indent = context.next_indent
-            if not implicit:
-                context.next_indent += len(text)
-            yield match.start(), token_class, text
-            context.pos = match.end()
-        return callback
-
-    def set_block_scalar_indent(token_class):
-        """Set an explicit indentation level for a block scalar."""
-        def callback(lexer, match, context):
-            text = match.group()
-            context.block_scalar_indent = None
-            if not text:
-                return
-            increment = match.group(1)
-            if increment:
-                current_indent = max(context.indent, 0)
-                increment = int(increment)
-                context.block_scalar_indent = current_indent + increment
-            if text:
-                yield match.start(), token_class, text
-                context.pos = match.end()
-        return callback
-
-    def parse_block_scalar_empty_line(indent_token_class, content_token_class):
-        """Process an empty line in a block scalar."""
-        def callback(lexer, match, context):
-            text = match.group()
-            if (context.block_scalar_indent is None or
-                    len(text) <= context.block_scalar_indent):
-                if text:
-                    yield match.start(), indent_token_class, text
-            else:
-                indentation = text[:context.block_scalar_indent]
-                content = text[context.block_scalar_indent:]
-                yield match.start(), indent_token_class, indentation
-                yield (match.start()+context.block_scalar_indent,
-                        content_token_class, content)
-            context.pos = match.end()
-        return callback
-
-    def parse_block_scalar_indent(token_class):
-        """Process indentation spaces in a block scalar."""
-        def callback(lexer, match, context):
-            text = match.group()
-            if context.block_scalar_indent is None:
-                if len(text) <= max(context.indent, 0):
-                    context.stack.pop()
-                    context.stack.pop()
-                    return
-                context.block_scalar_indent = len(text)
-            else:
-                if len(text) < context.block_scalar_indent:
-                    context.stack.pop()
-                    context.stack.pop()
-                    return
-            if text:
-                yield match.start(), token_class, text
-                context.pos = match.end()
-        return callback
-
-    def parse_plain_scalar_indent(token_class):
-        """Process indentation spaces in a plain scalar."""
-        def callback(lexer, match, context):
-            text = match.group()
-            if len(text) <= context.indent:
-                context.stack.pop()
-                context.stack.pop()
-                return
-            if text:
-                yield match.start(), token_class, text
-                context.pos = match.end()
-        return callback
-
-
-
-    tokens = {
-        # the root rules
-        'root': [
-            # ignored whitespaces
-            (r'[ ]+(?=#|$)', Text),
-            # line breaks
-            (r'\n+', Text),
-            # a comment
-            (r'#[^\n]*', Comment.Single),
-            # the '%YAML' directive
-            (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
-            # the %TAG directive
-            (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
-            # document start and document end indicators
-            (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
-             'block-line'),
-            # indentation spaces
-            (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True),
-             ('block-line', 'indentation')),
-        ],
-
-        # trailing whitespaces after directives or a block scalar indicator
-        'ignored-line': [
-            # ignored whitespaces
-            (r'[ ]+(?=#|$)', Text),
-            # a comment
-            (r'#[^\n]*', Comment.Single),
-            # line break
-            (r'\n', Text, '#pop:2'),
-        ],
-
-        # the %YAML directive
-        'yaml-directive': [
-            # the version number
-            (r'([ ]+)([0-9]+\.[0-9]+)',
-             bygroups(Text, Number), 'ignored-line'),
-        ],
-
-        # the %YAG directive
-        'tag-directive': [
-            # a tag handle and the corresponding prefix
-            (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
-             r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
-             bygroups(Text, Keyword.Type, Text, Keyword.Type),
-             'ignored-line'),
-        ],
-
-        # block scalar indicators and indentation spaces
-        'indentation': [
-            # trailing whitespaces are ignored
-            (r'[ ]*$', something(Text), '#pop:2'),
-            # whitespaces preceding block collection indicators
-            (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
-            # block collection indicators
-            (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
-            # the beginning a block line
-            (r'[ ]*', save_indent(Text), '#pop'),
-        ],
-
-        # an indented line in the block context
-        'block-line': [
-            # the line end
-            (r'[ ]*(?=#|$)', something(Text), '#pop'),
-            # whitespaces separating tokens
-            (r'[ ]+', Text),
-            # tags, anchors and aliases,
-            include('descriptors'),
-            # block collections and scalars
-            include('block-nodes'),
-            # flow collections and quoted scalars
-            include('flow-nodes'),
-            # a plain scalar
-            (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
-             something(Name.Variable),
-             'plain-scalar-in-block-context'),
-        ],
-
-        # tags, anchors, aliases
-        'descriptors' : [
-            # a full-form tag
-            (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type),
-            # a tag in the form '!', '!suffix' or '!handle!suffix'
-            (r'!(?:[0-9A-Za-z_-]+)?'
-             r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type),
-            # an anchor
-            (r'&[0-9A-Za-z_-]+', Name.Label),
-            # an alias
-            (r'\*[0-9A-Za-z_-]+', Name.Variable),
-        ],
-
-        # block collections and scalars
-        'block-nodes': [
-            # implicit key
-            (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
-            # literal and folded scalars
-            (r'[|>]', Punctuation.Indicator,
-             ('block-scalar-content', 'block-scalar-header')),
-        ],
-
-        # flow collections and quoted scalars
-        'flow-nodes': [
-            # a flow sequence
-            (r'\[', Punctuation.Indicator, 'flow-sequence'),
-            # a flow mapping
-            (r'\{', Punctuation.Indicator, 'flow-mapping'),
-            # a single-quoted scalar
-            (r'\'', String, 'single-quoted-scalar'),
-            # a double-quoted scalar
-            (r'\"', String, 'double-quoted-scalar'),
-        ],
-
-        # the content of a flow collection
-        'flow-collection': [
-            # whitespaces
-            (r'[ ]+', Text),
-            # line breaks
-            (r'\n+', Text),
-            # a comment
-            (r'#[^\n]*', Comment.Single),
-            # simple indicators
-            (r'[?:,]', Punctuation.Indicator),
-            # tags, anchors and aliases
-            include('descriptors'),
-            # nested collections and quoted scalars
-            include('flow-nodes'),
-            # a plain scalar
-            (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
-             something(Name.Variable),
-             'plain-scalar-in-flow-context'),
-        ],
-
-        # a flow sequence indicated by '[' and ']'
-        'flow-sequence': [
-            # include flow collection rules
-            include('flow-collection'),
-            # the closing indicator
-            (r'\]', Punctuation.Indicator, '#pop'),
-        ],
-
-        # a flow mapping indicated by '{' and '}'
-        'flow-mapping': [
-            # include flow collection rules
-            include('flow-collection'),
-            # the closing indicator
-            (r'\}', Punctuation.Indicator, '#pop'),
-        ],
-
-        # block scalar lines
-        'block-scalar-content': [
-            # line break
-            (r'\n', Text),
-            # empty line
-            (r'^[ ]+$',
-             parse_block_scalar_empty_line(Text, Name.Constant)),
-            # indentation spaces (we may leave the state here)
-            (r'^[ ]*', parse_block_scalar_indent(Text)),
-            # line content
-            (r'[^\n\r\f\v]+', Name.Constant),
-        ],
-
-        # the content of a literal or folded scalar
-        'block-scalar-header': [
-            # indentation indicator followed by chomping flag
-            (r'([1-9])?[+-]?(?=[ ]|$)',
-             set_block_scalar_indent(Punctuation.Indicator),
-             'ignored-line'),
-            # chomping flag followed by indentation indicator
-            (r'[+-]?([1-9])?(?=[ ]|$)',
-             set_block_scalar_indent(Punctuation.Indicator),
-             'ignored-line'),
-        ],
-
-        # ignored and regular whitespaces in quoted scalars
-        'quoted-scalar-whitespaces': [
-            # leading and trailing whitespaces are ignored
-            (r'^[ ]+', Text),
-            (r'[ ]+$', Text),
-            # line breaks are ignored
-            (r'\n+', Text),
-            # other whitespaces are a part of the value
-            (r'[ ]+', Name.Variable),
-        ],
-
-        # single-quoted scalars
-        'single-quoted-scalar': [
-            # include whitespace and line break rules
-            include('quoted-scalar-whitespaces'),
-            # escaping of the quote character
-            (r'\'\'', String.Escape),
-            # regular non-whitespace characters
-            (r'[^ \t\n\r\f\v\']+', String),
-            # the closing quote
-            (r'\'', String, '#pop'),
-        ],
-
-        # double-quoted scalars
-        'double-quoted-scalar': [
-            # include whitespace and line break rules
-            include('quoted-scalar-whitespaces'),
-            # escaping of special characters
-            (r'\\[0abt\tn\nvfre "\\N_LP]', String),
-            # escape codes
-            (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
-             String.Escape),
-            # regular non-whitespace characters
-            (r'[^ \t\n\r\f\v\"\\]+', String),
-            # the closing quote
-            (r'"', String, '#pop'),
-        ],
-
-        # the beginning of a new line while scanning a plain scalar
-        'plain-scalar-in-block-context-new-line': [
-            # empty lines
-            (r'^[ ]+$', Text),
-            # line breaks
-            (r'\n+', Text),
-            # document start and document end indicators
-            (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
-            # indentation spaces (we may leave the block line state here)
-            (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
-        ],
-
-        # a plain scalar in the block context
-        'plain-scalar-in-block-context': [
-            # the scalar ends with the ':' indicator
-            (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
-            # the scalar ends with whitespaces followed by a comment
-            (r'[ ]+(?=#)', Text, '#pop'),
-            # trailing whitespaces are ignored
-            (r'[ ]+$', Text),
-            # line breaks are ignored
-            (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
-            # other whitespaces are a part of the value
-            (r'[ ]+', Literal.Scalar.Plain),
-            # regular non-whitespace characters
-            (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain),
-        ],
-
-        # a plain scalar is the flow context
-        'plain-scalar-in-flow-context': [
-            # the scalar ends with an indicator character
-            (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
-            # the scalar ends with a comment
-            (r'[ ]+(?=#)', Text, '#pop'),
-            # leading and trailing whitespaces are ignored
-            (r'^[ ]+', Text),
-            (r'[ ]+$', Text),
-            # line breaks are ignored
-            (r'\n+', Text),
-            # other whitespaces are a part of the value
-            (r'[ ]+', Name.Variable),
-            # regular non-whitespace characters
-            (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable),
-        ],
-
-    }
-
-    def get_tokens_unprocessed(self, text=None, context=None):
-        if context is None:
-            context = YamlLexerContext(text, 0)
-        return super(YamlLexer, self).get_tokens_unprocessed(text, context)
-
-
-class LighttpdConfLexer(RegexLexer):
-    """
-    Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
-
-    *New in Pygments 0.11.*
-    """
-    name = 'Lighttpd configuration file'
-    aliases = ['lighty', 'lighttpd']
-    filenames = []
-    mimetypes = ['text/x-lighttpd-conf']
-
-    tokens = {
-        'root': [
-            (r'#.*\n', Comment.Single),
-            (r'/\S*', Name), # pathname
-            (r'[a-zA-Z._-]+', Keyword),
-            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
-            (r'[0-9]+', Number),
-            (r'=>|=~|\+=|==|=|\+', Operator),
-            (r'\$[A-Z]+', Name.Builtin),
-            (r'[(){}\[\],]', Punctuation),
-            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
-            (r'\s+', Text),
-        ],
-
-    }
-
-
-class NginxConfLexer(RegexLexer):
-    """
-    Lexer for `Nginx <http://nginx.net/>`_ configuration files.
-
-    *New in Pygments 0.11.*
-    """
-    name = 'Nginx configuration file'
-    aliases = ['nginx']
-    filenames = []
-    mimetypes = ['text/x-nginx-conf']
-
-    tokens = {
-        'root': [
-            (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
-            (r'[^\s;#]+', Keyword, 'stmt'),
-            include('base'),
-        ],
-        'block': [
-            (r'}', Punctuation, '#pop:2'),
-            (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
-            include('base'),
-        ],
-        'stmt': [
-            (r'{', Punctuation, 'block'),
-            (r';', Punctuation, '#pop'),
-            include('base'),
-        ],
-        'base': [
-            (r'#.*\n', Comment.Single),
-            (r'on|off', Name.Constant),
-            (r'\$[^\s;#()]+', Name.Variable),
-            (r'([a-z0-9.-]+)(:)([0-9]+)',
-             bygroups(Name, Punctuation, Number.Integer)),
-            (r'[a-z-]+/[a-z-+]+', String), # mimetype
-            #(r'[a-zA-Z._-]+', Keyword),
-            (r'[0-9]+[km]?\b', Number.Integer),
-            (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
-            (r'[:=~]', Punctuation),
-            (r'[^\s;#{}$]+', String), # catch all
-            (r'/[^\s;#]*', Name), # pathname
-            (r'\s+', Text),
-            (r'[$;]', Text),  # leftover characters
-        ],
-    }
-
-
-class CMakeLexer(RegexLexer):
-    """
-    Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
-
-    *New in Pygments 1.2.*
-    """
-    name = 'CMake'
-    aliases = ['cmake']
-    filenames = ['*.cmake', 'CMakeLists.txt']
-    mimetypes = ['text/x-cmake']
-
-    tokens = {
-        'root': [
-            #(r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
-            # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
-            # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
-            # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
-            # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
-            # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
-            # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
-            # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
-            # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
-            # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
-            # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
-            # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
-            # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
-            # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
-            # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
-            # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
-            # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
-            # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
-            # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
-            # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
-            # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
-            # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
-            # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
-            # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
-            # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
-            # r'COUNTARGS)\b', Name.Builtin, 'args'),
-            (r'\b([A-Za-z_]+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
-                                                     Punctuation), 'args'),
-            include('keywords'),
-            include('ws')
-        ],
-        'args': [
-            (r'\(', Punctuation, '#push'),
-            (r'\)', Punctuation, '#pop'),
-            (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)),
-            (r'(?s)".*?"', String.Double),
-            (r'\\\S+', String),
-            (r'[^\)$"# \t\n]+', String),
-            (r'\n', Text), # explicitly legal
-            include('keywords'),
-            include('ws')
-        ],
-        'string': [
-
-        ],
-        'keywords': [
-            (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
-             r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
-        ],
-        'ws': [
-            (r'[ \t]+', Text),
-            (r'#.+\n', Comment),
-        ]
-    }
-
-
-class HttpLexer(RegexLexer):
-    """
-    Lexer for HTTP sessions.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'HTTP'
-    aliases = ['http']
-
-    flags = re.DOTALL
-
-    def header_callback(self, match):
-        if match.group(1).lower() == 'content-type':
-            content_type = match.group(5).strip()
-            if ';' in content_type:
-                content_type = content_type[:content_type.find(';')].strip()
-            self.content_type = content_type
-        yield match.start(1), Name.Attribute, match.group(1)
-        yield match.start(2), Text, match.group(2)
-        yield match.start(3), Operator, match.group(3)
-        yield match.start(4), Text, match.group(4)
-        yield match.start(5), Literal, match.group(5)
-        yield match.start(6), Text, match.group(6)
-
-    def continuous_header_callback(self, match):
-        yield match.start(1), Text, match.group(1)
-        yield match.start(2), Literal, match.group(2)
-        yield match.start(3), Text, match.group(3)
-
-    def content_callback(self, match):
-        content_type = getattr(self, 'content_type', None)
-        content = match.group()
-        offset = match.start()
-        if content_type:
-            from pygments.lexers import get_lexer_for_mimetype
-            try:
-                lexer = get_lexer_for_mimetype(content_type)
-            except ClassNotFound:
-                pass
-            else:
-                for idx, token, value in lexer.get_tokens_unprocessed(content):
-                    yield offset + idx, token, value
-                return
-        yield offset, Text, content
-
-    tokens = {
-        'root': [
-            (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
-             r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
-             bygroups(Name.Function, Text, Name.Namespace, Text,
-                      Keyword.Reserved, Operator, Number, Text),
-             'headers'),
-            (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
-             bygroups(Keyword.Reserved, Operator, Number, Text, Number,
-                      Text, Name.Exception, Text),
-             'headers'),
-        ],
-        'headers': [
-            (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
-            (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
-            (r'\r?\n', Text, 'content')
-        ],
-        'content': [
-            (r'.+', content_callback)
-        ]
-    }
-
-
-class PyPyLogLexer(RegexLexer):
-    """
-    Lexer for PyPy log files.
-
-    *New in Pygments 1.5.*
-    """
-    name = "PyPy Log"
-    aliases = ["pypylog", "pypy"]
-    filenames = ["*.pypylog"]
-    mimetypes = ['application/x-pypylog']
-
-    tokens = {
-        "root": [
-            (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
-            (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
-            include("extra-stuff"),
-        ],
-        "jit-log": [
-            (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
-            (r"^\+\d+: ", Comment),
-            (r"--end of the loop--", Comment),
-            (r"[ifp]\d+", Name),
-            (r"ptr\d+", Name),
-            (r"(\()(\w+(?:\.\w+)?)(\))",
-             bygroups(Punctuation, Name.Builtin, Punctuation)),
-            (r"[\[\]=,()]", Punctuation),
-            (r"(\d+\.\d+|inf|-inf)", Number.Float),
-            (r"-?\d+", Number.Integer),
-            (r"'.*'", String),
-            (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
-            (r"<.*?>+", Name.Builtin),
-            (r"(label|debug_merge_point|jump|finish)", Name.Class),
-            (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
-             r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
-             r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
-             r"int_is_true|"
-             r"uint_floordiv|uint_ge|uint_lt|"
-             r"float_add|float_sub|float_mul|float_truediv|float_neg|"
-             r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
-             r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
-             r"cast_int_to_float|cast_float_to_int|"
-             r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
-             r"virtual_ref|mark_opaque_ptr|"
-             r"call_may_force|call_assembler|call_loopinvariant|"
-             r"call_release_gil|call_pure|call|"
-             r"new_with_vtable|new_array|newstr|newunicode|new|"
-             r"arraylen_gc|"
-             r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
-             r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
-             r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
-             r"getfield_raw|setfield_gc|setfield_raw|"
-             r"strgetitem|strsetitem|strlen|copystrcontent|"
-             r"unicodegetitem|unicodesetitem|unicodelen|"
-             r"guard_true|guard_false|guard_value|guard_isnull|"
-             r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
-             r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
-             Name.Builtin),
-            include("extra-stuff"),
-        ],
-        "jit-backend-counts": [
-            (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
-            (r":", Punctuation),
-            (r"\d+", Number),
-            include("extra-stuff"),
-        ],
-        "extra-stuff": [
-            (r"\s+", Text),
-            (r"#.*?$", Comment),
-        ],
-    }
-
-
-class HxmlLexer(RegexLexer):
-    """
-    Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
-
-    *New in Pygments 1.6.*
-    """
-    name = 'Hxml'
-    aliases = ['haxeml', 'hxml']
-    filenames = ['*.hxml']
-
-    tokens = {
-        'root': [
-            # Seperator
-            (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
-            # Compiler switches with one dash
-            (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
-            # Compilerswitches with two dashes
-            (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
-             r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
-            # Targets and other options that take an argument
-            (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
-             r'cp|cmd)( +)(.+)',
-             bygroups(Punctuation, Keyword, Whitespace, String)),
-            # Options that take only numerical arguments
-            (r'(-)(swf-version)( +)(\d+)',
-             bygroups(Punctuation, Keyword, Number.Integer)),
-            # An Option that defines the size, the fps and the background
-            # color of an flash movie
-            (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
-             bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
-                      Punctuation, Number.Integer, Punctuation, Number.Integer,
-                      Punctuation, Number.Hex)),
-            # options with two dashes that takes arguments
-            (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
-             r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
-            # Single line comment, multiline ones are not allowed.
-            (r'#.*', Comment.Single)
-        ]
-    }
diff --git a/python/ext-libs/pygments/lexers/web.py b/python/ext-libs/pygments/lexers/web.py
deleted file mode 100644
index 70223d2..0000000
--- a/python/ext-libs/pygments/lexers/web.py
+++ /dev/null
@@ -1,3423 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.lexers.web
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for web-related languages and markup.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-import copy
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
-     include, this
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-     Number, Other, Punctuation, Literal
-from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
-                          html_doctype_matches, unirange
-from pygments.lexers.agile import RubyLexer
-from pygments.lexers.compiled import ScalaLexer
-
-
-__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer',
-           'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
-           'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
-           'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
-           'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
-           'DtdLexer', 'DartLexer', 'LassoLexer', 'QmlLexer', 'TypeScriptLexer']
-
-
-class JavascriptLexer(RegexLexer):
-    """
-    For JavaScript source code.
-    """
-
-    name = 'JavaScript'
-    aliases = ['js', 'javascript']
-    filenames = ['*.js', ]
-    mimetypes = ['application/javascript', 'application/x-javascript',
-                 'text/x-javascript', 'text/javascript', ]
-
-    flags = re.DOTALL
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'<!--', Comment),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root': [
-            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
-             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'this)\b', Keyword, 'slashstartsregex'),
-            (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
-             r'extends|final|float|goto|implements|import|int|interface|long|native|'
-             r'package|private|protected|public|short|static|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Reserved),
-            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
-             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
-             r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
-
-
-class JsonLexer(RegexLexer):
-    """
-    For JSON data structures.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'JSON'
-    aliases = ['json']
-    filenames = ['*.json']
-    mimetypes = [ 'application/json', ]
-
-    # integer part of a number
-    int_part = r'-?(0|[1-9]\d*)'
-
-    # fractional part of a number
-    frac_part = r'\.\d+'
-
-    # exponential part of a number
-    exp_part = r'[eE](\+|-)?\d+'
-
-
-    flags = re.DOTALL
-    tokens = {
-        'whitespace': [
-            (r'\s+', Text),
-        ],
-
-        # represents a simple terminal value
-        'simplevalue': [
-            (r'(true|false|null)\b', Keyword.Constant),
-            (('%(int_part)s(%(frac_part)s%(exp_part)s|'
-              '%(exp_part)s|%(frac_part)s)') % vars(),
-             Number.Float),
-            (int_part, Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-        ],
-
-
-        # the right hand side of an object, after the attribute name
-        'objectattribute': [
-            include('value'),
-            (r':', Punctuation),
-            # comma terminates the attribute but expects more
-            (r',', Punctuation, '#pop'),
-            # a closing bracket terminates the entire object, so pop twice
-            (r'}', Punctuation, ('#pop', '#pop')),
-        ],
-
-        # a json object - { attr, attr, ... }
-        'objectvalue': [
-            include('whitespace'),
-            (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
-            (r'}', Punctuation, '#pop'),
-        ],
-
-        # json array - [ value, value, ... }
-        'arrayvalue': [
-            include('whitespace'),
-            include('value'),
-            (r',', Punctuation),
-            (r']', Punctuation, '#pop'),
-        ],
-
-        # a json value - either a simple value or a complex value (object or array)
-        'value': [
-            include('whitespace'),
-            include('simplevalue'),
-            (r'{', Punctuation, 'objectvalue'),
-            (r'\[', Punctuation, 'arrayvalue'),
-        ],
-
-
-        # the root of a json document whould be a value
-        'root': [
-            include('value'),
-        ],
-
-    }
-
-JSONLexer = JsonLexer  # for backwards compatibility with Pygments 1.5
-
-
-class ActionScriptLexer(RegexLexer):
-    """
-    For ActionScript source code.
-
-    *New in Pygments 0.9.*
-    """
-
-    name = 'ActionScript'
-    aliases = ['as', 'actionscript']
-    filenames = ['*.as']
-    mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
-                 'text/actionscript3']
-
-    flags = re.DOTALL
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
-            (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
-            (r'[{}\[\]();.]+', Punctuation),
-            (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
-             r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
-             r'switch)\b', Keyword),
-            (r'(class|public|final|internal|native|override|private|protected|'
-             r'static|import|extends|implements|interface|intrinsic|return|super|'
-             r'dynamic|function|const|get|namespace|package|set)\b',
-             Keyword.Declaration),
-            (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
-             Keyword.Constant),
-            (r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
-             r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
-             r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
-             r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
-             r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
-             r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
-             r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
-             r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
-             r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
-             r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
-             r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
-             r'EventPhase|ExternalInterface|FileFilter|FileReference|'
-             r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
-             r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
-             r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|'
-             r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput'
-             r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
-             r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
-             r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
-             r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
-             r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
-             r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
-             r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
-             r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
-             r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
-             r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
-             r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
-             r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
-             r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
-             r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
-             r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
-             r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
-             r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
-             r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
-             r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
-             r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
-             r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
-             r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
-             Name.Builtin),
-            (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
-             r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
-             r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
-             r'unescape)\b',Name.Function),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
-
-
-class ActionScript3Lexer(RegexLexer):
-    """
-    For ActionScript 3 source code.
-
-    *New in Pygments 0.11.*
-    """
-
-    name = 'ActionScript 3'
-    aliases = ['as3', 'actionscript3']
-    filenames = ['*.as']
-    mimetypes = ['application/x-actionscript', 'text/x-actionscript',
-                 'text/actionscript']
-
-    identifier = r'[$a-zA-Z_][a-zA-Z0-9_]*'
-    typeidentifier = identifier + '(?:\.<\w+>)?'
-
-    flags = re.DOTALL | re.MULTILINE
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'(function\s+)(' + identifier + r')(\s*)(\()',
-             bygroups(Keyword.Declaration, Name.Function, Text, Operator),
-             'funcparams'),
-            (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
-             typeidentifier + r')',
-             bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
-                      Keyword.Type)),
-            (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
-             bygroups(Keyword, Text, Name.Namespace, Text)),
-            (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
-             bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
-            (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
-            (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
-             r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
-             r'switch|import|include|as|is)\b',
-             Keyword),
-            (r'(class|public|final|internal|native|override|private|protected|'
-             r'static|import|extends|implements|interface|intrinsic|return|super|'
-             r'dynamic|function|const|get|namespace|package|set)\b',
-             Keyword.Declaration),
-            (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
-             Keyword.Constant),
-            (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
-             r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
-             r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
-             r'unescape)\b', Name.Function),
-            (identifier, Name),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-f]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator),
-        ],
-        'funcparams': [
-            (r'\s+', Text),
-            (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
-             typeidentifier + r'|\*)(\s*)',
-             bygroups(Text, Punctuation, Name, Text, Operator, Text,
-                      Keyword.Type, Text), 'defval'),
-            (r'\)', Operator, 'type')
-        ],
-        'type': [
-            (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
-             bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
-            (r'\s*', Text, '#pop:2')
-        ],
-        'defval': [
-            (r'(=)(\s*)([^(),]+)(\s*)(,?)',
-             bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
-            (r',?', Operator, '#pop')
-        ]
-    }
-
-    def analyse_text(text):
-        if re.match(r'\w+\s*:\s*\w', text):
-            return 0.3
-        return 0
-
-
-class CssLexer(RegexLexer):
-    """
-    For CSS (Cascading Style Sheets).
-    """
-
-    name = 'CSS'
-    aliases = ['css']
-    filenames = ['*.css']
-    mimetypes = ['text/css']
-
-    tokens = {
-        'root': [
-            include('basics'),
-        ],
-        'basics': [
-            (r'\s+', Text),
-            (r'/\*(?:.|\n)*?\*/', Comment),
-            (r'{', Punctuation, 'content'),
-            (r'\:[a-zA-Z0-9_-]+', Name.Decorator),
-            (r'\.[a-zA-Z0-9_-]+', Name.Class),
-            (r'\#[a-zA-Z0-9_-]+', Name.Function),
-            (r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'),
-            (r'[a-zA-Z0-9_-]+', Name.Tag),
-            (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single)
-        ],
-        'atrule': [
-            (r'{', Punctuation, 'atcontent'),
-            (r';', Punctuation, '#pop'),
-            include('basics'),
-        ],
-        'atcontent': [
-            include('basics'),
-            (r'}', Punctuation, '#pop:2'),
-        ],
-        'content': [
-            (r'\s+', Text),
-            (r'}', Punctuation, '#pop'),
-            (r'url\(.*?\)', String.Other),
-            (r'^@.*?$', Comment.Preproc),
-            (r'(azimuth|background-attachment|background-color|'
-             r'background-image|background-position|background-repeat|'
-             r'background|border-bottom-color|border-bottom-style|'
-             r'border-bottom-width|border-left-color|border-left-style|'
-             r'border-left-width|border-right|border-right-color|'
-             r'border-right-style|border-right-width|border-top-color|'
-             r'border-top-style|border-top-width|border-bottom|'
-             r'border-collapse|border-left|border-width|border-color|'
-             r'border-spacing|border-style|border-top|border|caption-side|'
-             r'clear|clip|color|content|counter-increment|counter-reset|'
-             r'cue-after|cue-before|cue|cursor|direction|display|'
-             r'elevation|empty-cells|float|font-family|font-size|'
-             r'font-size-adjust|font-stretch|font-style|font-variant|'
-             r'font-weight|font|height|letter-spacing|line-height|'
-             r'list-style-type|list-style-image|list-style-position|'
-             r'list-style|margin-bottom|margin-left|margin-right|'
-             r'margin-top|margin|marker-offset|marks|max-height|max-width|'
-             r'min-height|min-width|opacity|orphans|outline|outline-color|'
-             r'outline-style|outline-width|overflow(?:-x|-y)?|padding-bottom|'
-             r'padding-left|padding-right|padding-top|padding|page|'
-             r'page-break-after|page-break-before|page-break-inside|'
-             r'pause-after|pause-before|pause|pitch|pitch-range|'
-             r'play-during|position|quotes|richness|right|size|'
-             r'speak-header|speak-numeral|speak-punctuation|speak|'
-             r'speech-rate|stress|table-layout|text-align|text-decoration|'
-             r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
-             r'vertical-align|visibility|voice-family|volume|white-space|'
-             r'widows|width|word-spacing|z-index|bottom|left|'
-             r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
-             r'behind|below|bidi-override|blink|block|bold|bolder|both|'
-             r'capitalize|center-left|center-right|center|circle|'
-             r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
-             r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
-             r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
-             r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
-             r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
-             r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
-             r'inherit|inline-table|inline|inset|inside|invert|italic|'
-             r'justify|katakana-iroha|katakana|landscape|larger|large|'
-             r'left-side|leftwards|level|lighter|line-through|list-item|'
-             r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
-             r'lower|low|medium|message-box|middle|mix|monospace|'
-             r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
-             r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
-             r'open-quote|outset|outside|overline|pointer|portrait|px|'
-             r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
-             r'rightwards|s-resize|sans-serif|scroll|se-resize|'
-             r'semi-condensed|semi-expanded|separate|serif|show|silent|'
-             r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
-             r'spell-out|square|static|status-bar|super|sw-resize|'
-             r'table-caption|table-cell|table-column|table-column-group|'
-             r'table-footer-group|table-header-group|table-row|'
-             r'table-row-group|text|text-bottom|text-top|thick|thin|'
-             r'transparent|ultra-condensed|ultra-expanded|underline|'
-             r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
-             r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
-             r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
-            (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
-             r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
-             r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
-             r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
-             r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
-             r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
-             r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
-             r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
-             r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
-             r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
-             r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
-             r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
-             r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
-             r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
-             r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
-             r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
-             r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
-             r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
-             r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
-             r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
-             r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
-             r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
-             r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
-             r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
-             r'blueviolet|peachpuff)\b', Name.Builtin),
-            (r'\!important', Comment.Preproc),
-            (r'/\*(?:.|\n)*?\*/', Comment),
-            (r'\#[a-zA-Z0-9]{1,6}', Number),
-            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
-            (r'-?[0-9]+', Number),
-            (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
-            (r'[\[\]();]+', Punctuation),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name)
-        ]
-    }
-
-
-class ObjectiveJLexer(RegexLexer):
-    """
-    For Objective-J source code with preprocessor directives.
-
-    *New in Pygments 1.3.*
-    """
-
-    name = 'Objective-J'
-    aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
-    filenames = ['*.j']
-    mimetypes = ['text/x-objective-j']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
-
-    flags = re.DOTALL | re.MULTILINE
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-
-            # function definition
-            (r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' + _ws + '{)',
-             bygroups(using(this), using(this, state='function_signature'),
-                      using(this))),
-
-            # class definition
-            (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
-             'classname'),
-            (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
-             'forward_classname'),
-            (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
-
-            include('statements'),
-            ('[{\(\)}]', Punctuation),
-            (';', Punctuation),
-        ],
-        'whitespace': [
-            (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
-             bygroups(Comment.Preproc, Text, String.Double)),
-            (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
-             bygroups(Comment.Preproc, Text, String.Double)),
-            (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
-             bygroups(Comment.Preproc, Text, String.Double)),
-            (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
-             bygroups(Comment.Preproc, Text, String.Double)),
-
-            (r'#if\s+0', Comment.Preproc, 'if0'),
-            (r'#', Comment.Preproc, 'macro'),
-
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text), # line continuation
-            (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'<!--', Comment),
-        ],
-        'slashstartsregex': [
-            include('whitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop'),
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop'),
-        ],
-        'statements': [
-            (r'(L|@)?"', String, 'string'),
-            (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
-             String.Char),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
-
-            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
-             Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-
-            (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
-             r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
-            (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
-            (r'(@selector|@private|@protected|@public|@encode|'
-             r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
-             r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
-            (r'(int|long|float|short|double|char|unsigned|signed|void|'
-             r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
-             Keyword.Type),
-
-            (r'(self|super)\b', Name.Builtin),
-
-            (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
-            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-            (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
-             r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
-             r'SQRT2)\b', Keyword.Constant),
-
-            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
-             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
-             r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-
-            (r'([$a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r')(?=\()',
-             bygroups(Name.Function, using(this))),
-
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name),
-        ],
-        'classname' : [
-            # interface definition that inherits
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r':' + _ws +
-             r')([a-zA-Z_][a-zA-Z0-9_]*)?',
-             bygroups(Name.Class, using(this), Name.Class), '#pop'),
-            # interface definition for a category
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r'\()([a-zA-Z_][a-zA-Z0-9_]*)(\))',
-             bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
-            # simple interface / implementation
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop'),
-        ],
-        'forward_classname' : [
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*,\s*)',
-             bygroups(Name.Class, Text), '#push'),
-            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*;?)',
-             bygroups(Name.Class, Text), '#pop'),
-        ],
-        'function_signature': [
-            include('whitespace'),
-
-            # start of a selector w/ parameters
-            (r'(\(' + _ws + r')'                # open paren
-             r'([a-zA-Z_][a-zA-Z0-9_]+)'        # return type
-             r'(' + _ws + r'\)' + _ws + r')'    # close paren
-             r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
-             bygroups(using(this), Keyword.Type, using(this),
-                 Name.Function), 'function_parameters'),
-
-            # no-param function
-            (r'(\(' + _ws + r')'                # open paren
-             r'([a-zA-Z_][a-zA-Z0-9_]+)'        # return type
-             r'(' + _ws + r'\)' + _ws + r')'    # close paren
-             r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # function name
-             bygroups(using(this), Keyword.Type, using(this),
-                 Name.Function), "#pop"),
-
-            # no return type given, start of a selector w/ parameters
-            (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
-             bygroups (Name.Function), 'function_parameters'),
-
-            # no return type given, no-param function
-            (r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # function name
-             bygroups(Name.Function), "#pop"),
-
-            ('', Text, '#pop'),
-        ],
-        'function_parameters': [
-            include('whitespace'),
-
-            # parameters
-            (r'(\(' + _ws + ')'                 # open paren
-             r'([^\)]+)'                        # type
-             r'(' + _ws + r'\)' + _ws + r')'    # close paren
-             r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # param name
-             bygroups(using(this), Keyword.Type, using(this), Text)),
-
-            # one piece of a selector name
-            (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)',     # function name
-             Name.Function),
-
-            # smallest possible selector piece
-            (r'(:)', Name.Function),
-
-            # var args
-            (r'(,' + _ws + r'\.\.\.)', using(this)),
-
-            # param name
-            (r'([$a-zA-Z_][a-zA-Z0-9_]+)', Text),
-        ],
-        'expression' : [
-            (r'([$a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function,
-                                                        Punctuation)),
-            (r'(\))', Punctuation, "#pop"),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\', String), # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
-            (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
-            (r'.*?\n', Comment),
-        ]
-    }
-
-    def analyse_text(text):
-        if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
-            # special directive found in most Objective-J files
-            return True
-        return False
-
-
-class HtmlLexer(RegexLexer):
-    """
-    For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
-    by the appropriate lexer.
-    """
-
-    name = 'HTML'
-    aliases = ['html']
-    filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
-    mimetypes = ['text/html', 'application/xhtml+xml']
-
-    flags = re.IGNORECASE | re.DOTALL
-    tokens = {
-        'root': [
-            ('[^<&]+', Text),
-            (r'&\S*?;', Name.Entity),
-            (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
-            ('<!--', Comment, 'comment'),
-            (r'<\?.*?\?>', Comment.Preproc),
-            ('<![^>]*>', Comment.Preproc),
-            (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
-            (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
-            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
-            (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
-        ],
-        'comment': [
-            ('[^-]+', Comment),
-            ('-->', Comment, '#pop'),
-            ('-', Comment),
-        ],
-        'tag': [
-            (r'\s+', Text),
-            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
-            (r'[a-zA-Z0-9_:-]+', Name.Attribute),
-            (r'/?\s*>', Name.Tag, '#pop'),
-        ],
-        'script-content': [
-            (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
-            (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
-        ],
-        'style-content': [
-            (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
-            (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
-        ],
-        'attr': [
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if html_doctype_matches(text):
-            return 0.5
-
-
-class PhpLexer(RegexLexer):
-    """
-    For `PHP <http://www.php.net/>`_ source code.
-    For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
-    Additional options accepted:
-
-    `startinline`
-        If given and ``True`` the lexer starts highlighting with
-        php code (i.e.: no starting ``<?php`` required).  The default
-        is ``False``.
-    `funcnamehighlighting`
-        If given and ``True``, highlight builtin function names
-        (default: ``True``).
-    `disabledmodules`
-        If given, must be a list of module names whose function names
-        should not be highlighted. By default all modules are highlighted
-        except the special ``'unknown'`` module that includes functions
-        that are known to php but are undocumented.
-
-        To get a list of allowed modules have a look into the
-        `_phpbuiltins` module:
-
-        .. sourcecode:: pycon
-
-            >>> from pygments.lexers._phpbuiltins import MODULES
-            >>> MODULES.keys()
-            ['PHP Options/Info', 'Zip', 'dba', ...]
-
-        In fact the names of those modules match the module names from
-        the php documentation.
-    """
-
-    name = 'PHP'
-    aliases = ['php', 'php3', 'php4', 'php5']
-    filenames = ['*.php', '*.php[345]', '*.inc']
-    mimetypes = ['text/x-php']
-
-    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-    tokens = {
-        'root': [
-            (r'<\?(php)?', Comment.Preproc, 'php'),
-            (r'[^<]+', Other),
-            (r'<', Other)
-        ],
-        'php': [
-            (r'\?>', Comment.Preproc, '#pop'),
-            (r'<<<(\'?)([a-zA-Z_][a-zA-Z0-9_]*)\1\n.*?\n\2\;?\n', String),
-            (r'\s+', Text),
-            (r'#.*?\n', Comment.Single),
-            (r'//.*?\n', Comment.Single),
-            # put the empty comment here, it is otherwise seen as
-            # the start of a docstring
-            (r'/\*\*/', Comment.Multiline),
-            (r'/\*\*.*?\*/', String.Doc),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
-             bygroups(Operator, Text, Name.Attribute)),
-            (r'[~!%^&*+=|:.<>/?@-]+', Operator),
-            (r'[\[\]{}();,]+', Punctuation),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
-            (r'(function)(\s+)(&?)(\s*)',
-              bygroups(Keyword, Text, Operator, Text), 'functionname'),
-            (r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
-              bygroups(Keyword, Text, Name.Constant)),
-            (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
-             r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
-             r'FALSE|print|for|require|continue|foreach|require_once|'
-             r'declare|return|default|static|do|switch|die|stdClass|'
-             r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
-             r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
-             r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
-             r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
-             r'implements|public|private|protected|abstract|clone|try|'
-             r'catch|throw|this|use|namespace|trait)\b', Keyword),
-            (r'(true|false|null)\b', Keyword.Constant),
-            (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
-            (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
-            (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'0[0-7]+', Number.Oct),
-            (r'0[xX][a-fA-F0-9]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
-            (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
-            (r'"', String.Double, 'string'),
-        ],
-        'classname': [
-            (r'[a-zA-Z_][\\a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'functionname': [
-            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'[^{$"\\]+', String.Double),
-            (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
-            (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
-             String.Interpol),
-            (r'(\{\$\{)(.*?)(\}\})',
-             bygroups(String.Interpol, using(this, _startinline=True),
-                      String.Interpol)),
-            (r'(\{)(\$.*?)(\})',
-             bygroups(String.Interpol, using(this, _startinline=True),
-                      String.Interpol)),
-            (r'(\$\{)(\S+)(\})',
-             bygroups(String.Interpol, Name.Variable, String.Interpol)),
-            (r'[${\\]+', String.Double)
-        ],
-    }
-
-    def __init__(self, **options):
-        self.funcnamehighlighting = get_bool_opt(
-            options, 'funcnamehighlighting', True)
-        self.disabledmodules = get_list_opt(
-            options, 'disabledmodules', ['unknown'])
-        self.startinline = get_bool_opt(options, 'startinline', False)
-
-        # private option argument for the lexer itself
-        if '_startinline' in options:
-            self.startinline = options.pop('_startinline')
-
-        # collect activated functions in a set
-        self._functions = set()
-        if self.funcnamehighlighting:
-            from pygments.lexers._phpbuiltins import MODULES
-            for key, value in MODULES.iteritems():
-                if key not in self.disabledmodules:
-                    self._functions.update(value)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        stack = ['root']
-        if self.startinline:
-            stack.append('php')
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text, stack):
-            if token is Name.Other:
-                if value in self._functions:
-                    yield index, Name.Builtin, value
-                    continue
-            yield index, token, value
-
-    def analyse_text(text):
-        rv = 0.0
-        if re.search(r'<\?(?!xml)', text):
-            rv += 0.3
-        if '?>' in text:
-            rv += 0.1
-        return rv
-
-
-class DtdLexer(RegexLexer):
-    """
-    A lexer for DTDs (Document Type Definitions).
-
-    *New in Pygments 1.5.*
-    """
-
-    flags = re.MULTILINE | re.DOTALL
-
-    name = 'DTD'
-    aliases = ['dtd']
-    filenames = ['*.dtd']
-    mimetypes = ['application/xml-dtd']
-
-    tokens = {
-        'root': [
-            include('common'),
-
-            (r'(<!ELEMENT)(\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Tag), 'element'),
-            (r'(<!ATTLIST)(\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Tag), 'attlist'),
-            (r'(<!ENTITY)(\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Entity), 'entity'),
-            (r'(<!NOTATION)(\s+)(\S+)',
-                bygroups(Keyword, Text, Name.Tag), 'notation'),
-            (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
-                bygroups(Keyword, Name.Entity, Text, Keyword)),
-
-            (r'(<!DOCTYPE)(\s+)([^>\s]+)',
-                bygroups(Keyword, Text, Name.Tag)),
-            (r'PUBLIC|SYSTEM', Keyword.Constant),
-            (r'[\[\]>]', Keyword),
-        ],
-
-        'common': [
-            (r'\s+', Text),
-            (r'(%|&)[^;]*;', Name.Entity),
-            ('<!--', Comment, 'comment'),
-            (r'[(|)*,?+]', Operator),
-            (r'"[^"]*"', String.Double),
-            (r'\'[^\']*\'', String.Single),
-        ],
-
-        'comment': [
-            ('[^-]+', Comment),
-            ('-->', Comment, '#pop'),
-            ('-', Comment),
-        ],
-
-        'element': [
-            include('common'),
-            (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
-            (r'[^>\s\|()?+*,]+', Name.Tag),
-            (r'>', Keyword, '#pop'),
-        ],
-
-        'attlist': [
-            include('common'),
-            (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
-             Keyword.Constant),
-            (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
-            (r'xml:space|xml:lang', Keyword.Reserved),
-            (r'[^>\s\|()?+*,]+', Name.Attribute),
-            (r'>', Keyword, '#pop'),
-        ],
-
-        'entity': [
-            include('common'),
-            (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
-            (r'[^>\s\|()?+*,]+', Name.Entity),
-            (r'>', Keyword, '#pop'),
-        ],
-
-        'notation': [
-            include('common'),
-            (r'SYSTEM|PUBLIC', Keyword.Constant),
-            (r'[^>\s\|()?+*,]+', Name.Attribute),
-            (r'>', Keyword, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if not looks_like_xml(text) and \
-            ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
-            return 0.8
-
-class XmlLexer(RegexLexer):
-    """
-    Generic lexer for XML (eXtensible Markup Language).
-    """
-
-    flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
-    name = 'XML'
-    aliases = ['xml']
-    filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl']
-    mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
-                 'application/rss+xml', 'application/atom+xml']
-
-    tokens = {
-        'root': [
-            ('[^<&]+', Text),
-            (r'&\S*?;', Name.Entity),
-            (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
-            ('<!--', Comment, 'comment'),
-            (r'<\?.*?\?>', Comment.Preproc),
-            ('<![^>]*>', Comment.Preproc),
-            (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
-            (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
-        ],
-        'comment': [
-            ('[^-]+', Comment),
-            ('-->', Comment, '#pop'),
-            ('-', Comment),
-        ],
-        'tag': [
-            (r'\s+', Text),
-            (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
-            (r'/?\s*>', Name.Tag, '#pop'),
-        ],
-        'attr': [
-            ('\s+', Text),
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if looks_like_xml(text):
-            return 0.5
-
-
-class XsltLexer(XmlLexer):
-    '''
-    A lexer for XSLT.
-
-    *New in Pygments 0.10.*
-    '''
-
-    name = 'XSLT'
-    aliases = ['xslt']
-    filenames = ['*.xsl', '*.xslt', '*.xpl']  # xpl is XProc
-    mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
-    EXTRA_KEYWORDS = set([
-        'apply-imports', 'apply-templates', 'attribute',
-        'attribute-set', 'call-template', 'choose', 'comment',
-        'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
-        'for-each', 'if', 'import', 'include', 'key', 'message',
-        'namespace-alias', 'number', 'otherwise', 'output', 'param',
-        'preserve-space', 'processing-instruction', 'sort',
-        'strip-space', 'stylesheet', 'template', 'text', 'transform',
-        'value-of', 'variable', 'when', 'with-param'
-    ])
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
-            m = re.match('</?xsl:([^>]*)/?>?', value)
-
-            if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
-                yield index, Keyword, value
-            else:
-                yield index, token, value
-
-    def analyse_text(text):
-        if looks_like_xml(text) and '<xsl' in text:
-            return 0.8
-
-
-class MxmlLexer(RegexLexer):
-    """
-    For MXML markup.
-    Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
-    *New in Pygments 1.1.*
-    """
-    flags = re.MULTILINE | re.DOTALL
-    name = 'MXML'
-    aliases = ['mxml']
-    filenames = ['*.mxml']
-    mimetimes = ['text/xml', 'application/xml']
-
-    tokens = {
-            'root': [
-                ('[^<&]+', Text),
-                (r'&\S*?;', Name.Entity),
-                (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
-                 bygroups(String, using(ActionScript3Lexer), String)),
-                ('<!--', Comment, 'comment'),
-                (r'<\?.*?\?>', Comment.Preproc),
-                ('<![^>]*>', Comment.Preproc),
-                (r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
-                (r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
-            ],
-            'comment': [
-                ('[^-]+', Comment),
-                ('-->', Comment, '#pop'),
-                ('-', Comment),
-            ],
-            'tag': [
-                (r'\s+', Text),
-                (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
-                (r'/?\s*>', Name.Tag, '#pop'),
-            ],
-            'attr': [
-                ('\s+', Text),
-                ('".*?"', String, '#pop'),
-                ("'.*?'", String, '#pop'),
-                (r'[^\s>]+', String, '#pop'),
-            ],
-        }
-
-
-class HaxeLexer(RegexLexer):
-    """
-    For haXe source code (http://haxe.org/).
-
-    *New in Pygments 1.3.*
-    """
-
-    name = 'haXe'
-    aliases = ['hx', 'haXe']
-    filenames = ['*.hx']
-    mimetypes = ['text/haxe']
-
-    ident = r'(?:[a-zA-Z_][a-zA-Z0-9_]*)'
-    typeid = r'(?:(?:[a-z0-9_\.])*[A-Z_][A-Za-z0-9_]*)'
-    key_prop = r'(?:default|null|never)'
-    key_decl_mod = r'(?:public|private|override|static|inline|extern|dynamic)'
-
-    flags = re.DOTALL | re.MULTILINE
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            include('comments'),
-            (key_decl_mod, Keyword.Declaration),
-            include('enumdef'),
-            include('typedef'),
-            include('classdef'),
-            include('imports'),
-        ],
-
-        # General constructs
-        'comments': [
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'#[^\n]*', Comment.Preproc),
-        ],
-        'whitespace': [
-            include('comments'),
-            (r'\s+', Text),
-        ],
-        'codekeywords': [
-            (r'\b(if|else|while|do|for|in|break|continue|'
-             r'return|switch|case|try|catch|throw|null|trace|'
-             r'new|this|super|untyped|cast|callback|here)\b',
-             Keyword.Reserved),
-        ],
-        'literals': [
-            (r'0[xX][0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r'~/([^\n])*?/[gisx]*', String.Regex),
-            (r'\b(true|false|null)\b', Keyword.Constant),
-        ],
-        'codeblock': [
-          include('whitespace'),
-          include('new'),
-          include('case'),
-          include('anonfundef'),
-          include('literals'),
-          include('vardef'),
-          include('codekeywords'),
-          (r'[();,\[\]]', Punctuation),
-          (r'(?:=|\+=|-=|\*=|/=|%=|&=|\|=|\^=|<<=|>>=|>>>=|\|\||&&|'
-           r'\.\.\.|==|!=|>|<|>=|<=|\||&|\^|<<|>>>|>>|\+|\-|\*|/|%|'
-           r'!|\+\+|\-\-|~|\.|\?|\:)',
-           Operator),
-          (ident, Name),
-
-          (r'}', Punctuation,'#pop'),
-          (r'{', Punctuation,'#push'),
-        ],
-
-        # Instance/Block level constructs
-        'propertydef': [
-            (r'(\()(' + key_prop + ')(,)(' + key_prop + ')(\))',
-             bygroups(Punctuation, Keyword.Reserved, Punctuation,
-                      Keyword.Reserved, Punctuation)),
-        ],
-        'new': [
-            (r'\bnew\b', Keyword, 'typedecl'),
-        ],
-        'case': [
-            (r'\b(case)(\s+)(' + ident + ')(\s*)(\()',
-             bygroups(Keyword.Reserved, Text, Name, Text, Punctuation),
-             'funargdecl'),
-        ],
-        'vardef': [
-            (r'\b(var)(\s+)(' + ident + ')',
-             bygroups(Keyword.Declaration, Text, Name.Variable), 'vardecl'),
-        ],
-        'vardecl': [
-            include('whitespace'),
-            include('typelabel'),
-            (r'=', Operator,'#pop'),
-            (r';', Punctuation,'#pop'),
-        ],
-        'instancevardef': [
-            (key_decl_mod,Keyword.Declaration),
-            (r'\b(var)(\s+)(' + ident + ')',
-             bygroups(Keyword.Declaration, Text, Name.Variable.Instance),
-             'instancevardecl'),
-        ],
-        'instancevardecl': [
-            include('vardecl'),
-            include('propertydef'),
-        ],
-
-        'anonfundef': [
-            (r'\bfunction\b', Keyword.Declaration, 'fundecl'),
-        ],
-        'instancefundef': [
-            (key_decl_mod, Keyword.Declaration),
-            (r'\b(function)(\s+)(' + ident + ')',
-             bygroups(Keyword.Declaration, Text, Name.Function), 'fundecl'),
-        ],
-        'fundecl': [
-            include('whitespace'),
-            include('typelabel'),
-            include('generictypedecl'),
-            (r'\(',Punctuation,'funargdecl'),
-            (r'(?=[a-zA-Z0-9_])',Text,'#pop'),
-            (r'{',Punctuation,('#pop','codeblock')),
-            (r';',Punctuation,'#pop'),
-        ],
-        'funargdecl': [
-            include('whitespace'),
-            (ident, Name.Variable),
-            include('typelabel'),
-            include('literals'),
-            (r'=', Operator),
-            (r',', Punctuation),
-            (r'\?', Punctuation),
-            (r'\)', Punctuation, '#pop'),
-        ],
-
-        'typelabel': [
-            (r':', Punctuation, 'type'),
-        ],
-        'typedecl': [
-            include('whitespace'),
-            (typeid, Name.Class),
-            (r'<', Punctuation, 'generictypedecl'),
-            (r'(?=[{}()=,a-z])', Text,'#pop'),
-        ],
-        'type': [
-            include('whitespace'),
-            (typeid, Name.Class),
-            (r'<', Punctuation, 'generictypedecl'),
-            (r'->', Keyword.Type),
-            (r'(?=[{}(),;=])', Text, '#pop'),
-        ],
-        'generictypedecl': [
-            include('whitespace'),
-            (typeid, Name.Class),
-            (r'<', Punctuation, '#push'),
-            (r'>', Punctuation, '#pop'),
-            (r',', Punctuation),
-        ],
-
-        # Top level constructs
-        'imports': [
-            (r'(package|import|using)(\s+)([^;]+)(;)',
-             bygroups(Keyword.Namespace, Text, Name.Namespace,Punctuation)),
-        ],
-        'typedef': [
-            (r'typedef', Keyword.Declaration, ('typedefprebody', 'typedecl')),
-        ],
-        'typedefprebody': [
-            include('whitespace'),
-            (r'(=)(\s*)({)', bygroups(Punctuation, Text, Punctuation),
-             ('#pop', 'typedefbody')),
-        ],
-        'enumdef': [
-            (r'enum', Keyword.Declaration, ('enumdefprebody', 'typedecl')),
-        ],
-        'enumdefprebody': [
-            include('whitespace'),
-            (r'{', Punctuation, ('#pop','enumdefbody')),
-        ],
-        'classdef': [
-            (r'class', Keyword.Declaration, ('classdefprebody', 'typedecl')),
-        ],
-        'classdefprebody': [
-            include('whitespace'),
-            (r'(extends|implements)', Keyword.Declaration,'typedecl'),
-            (r'{', Punctuation, ('#pop', 'classdefbody')),
-        ],
-        'interfacedef': [
-            (r'interface', Keyword.Declaration,
-             ('interfacedefprebody', 'typedecl')),
-        ],
-        'interfacedefprebody': [
-            include('whitespace'),
-            (r'(extends)', Keyword.Declaration, 'typedecl'),
-            (r'{', Punctuation, ('#pop', 'classdefbody')),
-        ],
-
-        'typedefbody': [
-          include('whitespace'),
-          include('instancevardef'),
-          include('instancefundef'),
-          (r'>', Punctuation, 'typedecl'),
-          (r',', Punctuation),
-          (r'}', Punctuation, '#pop'),
-        ],
-        'enumdefbody': [
-          include('whitespace'),
-          (ident, Name.Variable.Instance),
-          (r'\(', Punctuation, 'funargdecl'),
-          (r';', Punctuation),
-          (r'}', Punctuation, '#pop'),
-        ],
-        'classdefbody': [
-          include('whitespace'),
-          include('instancevardef'),
-          include('instancefundef'),
-          (r'}', Punctuation, '#pop'),
-          include('codeblock'),
-        ],
-    }
-
-    def analyse_text(text):
-        if re.match(r'\w+\s*:\s*\w', text): return 0.3
-
-
def _indentation(lexer, match, ctx):
    """Lexer callback for leading whitespace in indentation-based markup.

    Emits the matched indentation as ``Text``, records it on the context,
    and then decides which state handles the rest of the line: if a block
    state was armed by ``_starts_block`` and this line is indented *deeper*
    than the block opener, re-enter that block state; otherwise disarm any
    pending block and fall back to the regular 'content' state.
    """
    indent = match.group(0)
    yield match.start(), Text, indent
    ctx.last_indentation = indent
    ctx.pos = match.end()

    pending = getattr(ctx, 'block_state', None)
    if pending and indent.startswith(ctx.block_indentation) \
            and indent != ctx.block_indentation:
        ctx.stack.append(pending)
    else:
        ctx.block_state = None
        ctx.block_indentation = None
        ctx.stack.append('content')
-
-def _starts_block(token, state):
-    def callback(lexer, match, ctx):
-        yield match.start(), token, match.group(0)
-
-        if hasattr(ctx, 'last_indentation'):
-            ctx.block_indentation = ctx.last_indentation
-        else:
-            ctx.block_indentation = ''
-
-        ctx.block_state = state
-        ctx.pos = match.end()
-
-    return callback
-
-
class HamlLexer(ExtendedRegexLexer):
    """
    For Haml markup.

    Indentation-driven: every line starts in 'root', where the
    ``_indentation`` callback picks the next state; comment and filter
    blocks opened via ``_starts_block`` swallow more-indented lines.
    Embedded Ruby is delegated to ``RubyLexer``.

    *New in Pygments 1.3.*
    """

    name = 'Haml'
    aliases = ['haml', 'HAML']
    filenames = ['*.haml']
    mimetypes = ['text/x-haml']

    flags = re.IGNORECASE
    # Haml can include " |\n" anywhere,
    # which is ignored and used to wrap long lines.
    # To accommodate this, use this custom faux dot instead.
    _dot = r'(?: \|\n(?=.* \|)|.)'

    # In certain places, a comma at the end of the line
    # allows line wrapping as well.
    _comma_dot = r'(?:,\s*\n|' + _dot + ')'
    tokens = {
        # Per-line entry point: _indentation decides the follow-up state.
        'root': [
            (r'[ \t]*\n', Text),
            (r'[ \t]*', _indentation),
        ],

        # Implicit-div shortcuts: .class and #id open a tag.
        'css': [
            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
        ],

        # '=' / '~' lines hold Ruby code; anything else is plain text.
        'eval-or-plain': [
            (r'[&!]?==', Punctuation, 'plain'),
            (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
             bygroups(Punctuation, using(RubyLexer)),
             'root'),
            (r'', Text, 'plain'),
        ],

        # Body of a Haml line after its indentation.
        'content': [
            include('css'),
            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
            # Conditional HTML comment: /[...] condition.
            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
             bygroups(Comment, Comment.Special, Comment),
             '#pop'),
            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
             '#pop'),
            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
                                                 'haml-comment-block'), '#pop'),
            # '-' lines run silent Ruby code.
            (r'(-)(' + _comma_dot + r'*\n)',
             bygroups(Punctuation, using(RubyLexer)),
             '#pop'),
            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
             '#pop'),
            include('eval-or-plain'),
        ],

        # After a %tag / .class / #id: attributes, Ruby hashes, modifiers.
        'tag': [
            include('css'),
            (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
            (r'\[' + _dot + '*?\]', using(RubyLexer)),
            (r'\(', Text, 'html-attributes'),
            (r'/[ \t]*\n', Punctuation, '#pop:2'),
            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
            include('eval-or-plain'),
        ],

        # Literal text with #{...} Ruby interpolation.
        'plain': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],

        # Inside a (...) HTML-style attribute list.
        'html-attributes': [
            (r'\s+', Text),
            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
            (r'[a-z0-9_:-]+', Name.Attribute),
            (r'\)', Text, '#pop'),
        ],

        'html-attribute-value': [
            (r'[ \t]+', Text),
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],

        # Block states armed by _starts_block above.
        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],

        'haml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],

        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }
-
-
# Token states shared between SassLexer and ScssLexer below.  Each lexer
# shallow-copies every state into its own ``tokens`` dict and then appends
# the line/terminator rules that differ between the indentation-based Sass
# syntax and the brace-based SCSS syntax.
common_sass_tokens = {
    # Right-hand side of a property or variable assignment.
    'value': [
        (r'[ \t]+', Text),
        (r'[!$][\w-]+', Name.Variable),
        (r'url\(', String.Other, 'string-url'),
        (r'[a-z_-][\w-]*(?=\()', Name.Function),
        # CSS property names and keyword values (one big alternation).
        (r'(azimuth|background-attachment|background-color|'
         r'background-image|background-position|background-repeat|'
         r'background|border-bottom-color|border-bottom-style|'
         r'border-bottom-width|border-left-color|border-left-style|'
         r'border-left-width|border-right|border-right-color|'
         r'border-right-style|border-right-width|border-top-color|'
         r'border-top-style|border-top-width|border-bottom|'
         r'border-collapse|border-left|border-width|border-color|'
         r'border-spacing|border-style|border-top|border|caption-side|'
         r'clear|clip|color|content|counter-increment|counter-reset|'
         r'cue-after|cue-before|cue|cursor|direction|display|'
         r'elevation|empty-cells|float|font-family|font-size|'
         r'font-size-adjust|font-stretch|font-style|font-variant|'
         r'font-weight|font|height|letter-spacing|line-height|'
         r'list-style-type|list-style-image|list-style-position|'
         r'list-style|margin-bottom|margin-left|margin-right|'
         r'margin-top|margin|marker-offset|marks|max-height|max-width|'
         r'min-height|min-width|opacity|orphans|outline|outline-color|'
         r'outline-style|outline-width|overflow|padding-bottom|'
         r'padding-left|padding-right|padding-top|padding|page|'
         r'page-break-after|page-break-before|page-break-inside|'
         r'pause-after|pause-before|pause|pitch|pitch-range|'
         r'play-during|position|quotes|richness|right|size|'
         r'speak-header|speak-numeral|speak-punctuation|speak|'
         r'speech-rate|stress|table-layout|text-align|text-decoration|'
         r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
         r'vertical-align|visibility|voice-family|volume|white-space|'
         r'widows|width|word-spacing|z-index|bottom|left|'
         r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
         r'behind|below|bidi-override|blink|block|bold|bolder|both|'
         r'capitalize|center-left|center-right|center|circle|'
         r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
         r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
         r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
         r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
         r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
         r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
         r'inherit|inline-table|inline|inset|inside|invert|italic|'
         r'justify|katakana-iroha|katakana|landscape|larger|large|'
         r'left-side|leftwards|level|lighter|line-through|list-item|'
         r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
         r'lower|low|medium|message-box|middle|mix|monospace|'
         r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
         r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
         r'open-quote|outset|outside|overline|pointer|portrait|px|'
         r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
         r'rightwards|s-resize|sans-serif|scroll|se-resize|'
         r'semi-condensed|semi-expanded|separate|serif|show|silent|'
         r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
         r'spell-out|square|static|status-bar|super|sw-resize|'
         r'table-caption|table-cell|table-column|table-column-group|'
         r'table-footer-group|table-header-group|table-row|'
         r'table-row-group|text|text-bottom|text-top|thick|thin|'
         r'transparent|ultra-condensed|ultra-expanded|underline|'
         r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
         r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
         r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
        # Extended CSS color keywords.
        (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
         r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
         r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
         r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
         r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
         r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
         r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
         r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
         r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
         r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
         r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
         r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
         r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
         r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
         r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
         r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
         r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
         r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
         r'plum|darkgoldenrod|sandybrown|magenta|tan|'
         r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
         r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
         r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
         r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
         r'lightyellow|lavenderblush|linen|mediumaquamarine|'
         r'blueviolet|peachpuff)\b', Name.Entity),
        # Basic (HTML 4) color keywords.
        (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
         r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
        (r'\!(important|default)', Name.Exception),
        (r'(true|false)', Name.Pseudo),
        (r'(and|or|not)', Operator.Word),
        (r'/\*', Comment.Multiline, 'inline-comment'),
        (r'//[^\n]*', Comment.Single),
        (r'\#[a-z0-9]{1,6}', Number.Hex),
        # Numbers with optional unit suffix (e.g. 10px, 50%).
        (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
        (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
        (r'#{', String.Interpol, 'interpolation'),
        (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
        (r'[\[\]()]+', Punctuation),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
        (r'[a-z_-][\w-]*', Name),
    ],

    # Inside a #{...} interpolation: ordinary values until the brace closes.
    'interpolation': [
        (r'\}', String.Interpol, '#pop'),
        include('value'),
    ],

    'selector': [
        (r'[ \t]+', Text),
        (r'\:', Name.Decorator, 'pseudo-class'),
        (r'\.', Name.Class, 'class'),
        (r'\#', Name.Namespace, 'id'),
        (r'[a-zA-Z0-9_-]+', Name.Tag),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'&', Keyword),
        (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
    ],

    # String bodies; all three allow #{...} interpolation.
    'string-double': [
        (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'"', String.Double, '#pop'),
    ],

    'string-single': [
        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r"'", String.Double, '#pop'),
    ],

    'string-url': [
        (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'\)', String.Other, '#pop'),
    ],

    # Selector fragments entered from 'selector'.
    'pseudo-class': [
        (r'[\w-]+', Name.Decorator),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    'class': [
        (r'[\w-]+', Name.Class),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    'id': [
        (r'[\w-]+', Name.Namespace),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],

    # @for loop header: range keywords, then ordinary values.
    'for': [
        (r'(from|to|through)', Operator.Word),
        include('value'),
    ],
}
-
class SassLexer(ExtendedRegexLexer):
    """
    For Sass stylesheets (the indentation-based syntax).

    Shares the value/selector/string states with ``ScssLexer`` via
    ``common_sass_tokens`` (merged in at class-definition time below);
    line structure is handled by the ``_indentation`` / ``_starts_block``
    callbacks.

    *New in Pygments 1.3.*
    """

    name = 'Sass'
    aliases = ['sass', 'SASS']
    filenames = ['*.sass']
    mimetypes = ['text/x-sass']

    flags = re.IGNORECASE
    tokens = {
        # Per-line entry point: _indentation decides the follow-up state.
        'root': [
            (r'[ \t]*\n', Text),
            (r'[ \t]*', _indentation),
        ],

        # Body of a Sass line after its indentation.
        'content': [
            (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
             'root'),
            (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
             'root'),
            (r'@import', Keyword, 'import'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            # '=name' / '+name': old-style mixin definition / inclusion.
            (r'=[\w-]+', Name.Function, 'value'),
            (r'\+[\w-]+', Name.Decorator, 'value'),
            (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
             bygroups(Name.Variable, Operator), 'value'),
            (r':', Name.Attribute, 'old-style-attr'),
            (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
            (r'', Text, 'selector'),
        ],

        # Comment block states armed by _starts_block above.
        'single-comment': [
            (r'.+', Comment.Single),
            (r'\n', Text, 'root'),
        ],

        'multi-comment': [
            (r'.+', Comment.Multiline),
            (r'\n', Text, 'root'),
        ],

        'import': [
            (r'[ \t]+', Text),
            (r'\S+', String),
            (r'\n', Text, 'root'),
        ],

        'old-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*=', Operator, 'value'),
            (r'', Text, 'value'),
        ],

        'new-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*[=:]', Operator, 'value'),
        ],

        'inline-comment': [
            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }
    # Merge in the states shared with SCSS.  dict.items() (rather than the
    # Python-2-only iteritems()) keeps this module importable on Python 3
    # too; copy.copy() gives each state list its own identity so the
    # appends below don't mutate common_sass_tokens.
    for group, common in common_sass_tokens.items():
        tokens[group] = copy.copy(common)
    tokens['value'].append((r'\n', Text, 'root'))
    tokens['selector'].append((r'\n', Text, 'root'))
-
-
class ScssLexer(RegexLexer):
    """
    For SCSS stylesheets (the brace-based Sass syntax).

    Shares the value/selector/string states with ``SassLexer`` via
    ``common_sass_tokens`` (merged in at class-definition time below).
    """

    name = 'SCSS'
    aliases = ['scss']
    filenames = ['*.scss']
    mimetypes = ['text/x-scss']

    flags = re.IGNORECASE | re.DOTALL
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@import', Keyword, 'value'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
            # Lookaheads: decide between a property declaration and a selector.
            (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
            (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
            (r'', Text, 'selector'),
        ],

        'attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*:', Operator, 'value'),
        ],

        'inline-comment': [
            (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }
    # Merge in the states shared with Sass.  dict.items() (rather than the
    # Python-2-only iteritems()) keeps this module importable on Python 3
    # too; copy.copy() gives each state list its own identity so the
    # extend() calls below don't mutate common_sass_tokens.
    for group, common in common_sass_tokens.items():
        tokens[group] = copy.copy(common)
    tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
    tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
-
-
class CoffeeScriptLexer(RegexLexer):
    """
    For `CoffeeScript`_ source code.

    .. _CoffeeScript: http://coffeescript.org

    *New in Pygments 1.3.*
    """

    name = 'CoffeeScript'
    aliases = ['coffee-script', 'coffeescript']
    filenames = ['*.coffee']
    mimetypes = ['text/coffeescript']

    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            # ###...### block comments vs. single-line # comments.
            (r'###[^#].*?###', Comment.Multiline),
            (r'#(?!##[^#]).*?\n', Comment.Single),
        ],
        # Body of a ///.../// heregex; #{...} interpolation is allowed inside.
        'multilineregex': [
            (r'[^/#]+', String.Regex),
            (r'///([gim]+\b|\B)', String.Regex, '#pop'),
            (r'#{', String.Interpol, 'interpoling_string'),
            (r'[/#]', String.Regex),
        ],
        # Entered after tokens that may be followed by a regex literal, so a
        # '/' here is parsed as a regex rather than division.
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'///', String.Regex, ('#pop', 'multilineregex')),
            (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'', Text, '#pop'),
        ],
        'root': [
            # this next expr leads to infinite loops root -> slashstartsregex
            #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
             r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
             Operator, 'slashstartsregex'),
            # Function literals: optional (args) then -> or =>.
            (r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(?<![\.\$])(for|own|in|of|while|until|'
             r'loop|break|return|continue|'
             r'switch|when|then|if|unless|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
             r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
            (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
             r'NaN|Infinity|undefined)\b',
             Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
             Name.Builtin),
            # Assignment targets: plain names and @-prefixed instance names.
            (r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
              'slashstartsregex'),
            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
              'slashstartsregex'),
            (r'@', Name.Other, 'slashstartsregex'),
            (r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            ('"""', String, 'tdqs'),
            ("'''", String, 'tsqs'),
            ('"', String, 'dqs'),
            ("'", String, 'sqs'),
        ],
        'strings': [
            (r'[^#\\\'"]+', String),
            # note that all coffee script strings are multi-line.
            # hashmarks, quotes and backslashes must be parsed one at a time
        ],
        # Inside a #{...}: full expression syntax until the closing brace.
        'interpoling_string' : [
            (r'}', String.Interpol, "#pop"),
            include('root')
        ],
        'dqs': [
            (r'"', String, '#pop'),
            (r'\\.|\'', String), # double-quoted string don't need ' escapes
            (r'#{', String.Interpol, "interpoling_string"),
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            (r'#|\\.|"', String), # single quoted strings don't need " escapes
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            (r'\\.|\'|"', String), # no need to escape quotes in triple-string
            (r'#{', String.Interpol, "interpoling_string"),
            include('strings'),
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
            include('strings')
        ],
    }
-
-
class LiveScriptLexer(RegexLexer):
    """
    For `LiveScript`_ source code.

    .. _LiveScript: http://gkz.github.com/LiveScript/

    New in Pygments 1.6.
    """

    name = 'LiveScript'
    aliases = ['live-script', 'livescript']
    filenames = ['*.ls']
    mimetypes = ['text/livescript']

    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'#.*?\n', Comment.Single),
        ],
        # Body of a //...// multi-line regex.
        'multilineregex': [
            include('commentsandwhitespace'),
            (r'//([gim]+\b|\B)', String.Regex, '#pop'),
            (r'/', String.Regex),
            (r'[^/#]+', String.Regex)
        ],
        # Entered after tokens that may be followed by a regex literal, so a
        # '/' here is parsed as a regex rather than division.
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'//', String.Regex, ('#pop', 'multilineregex')),
            (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'', Text, '#pop'),
        ],
        'root': [
            # this next expr leads to infinite loops root -> slashstartsregex
            #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            # Function arrows: ->, ~>, --> etc., and backcalls <-, <~.
            (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
             r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
            (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
             r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
             r'[+*`%&\|\^/])=?',
             Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
             r'return|continue|switch|when|then|if|unless|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
             r'extends|this|class|by|const|var|to|til)\b', Keyword,
              'slashstartsregex'),
            (r'(?<![\.\$])(true|false|yes|no|on|off|'
             r'null|NaN|Infinity|undefined|void)\b',
              Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
              Name.Builtin),
            # Assignment targets: plain names and @-prefixed instance names.
            (r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
              'slashstartsregex'),
            (r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
              'slashstartsregex'),
            (r'@', Name.Other, 'slashstartsregex'),
            (r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
            # Numbers may carry a trailing word suffix; integers also allow
            # a radix prefix written as base~digits.
            (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
            (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
            ('"""', String, 'tdqs'),
            ("'''", String, 'tsqs'),
            ('"', String, 'dqs'),
            ("'", String, 'sqs'),
            # Backslash-prefixed bare words and <[ ... ]> literals are
            # tokenized as strings.
            (r'\\[\w$-]+', String),
            (r'<\[.*\]>', String),
        ],
        'strings': [
            (r'[^#\\\'"]+', String),
            # note that all coffee script strings are multi-line.
            # hashmarks, quotes and backslashes must be parsed one at a time
        ],
        # Inside a #{...}: full expression syntax until the closing brace.
        'interpoling_string' : [
            (r'}', String.Interpol, "#pop"),
            include('root')
        ],
        'dqs': [
            (r'"', String, '#pop'),
            (r'\\.|\'', String), # double-quoted string don't need ' escapes
            (r'#{', String.Interpol, "interpoling_string"),
            (r'#', String),
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            (r'#|\\.|"', String), # single quoted strings don't need " escapes
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            (r'\\.|\'|"', String), # no need to escape quotes in triple-string
            (r'#{', String.Interpol, "interpoling_string"),
            (r'#', String),
            include('strings'),
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
            include('strings')
        ],
    }
-
-
class DuelLexer(RegexLexer):
    """
    Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
    See http://duelengine.org/.
    See http://jsonml.org/jbst/.

    *New in Pygments 1.4.*
    """

    name = 'Duel'
    aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST']
    filenames = ['*.duel','*.jbst']
    mimetypes = ['text/x-duel','text/x-jbst']

    flags = re.DOTALL

    tokens = {
        'root': [
            # <% ... %> (and the @=#!: variants): JavaScript code blocks.
            (r'(<%[@=#!:]?)(.*?)(%>)',
             bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
            # <%$ name : value %> directive blocks.
            (r'(<%\$)(.*?)(:)(.*?)(%>)',
             bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
            # <%-- ... --%> server-side comments.
            (r'(<%--)(.*?)(--%>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            # Inline <script> bodies are lexed as JavaScript.
            (r'(<script.*?>)(.*?)(</script>)',
             bygroups(using(HtmlLexer),
                      using(JavascriptLexer), using(HtmlLexer))),
            # Everything else is plain HTML, consumed up to the next '<'.
            (r'(.+?)(?=<)', using(HtmlLexer)),
            (r'.+', using(HtmlLexer)),
        ],
    }
-
-
class ScamlLexer(ExtendedRegexLexer):
    """
    For `Scaml markup <http://scalate.fusesource.org/>`_.  Scaml is Haml for Scala.

    Structured like ``HamlLexer`` (indentation-driven via ``_indentation``
    and ``_starts_block``), but embedded code is delegated to
    ``ScalaLexer`` instead of Ruby.

    *New in Pygments 1.4.*
    """

    name = 'Scaml'
    aliases = ['scaml', 'SCAML']
    filenames = ['*.scaml']
    mimetypes = ['text/x-scaml']

    flags = re.IGNORECASE
    # Scaml does not yet support the " |\n" notation to
    # wrap long lines.  Once it does, use the custom faux
    # dot instead.
    # _dot = r'(?: \|\n(?=.* \|)|.)'
    _dot = r'.'

    tokens = {
        # Per-line entry point: _indentation decides the follow-up state.
        'root': [
            (r'[ \t]*\n', Text),
            (r'[ \t]*', _indentation),
        ],

        # Implicit-div shortcuts: .class and #id open a tag.
        'css': [
            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
        ],

        # '=' / '~' lines hold Scala code; anything else is plain text.
        'eval-or-plain': [
            (r'[&!]?==', Punctuation, 'plain'),
            (r'([&!]?[=~])(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             'root'),
            (r'', Text, 'plain'),
        ],

        # Body of a Scaml line after its indentation.
        'content': [
            include('css'),
            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
            # Conditional HTML comment: /[...] condition.
            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
             bygroups(Comment, Comment.Special, Comment),
             '#pop'),
            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
             '#pop'),
            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
                                                 'scaml-comment-block'), '#pop'),
            # '-@' attribute/import declarations, '-' silent Scala code.
            (r'(-@\s*)(import)?(' + _dot + r'*\n)',
             bygroups(Punctuation, Keyword, using(ScalaLexer)),
             '#pop'),
            (r'(-)(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             '#pop'),
            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
             '#pop'),
            include('eval-or-plain'),
        ],

        # After a %tag / .class / #id: attributes, Scala hashes, modifiers.
        'tag': [
            include('css'),
            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
            (r'\(', Text, 'html-attributes'),
            (r'/[ \t]*\n', Punctuation, '#pop:2'),
            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
            include('eval-or-plain'),
        ],

        # Literal text with #{...} Scala interpolation.
        'plain': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],

        # Inside a (...) HTML-style attribute list.
        'html-attributes': [
            (r'\s+', Text),
            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
            (r'[a-z0-9_:-]+', Name.Attribute),
            (r'\)', Text, '#pop'),
        ],

        'html-attribute-value': [
            (r'[ \t]+', Text),
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],

        # Block states armed by _starts_block above.
        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],

        'scaml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],

        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }
-
-
-class JadeLexer(ExtendedRegexLexer):
-    """
-    For Jade markup.
-    Jade is a variant of Scaml, see:
-    http://scalate.fusesource.org/documentation/scaml-reference.html
-
-    *New in Pygments 1.4.*
-    """
-
-    name = 'Jade'
-    aliases = ['jade', 'JADE']
-    filenames = ['*.jade']
-    mimetypes = ['text/x-jade']
-
-    flags = re.IGNORECASE
-    _dot = r'.'
-
-    tokens = {
-        'root': [
-            (r'[ \t]*\n', Text),
-            (r'[ \t]*', _indentation),
-        ],
-
-        'css': [
-            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
-            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
-        ],
-
-        'eval-or-plain': [
-            (r'[&!]?==', Punctuation, 'plain'),
-            (r'([&!]?[=~])(' + _dot + r'*\n)',
-             bygroups(Punctuation, using(ScalaLexer)),  'root'),
-            (r'', Text, 'plain'),
-        ],
-
-        'content': [
-            include('css'),
-            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
-            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
-             bygroups(Comment, Comment.Special, Comment),
-             '#pop'),
-            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
-             '#pop'),
-            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
-                                                 'scaml-comment-block'), '#pop'),
-            (r'(-@\s*)(import)?(' + _dot + r'*\n)',
-             bygroups(Punctuation, Keyword, using(ScalaLexer)),
-             '#pop'),
-            (r'(-)(' + _dot + r'*\n)',
-             bygroups(Punctuation, using(ScalaLexer)),
-             '#pop'),
-            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
-             '#pop'),
-            (r'[a-z0-9_:-]+', Name.Tag, 'tag'),
-            (r'\|', Text, 'eval-or-plain'),
-        ],
-
-        'tag': [
-            include('css'),
-            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
-            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
-            (r'\(', Text, 'html-attributes'),
-            (r'/[ \t]*\n', Punctuation, '#pop:2'),
-            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
-            include('eval-or-plain'),
-        ],
-
-        'plain': [
-            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
-            (r'(#\{)(' + _dot + '*?)(\})',
-             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
-            (r'\n', Text, 'root'),
-        ],
-
-        'html-attributes': [
-            (r'\s+', Text),
-            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
-            (r'[a-z0-9_:-]+', Name.Attribute),
-            (r'\)', Text, '#pop'),
-        ],
-
-        'html-attribute-value': [
-            (r'[ \t]+', Text),
-            (r'[a-z0-9_]+', Name.Variable, '#pop'),
-            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
-            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
-            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
-            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
-        ],
-
-        'html-comment-block': [
-            (_dot + '+', Comment),
-            (r'\n', Text, 'root'),
-        ],
-
-        'scaml-comment-block': [
-            (_dot + '+', Comment.Preproc),
-            (r'\n', Text, 'root'),
-        ],
-
-        'filter-block': [
-            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
-            (r'(#\{)(' + _dot + '*?)(\})',
-             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
-            (r'\n', Text, 'root'),
-        ],
-    }
-
-
-class XQueryLexer(ExtendedRegexLexer):
-    """
-    An XQuery lexer, parsing a stream and outputting the tokens needed to
-    highlight xquery code.
-
-    *New in Pygments 1.4.*
-    """
-    name = 'XQuery'
-    aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
-    filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
-    mimetypes = ['text/xquery', 'application/xquery']
-
-    xquery_parse_state = []
-
-    # FIX UNICODE LATER
-    #ncnamestartchar = (
-    #    ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
-    #    ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
-    #    ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
-    #    ur"[\u10000-\uEFFFF]"
-    #)
-    ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
-    # FIX UNICODE LATER
-    #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
-    #                                ur"[\u203F-\u2040]")
-    ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
-    ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
-    pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
-    pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
-    pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
-    prefixedname = "%s:%s" % (ncname, ncname)
-    unprefixedname = ncname
-    qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
-    entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
-    charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
-    stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
-    stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
-    # FIX UNICODE LATER
-    #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
-    #                      ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
-    elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
-    #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
-    #                       ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
-    quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
-    #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
-    #                       ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
-    aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
-
-
-    # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
-    #                 aposattrcontentchar
-    #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
-    flags = re.DOTALL | re.MULTILINE | re.UNICODE
-
-    def punctuation_root_callback(lexer, match, ctx):
-        yield match.start(), Punctuation, match.group(1)
-        # transition to root always - don't pop off stack
-        ctx.stack = ['root']
-        ctx.pos = match.end()
-
-    def operator_root_callback(lexer, match, ctx):
-        yield match.start(), Operator, match.group(1)
-        # transition to root always - don't pop off stack
-        ctx.stack = ['root']
-        ctx.pos = match.end()
-
-    def popstate_tag_callback(lexer, match, ctx):
-        yield match.start(), Name.Tag, match.group(1)
-        ctx.stack.append(lexer.xquery_parse_state.pop())
-        ctx.pos = match.end()
-
-    def popstate_xmlcomment_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append(lexer.xquery_parse_state.pop())
-        ctx.pos = match.end()
-
-    def popstate_kindtest_callback(lexer, match, ctx):
-        yield match.start(), Punctuation, match.group(1)
-        next_state = lexer.xquery_parse_state.pop()
-        if next_state == 'occurrenceindicator':
-            if re.match("[?*+]+", match.group(2)):
-                yield match.start(), Punctuation, match.group(2)
-                ctx.stack.append('operator')
-                ctx.pos = match.end()
-            else:
-                ctx.stack.append('operator')
-                ctx.pos = match.end(1)
-        else:
-            ctx.stack.append(next_state)
-            ctx.pos = match.end(1)
-
-    def popstate_callback(lexer, match, ctx):
-        yield match.start(), Punctuation, match.group(1)
-        # if we have run out of our state stack, pop whatever is on the pygments
-        # state stack
-        if len(lexer.xquery_parse_state) == 0:
-            ctx.stack.pop()
-        elif len(ctx.stack) > 1:
-            ctx.stack.append(lexer.xquery_parse_state.pop())
-        else:
-            # i don't know if i'll need this, but in case, default back to root
-            ctx.stack = ['root']
-        ctx.pos = match.end()
-
-    def pushstate_element_content_starttag_callback(lexer, match, ctx):
-        yield match.start(), Name.Tag, match.group(1)
-        lexer.xquery_parse_state.append('element_content')
-        ctx.stack.append('start_tag')
-        ctx.pos = match.end()
-
-    def pushstate_cdata_section_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('cdata_section')
-        lexer.xquery_parse_state.append(ctx.state.pop)
-        ctx.pos = match.end()
-
-    def pushstate_starttag_callback(lexer, match, ctx):
-        yield match.start(), Name.Tag, match.group(1)
-        lexer.xquery_parse_state.append(ctx.state.pop)
-        ctx.stack.append('start_tag')
-        ctx.pos = match.end()
-
-    def pushstate_operator_order_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        ctx.stack = ['root']
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_operator_root_validate(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        ctx.stack = ['root']
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_operator_root_validate_withmode(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Keyword, match.group(3)
-        ctx.stack = ['root']
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('processing_instruction')
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('processing_instruction')
-        lexer.xquery_parse_state.append('element_content')
-        ctx.pos = match.end()
-
-    def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('cdata_section')
-        lexer.xquery_parse_state.append('element_content')
-        ctx.pos = match.end()
-
-    def pushstate_operator_cdata_section_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('cdata_section')
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('xml_comment')
-        lexer.xquery_parse_state.append('element_content')
-        ctx.pos = match.end()
-
-    def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
-        yield match.start(), String.Doc, match.group(1)
-        ctx.stack.append('xml_comment')
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_kindtest_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('kindtest')
-        ctx.stack.append('kindtest')
-        ctx.pos = match.end()
-
-    def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('operator')
-        ctx.stack.append('kindtestforpi')
-        ctx.pos = match.end()
-
-    def pushstate_operator_kindtest_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('operator')
-        ctx.stack.append('kindtest')
-        ctx.pos = match.end()
-
-    def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
-        yield match.start(), Name.Tag, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('occurrenceindicator')
-        ctx.stack.append('kindtest')
-        ctx.pos = match.end()
-
-    def pushstate_operator_starttag_callback(lexer, match, ctx):
-        yield match.start(), Name.Tag, match.group(1)
-        lexer.xquery_parse_state.append('operator')
-        ctx.stack.append('start_tag')
-        ctx.pos = match.end()
-
-    def pushstate_operator_root_callback(lexer, match, ctx):
-        yield match.start(), Punctuation, match.group(1)
-        lexer.xquery_parse_state.append('operator')
-        ctx.stack = ['root']#.append('root')
-        ctx.pos = match.end()
-
-    def pushstate_operator_root_construct_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('operator')
-        ctx.stack = ['root']
-        ctx.pos = match.end()
-
-    def pushstate_root_callback(lexer, match, ctx):
-        yield match.start(), Punctuation, match.group(1)
-        cur_state = ctx.stack.pop()
-        lexer.xquery_parse_state.append(cur_state)
-        ctx.stack = ['root']#.append('root')
-        ctx.pos = match.end()
-
-    def pushstate_operator_attribute_callback(lexer, match, ctx):
-        yield match.start(), Name.Attribute, match.group(1)
-        ctx.stack.append('operator')
-        ctx.pos = match.end()
-
-    def pushstate_operator_callback(lexer, match, ctx):
-        yield match.start(), Keyword, match.group(1)
-        yield match.start(), Text, match.group(2)
-        yield match.start(), Punctuation, match.group(3)
-        lexer.xquery_parse_state.append('operator')
-        ctx.pos = match.end()
-
-    tokens = {
-        'comment': [
-            # xquery comments
-            (r'(:\))', Comment, '#pop'),
-            (r'(\(:)', Comment, '#push'),
-            (r'[^:)]', Comment),
-            (r'([^:)]|:|\))', Comment),
-        ],
-        'whitespace': [
-            (r'\s+', Text),
-        ],
-        'operator': [
-            include('whitespace'),
-            (r'(\})', popstate_callback),
-            (r'\(:', Comment, 'comment'),
-
-            (r'(\{)', pushstate_root_callback),
-            (r'then|else|external|at|div|except', Keyword, 'root'),
-            (r'order by', Keyword, 'root'),
-            (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
-            (r'and|or', Operator.Word, 'root'),
-            (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
-             Operator.Word, 'root'),
-            (r'return|satisfies|to|union|where|preserve\s+strip',
-             Keyword, 'root'),
-            (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)',
-             operator_root_callback),
-            (r'(::|;|\[|//|/|,)',
-             punctuation_root_callback),
-            (r'(castable|cast)(\s+)(as)\b',
-             bygroups(Keyword, Text, Keyword), 'singletype'),
-            (r'(instance)(\s+)(of)\b',
-             bygroups(Keyword, Text, Keyword), 'itemtype'),
-            (r'(treat)(\s+)(as)\b',
-             bygroups(Keyword, Text, Keyword), 'itemtype'),
-            (r'(case|as)\b', Keyword, 'itemtype'),
-            (r'(\))(\s*)(as)',
-             bygroups(Punctuation, Text, Keyword), 'itemtype'),
-            (r'\$', Name.Variable, 'varname'),
-            (r'(for|let)(\s+)(\$)',
-             bygroups(Keyword, Text, Name.Variable), 'varname'),
-            #(r'\)|\?|\]', Punctuation, '#push'),
-            (r'\)|\?|\]', Punctuation),
-            (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
-            (r'ascending|descending|default', Keyword, '#push'),
-            (r'external', Keyword),
-            (r'collation', Keyword, 'uritooperator'),
-            # finally catch all string literals and stay in operator state
-            (stringdouble, String.Double),
-            (stringsingle, String.Single),
-
-            (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
-        ],
-        'uritooperator': [
-            (stringdouble, String.Double, '#pop'),
-            (stringsingle, String.Single, '#pop'),
-        ],
-        'namespacedecl': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-            (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
-            (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
-            (stringdouble, String.Double),
-            (stringsingle, String.Single),
-            (r',', Punctuation),
-            (r'=', Operator),
-            (r';', Punctuation, 'root'),
-            (ncname, Name.Namespace),
-        ],
-        'namespacekeyword': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-            (stringdouble, String.Double, 'namespacedecl'),
-            (stringsingle, String.Single, 'namespacedecl'),
-            (r'inherit|no-inherit', Keyword, 'root'),
-            (r'namespace', Keyword, 'namespacedecl'),
-            (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
-            (r'preserve|no-preserve', Keyword),
-            (r',', Punctuation),
-        ],
-        'varname': [
-            (r'\(:', Comment, 'comment'),
-            (qname, Name.Variable, 'operator'),
-        ],
-        'singletype': [
-            (r'\(:', Comment, 'comment'),
-            (ncname + r'(:\*)', Name.Variable, 'operator'),
-            (qname, Name.Variable, 'operator'),
-        ],
-        'itemtype': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-            (r'\$', Punctuation, 'varname'),
-            (r'(void)(\s*)(\()(\s*)(\))',
-             bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
-            (r'(element|attribute|schema-element|schema-attribute|comment|text|'
-             r'node|binary|document-node|empty-sequence)(\s*)(\()',
-             pushstate_occurrenceindicator_kindtest_callback),
-            # Marklogic specific type?
-            (r'(processing-instruction)(\s*)(\()',
-             bygroups(Keyword, Text, Punctuation),
-             ('occurrenceindicator', 'kindtestforpi')),
-            (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
-             bygroups(Keyword, Text, Punctuation, Text, Punctuation),
-             'occurrenceindicator'),
-            (r'\(\#', Punctuation, 'pragma'),
-            (r';', Punctuation, '#pop'),
-            (r'then|else', Keyword, '#pop'),
-            (r'(at)(\s+)(' + stringdouble + ')',
-             bygroups(Keyword, Text, String.Double), 'namespacedecl'),
-            (r'(at)(\s+)(' + stringsingle + ')',
-             bygroups(Keyword, Text, String.Single), 'namespacedecl'),
-            (r'except|intersect|in|is|return|satisfies|to|union|where',
-             Keyword, 'root'),
-            (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
-            (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
-            (r'external|at', Keyword, 'root'),
-            (r'(stable)(\s+)(order)(\s+)(by)',
-             bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
-            (r'(castable|cast)(\s+)(as)',
-             bygroups(Keyword, Text, Keyword), 'singletype'),
-            (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
-            (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
-            (r'case|as', Keyword, 'itemtype'),
-            (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-            (ncname + r':\*', Keyword.Type, 'operator'),
-            (qname, Keyword.Type, 'occurrenceindicator'),
-        ],
-        'kindtest': [
-            (r'\(:', Comment, 'comment'),
-            (r'{', Punctuation, 'root'),
-            (r'(\))([*+?]?)', popstate_kindtest_callback),
-            (r'\*', Name, 'closekindtest'),
-            (qname, Name, 'closekindtest'),
-            (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
-        ],
-        'kindtestforpi': [
-            (r'\(:', Comment, 'comment'),
-            (r'\)', Punctuation, '#pop'),
-            (ncname, Name.Variable),
-            (stringdouble, String.Double),
-            (stringsingle, String.Single),
-        ],
-        'closekindtest': [
-            (r'\(:', Comment, 'comment'),
-            (r'(\))', popstate_callback),
-            (r',', Punctuation),
-            (r'(\{)', pushstate_operator_root_callback),
-            (r'\?', Punctuation),
-        ],
-        'xml_comment': [
-            (r'(-->)', popstate_xmlcomment_callback),
-            (r'[^-]{1,2}', Literal),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
-             unirange(0x10000, 0x10ffff), Literal),
-        ],
-        'processing_instruction': [
-            (r'\s+', Text, 'processing_instruction_content'),
-            (r'\?>', String.Doc, '#pop'),
-            (pitarget, Name),
-        ],
-        'processing_instruction_content': [
-            (r'\?>', String.Doc, '#pop'),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
-             unirange(0x10000, 0x10ffff), Literal),
-        ],
-        'cdata_section': [
-            (r']]>', String.Doc, '#pop'),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
-             unirange(0x10000, 0x10ffff), Literal),
-        ],
-        'start_tag': [
-            include('whitespace'),
-            (r'(/>)', popstate_tag_callback),
-            (r'>', Name.Tag, 'element_content'),
-            (r'"', Punctuation, 'quot_attribute_content'),
-            (r"'", Punctuation, 'apos_attribute_content'),
-            (r'=', Operator),
-            (qname, Name.Tag),
-        ],
-        'quot_attribute_content': [
-            (r'"', Punctuation, 'start_tag'),
-            (r'(\{)', pushstate_root_callback),
-            (r'""', Name.Attribute),
-            (quotattrcontentchar, Name.Attribute),
-            (entityref, Name.Attribute),
-            (charref, Name.Attribute),
-            (r'\{\{|\}\}', Name.Attribute),
-        ],
-        'apos_attribute_content': [
-            (r"'", Punctuation, 'start_tag'),
-            (r'\{', Punctuation, 'root'),
-            (r"''", Name.Attribute),
-            (aposattrcontentchar, Name.Attribute),
-            (entityref, Name.Attribute),
-            (charref, Name.Attribute),
-            (r'\{\{|\}\}', Name.Attribute),
-        ],
-        'element_content': [
-            (r'</', Name.Tag, 'end_tag'),
-            (r'(\{)', pushstate_root_callback),
-            (r'(<!--)', pushstate_element_content_xmlcomment_callback),
-            (r'(<\?)', pushstate_element_content_processing_instruction_callback),
-            (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
-            (r'(<)', pushstate_element_content_starttag_callback),
-            (elementcontentchar, Literal),
-            (entityref, Literal),
-            (charref, Literal),
-            (r'\{\{|\}\}', Literal),
-        ],
-        'end_tag': [
-            include('whitespace'),
-            (r'(>)', popstate_tag_callback),
-            (qname, Name.Tag),
-        ],
-        'xmlspace_decl': [
-            (r'\(:', Comment, 'comment'),
-            (r'preserve|strip', Keyword, '#pop'),
-        ],
-        'declareordering': [
-            (r'\(:', Comment, 'comment'),
-            include('whitespace'),
-            (r'ordered|unordered', Keyword, '#pop'),
-        ],
-        'xqueryversion': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-            (stringdouble, String.Double),
-            (stringsingle, String.Single),
-            (r'encoding', Keyword),
-            (r';', Punctuation, '#pop'),
-        ],
-        'pragma': [
-            (qname, Name.Variable, 'pragmacontents'),
-        ],
-        'pragmacontents': [
-            (r'#\)', Punctuation, 'operator'),
-            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
-             unirange(0x10000, 0x10ffff), Literal),
-            (r'(\s+)', Text),
-        ],
-        'occurrenceindicator': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-            (r'\*|\?|\+', Operator, 'operator'),
-            (r':=', Operator, 'root'),
-            (r'', Text, 'operator'),
-        ],
-        'option': [
-            include('whitespace'),
-            (qname, Name.Variable, '#pop'),
-        ],
-        'qname_braren': [
-            include('whitespace'),
-            (r'(\{)', pushstate_operator_root_callback),
-            (r'(\()', Punctuation, 'root'),
-        ],
-        'element_qname': [
-            (qname, Name.Variable, 'root'),
-        ],
-        'attribute_qname': [
-            (qname, Name.Variable, 'root'),
-        ],
-        'root': [
-            include('whitespace'),
-            (r'\(:', Comment, 'comment'),
-
-            # handle operator state
-            # order on numbers matters - handle most complex first
-            (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
-            (r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
-            (r'(\.\d+|\d+\.\d*)', Number, 'operator'),
-            (r'(\d+)', Number.Integer, 'operator'),
-            (r'(\.\.|\.|\))', Punctuation, 'operator'),
-            (r'(declare)(\s+)(construction)',
-             bygroups(Keyword, Text, Keyword), 'operator'),
-            (r'(declare)(\s+)(default)(\s+)(order)',
-             bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
-            (ncname + ':\*', Name, 'operator'),
-            ('\*:'+ncname, Name.Tag, 'operator'),
-            ('\*', Name.Tag, 'operator'),
-            (stringdouble, String.Double, 'operator'),
-            (stringsingle, String.Single, 'operator'),
-
-            (r'(\})', popstate_callback),
-
-            #NAMESPACE DECL
-            (r'(declare)(\s+)(default)(\s+)(collation)',
-             bygroups(Keyword, Text, Keyword, Text, Keyword)),
-            (r'(module|declare)(\s+)(namespace)',
-             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
-            (r'(declare)(\s+)(base-uri)',
-             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
-
-            #NAMESPACE KEYWORD
-            (r'(declare)(\s+)(default)(\s+)(element|function)',
-             bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
-            (r'(import)(\s+)(schema|module)',
-             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
-            (r'(declare)(\s+)(copy-namespaces)',
-             bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
-
-            #VARNAMEs
-            (r'(for|let|some|every)(\s+)(\$)',
-             bygroups(Keyword, Text, Name.Variable), 'varname'),
-            (r'\$', Name.Variable, 'varname'),
-            (r'(declare)(\s+)(variable)(\s+)(\$)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
-
-            #ITEMTYPE
-            (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-
-            (r'(element|attribute|schema-element|schema-attribute|comment|'
-             r'text|node|document-node|empty-sequence)(\s+)(\()',
-             pushstate_operator_kindtest_callback),
-
-            (r'(processing-instruction)(\s+)(\()',
-             pushstate_operator_kindtestforpi_callback),
-
-            (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
-            (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
-            (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
-            # (r'</', Name.Tag, 'end_tag'),
-            (r'(<)', pushstate_operator_starttag_callback),
-
-            (r'(declare)(\s+)(boundary-space)',
-             bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
-
-            (r'(validate)(\s+)(lax|strict)',
-             pushstate_operator_root_validate_withmode),
-            (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
-            (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-            (r'(element|attribute)(\s*)(\{)',
-             pushstate_operator_root_construct_callback),
-
-            (r'(document|text|processing-instruction|comment)(\s*)(\{)',
-             pushstate_operator_root_construct_callback),
-            #ATTRIBUTE
-            (r'(attribute)(\s+)(?=' + qname + r')',
-             bygroups(Keyword, Text), 'attribute_qname'),
-            #ELEMENT
-            (r'(element)(\s+)(?=' +qname+ r')',
-             bygroups(Keyword, Text), 'element_qname'),
-            #PROCESSING_INSTRUCTION
-            (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
-             bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
-             'operator'),
-
-            (r'(declare|define)(\s+)(function)',
-             bygroups(Keyword, Text, Keyword)),
-
-            (r'(\{)', pushstate_operator_root_callback),
-
-            (r'(unordered|ordered)(\s*)(\{)',
-             pushstate_operator_order_callback),
-
-            (r'(declare)(\s+)(ordering)',
-             bygroups(Keyword, Text, Keyword), 'declareordering'),
-
-            (r'(xquery)(\s+)(version)',
-             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
-
-            (r'(\(#)', Punctuation, 'pragma'),
-
-            # sometimes return can occur in root state
-            (r'return', Keyword),
-
-            (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
-             'option'),
-
-            #URI LITERALS - single and double quoted
-            (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
-            (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
-            (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
-             bygroups(Keyword, Punctuation)),
-            (r'(descendant|following-sibling|following|parent|preceding-sibling'
-             r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
-            (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-
-            (r'then|else', Keyword),
-
-            # ML specific
-            (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
-            (r'(catch)(\s*)(\()(\$)',
-             bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
-
-            (r'(@'+qname+')', Name.Attribute),
-            (r'(@'+ncname+')', Name.Attribute),
-            (r'@\*:'+ncname, Name.Attribute),
-            (r'(@)', Name.Attribute),
-
-            (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
-            # STANDALONE QNAMES
-            (qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
-            (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
-            (qname, Name.Tag, 'operator'),
-        ]
-    }
-
-
-class DartLexer(RegexLexer):
-    """
-    For `Dart <http://dartlang.org/>`_ source code.
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'Dart'
-    aliases = ['dart']
-    filenames = ['*.dart']
-    mimetypes = ['text/x-dart']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            include('string_literal'),
-            (r'#!(.*?)$', Comment.Preproc),
-            (r'\b(import|export)\b', Keyword, 'import_decl'),
-            (r'\b(library|source|part of|part)\b', Keyword),
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'\b(class)\b(\s+)',
-             bygroups(Keyword.Declaration, Text), 'class'),
-            (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
-             r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
-             Keyword),
-            (r'\b(abstract|const|extends|factory|final|get|implements|'
-             r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
-            (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
-            (r'\b(false|null|true)\b', Keyword.Constant),
-            (r'[~!%^&*+=|?:<>/-]|as', Operator),
-            (r'[a-zA-Z_$][a-zA-Z0-9_]*:', Name.Label),
-            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
-            (r'[(){}\[\],.;]', Punctuation),
-            (r'0[xX][0-9a-fA-F]+', Number.Hex),
-            # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
-            (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
-            (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
-            (r'\n', Text)
-            # pseudo-keyword negate intentionally left out
-        ],
-        'class': [
-            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name.Class, '#pop')
-        ],
-        'import_decl': [
-            include('string_literal'),
-            (r'\s+', Text),
-            (r'\b(as|show|hide)\b', Keyword),
-            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
-            (r'\,', Punctuation),
-            (r'\;', Punctuation, '#pop')
-        ],
-        'string_literal': [
-            # Raw strings.
-            (r'r"""([\s|\S]*?)"""', String.Double),
-            (r"r'''([\s|\S]*?)'''", String.Single),
-            (r'r"(.*?)"', String.Double),
-            (r"r'(.*?)'", String.Single),
-            # Normal Strings.
-            (r'"""', String.Double, 'string_double_multiline'),
-            (r"'''", String.Single, 'string_single_multiline'),
-            (r'"', String.Double, 'string_double'),
-            (r"'", String.Single, 'string_single')
-        ],
-        'string_common': [
-            (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
-             String.Escape),
-            (r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(String.Interpol, using(this), String.Interpol))
-        ],
-        'string_double': [
-            (r'"', String.Double, '#pop'),
-            (r'[^\"$\\\n]+', String.Double),
-            include('string_common'),
-            (r'\$+', String.Double)
-        ],
-        'string_double_multiline': [
-            (r'"""', String.Double, '#pop'),
-            (r'[^\"$\\]+', String.Double),
-            include('string_common'),
-            (r'(\$|\")+', String.Double)
-        ],
-        'string_single': [
-            (r"'", String.Single, '#pop'),
-            (r"[^\'$\\\n]+", String.Single),
-            include('string_common'),
-            (r'\$+', String.Single)
-        ],
-        'string_single_multiline': [
-            (r"'''", String.Single, '#pop'),
-            (r'[^\'$\\]+', String.Single),
-            include('string_common'),
-            (r'(\$|\')+', String.Single)
-        ]
-    }
-
-
-class TypeScriptLexer(RegexLexer):
-    """
-    For `TypeScript <http://www.python.org>`_ source code.
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'TypeScript'
-    aliases = ['ts']
-    filenames = ['*.ts']
-    mimetypes = ['text/x-typescript']
-
-    flags = re.DOTALL
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'<!--', Comment),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root': [
-            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
-             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'this)\b', Keyword, 'slashstartsregex'),
-            (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
-             r'extends|final|float|goto|implements|import|int|interface|long|native|'
-             r'package|private|protected|public|short|static|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Reserved),
-            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
-             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
-             r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            # Match stuff like: module name {...}
-            (r'\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)',
-             bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
-            # Match variable type keywords
-            (r'\b(string|bool|number)\b', Keyword.Type),
-            # Match stuff like: constructor
-            (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
-            # Match stuff like: super(argument, list)
-            (r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
-             bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
-            # Match stuff like: function() {...}
-            (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
-            # Match stuff like: (function: return type)
-            (r'([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)([a-zA-Z0-9_?.$][\w?.$]*)',
-             bygroups(Name.Other, Text, Keyword.Type)),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
-
-
-class LassoLexer(RegexLexer):
-    """
-    For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
-    syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
-    HTML, use the `LassoHtmlLexer`.
-
-    Additional options accepted:
-
-    `builtinshighlighting`
-        If given and ``True``, highlight builtin tags, types, traits, and
-        methods (default: ``True``).
-    `requiredelimiters`
-        If given and ``True``, only highlight code between delimiters as Lasso
-        (default: ``False``).
-
-    *New in Pygments 1.6.*
-    """
-
-    name = 'Lasso'
-    aliases = ['lasso', 'lassoscript']
-    filenames = ['*.lasso', '*.lasso[89]']
-    alias_filenames = ['*.incl', '*.inc', '*.las']
-    mimetypes = ['text/x-lasso']
-    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
-            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
-            (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
-            (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
-            (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
-                ('delimiters', 'anglebrackets')),
-            (r'<', Other, 'delimiters'),
-            (r'\s+', Other),
-            (r'', Other, ('delimiters', 'lassofile')),
-        ],
-        'delimiters': [
-            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
-            (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
-            (r'\[', Comment.Preproc, 'squarebrackets'),
-            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
-            (r'<', Other),
-            (r'[^[<]+', Other),
-        ],
-        'nosquarebrackets': [
-            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
-            (r'<', Other),
-            (r'[^<]+', Other),
-        ],
-        'noprocess': [
-            (r'\[/noprocess\]', Comment.Preproc, '#pop'),
-            (r'\[', Other),
-            (r'[^[]', Other),
-        ],
-        'squarebrackets': [
-            (r'\]', Comment.Preproc, '#pop'),
-            include('lasso'),
-        ],
-        'anglebrackets': [
-            (r'\?>', Comment.Preproc, '#pop'),
-            include('lasso'),
-        ],
-        'lassofile': [
-            (r'\]', Comment.Preproc, '#pop'),
-            (r'\?>', Comment.Preproc, '#pop'),
-            include('lasso'),
-        ],
-        'whitespacecomments': [
-            (r'\s+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*\*!.*?\*/', String.Doc),
-            (r'/\*.*?\*/', Comment.Multiline),
-        ],
-        'lasso': [
-            # whitespace/comments
-            include('whitespacecomments'),
-
-            # literals
-            (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
-            (r'0x[\da-f]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
-            (r"'", String.Single, 'singlestring'),
-            (r'"', String.Double, 'doublestring'),
-            (r'`[^`]*`', String.Backtick),
-
-            # names
-            (r'\$[a-z_][\w.]*', Name.Variable),
-            (r'#[a-z_][\w.]*|#\d+', Name.Variable.Instance),
-            (r"(\.)('[a-z_][\w.]*')",
-                bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
-            (r"(self)(->)('[a-z_][\w.]*')",
-                bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
-            (r'(\.\.?)([a-z_][\w.]*)',
-                bygroups(Name.Builtin.Pseudo, Name.Other)),
-            (r'(self|inherited|global|void)\b', Name.Builtin.Pseudo),
-            (r'-[a-z_][\w.]*', Name.Attribute),
-            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
-            (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
-             r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
-             r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
-             r'Error_InvalidDatabase|Error_InvalidPassword|'
-             r'Error_InvalidUsername|Error_ModuleNotFound|'
-             r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
-             r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
-             r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
-             r'Error_UpdateError)\b', Name.Exception),
-
-            # definitions
-            (r'(define)(\s+)([a-z_][\w.]*)(\s*)(=>)(\s*)(type|trait|thread)\b',
-                bygroups(Keyword.Declaration, Text, Name.Class, Text, Operator,
-                         Text, Keyword)),
-            (r'(define)(\s+)([a-z_][\w.]*)(->)([a-z_][\w.]*=?|[-+*/%<>]|==)',
-                bygroups(Keyword.Declaration, Text, Name.Class, Operator,
-                         Name.Function), 'signature'),
-            (r'(define)(\s+)([a-z_][\w.]*)',
-                bygroups(Keyword.Declaration, Text, Name.Function),
-                'signature'),
-            (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|'
-             r'[-+*/%<>]|==)(?=\s*\())', bygroups(Keyword, Text, Name.Function),
-                'signature'),
-            (r'(public|protected|private)(\s+)([a-z_][\w.]*)',
-                bygroups(Keyword, Text, Name.Function)),
-
-            # keywords
-            (r'(true|false|none|minimal|full|all)\b', Keyword.Constant),
-            (r'(local|var|variable|data)\b', Keyword.Declaration),
-            (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
-             r'null)\b', Keyword.Type),
-            (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
-            (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
-            (r'require\b', Keyword, 'requiresection'),
-            (r'(/?)(Namespace_Using)\b',
-                bygroups(Punctuation, Keyword.Namespace)),
-            (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
-             r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
-             r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
-             r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
-             r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
-             r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
-             r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
-             r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
-             r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
-             r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
-             r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
-             r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
-             r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
-             r'Tag_Name|ascending|average|by|define|descending|do|equals|'
-             r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
-             r'min|on|order|parent|protected|provide|public|require|skip|'
-             r'split_thread|sum|take|thread|to|trait|type|where|with|yield)\b',
-                 bygroups(Punctuation, Keyword)),
-
-            # other
-            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\([^)]*\)\s*=>))',
-                Name.Function, 'signature'),
-            (r'(and|or|not)\b', Operator.Word),
-            (r'([a-z_][\w.]*)(\s*)(::\s*)([a-z_][\w.]*)(\s*)(=)',
-                bygroups(Name, Text, Punctuation, Name.Label, Text, Operator)),
-            (r'((?<!->)[a-z_][\w.]*)(\s*)(=(?!=))',
-                bygroups(Name, Text, Operator)),
-            (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
-            (r'(=)(bw|ew|cn|lte?|gte?|n?eq|ft|n?rx)\b',
-                bygroups(Operator, Operator.Word)),
-            (r':=|[-+*/%=<>&|!?\\]+', Operator),
-            (r'[{}():;,@^]', Punctuation),
-        ],
-        'singlestring': [
-            (r"'", String.Single, '#pop'),
-            (r"[^'\\]+", String.Single),
-            include('escape'),
-            (r"\\+", String.Single),
-        ],
-        'doublestring': [
-            (r'"', String.Double, '#pop'),
-            (r'[^"\\]+', String.Double),
-            include('escape'),
-            (r'\\+', String.Double),
-        ],
-        'escape': [
-            (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
-             r'[abefnrtv?\"\'\\]|$)', String.Escape),
-        ],
-        'signature': [
-            (r'=>', Operator, '#pop'),
-            (r'\)', Punctuation, '#pop'),
-            (r'[(,]', Punctuation, 'parameter'),
-            include('lasso'),
-        ],
-        'parameter': [
-            (r'\)', Punctuation, '#pop'),
-            (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
-            (r'\.\.\.', Name.Builtin.Pseudo),
-            include('lasso'),
-        ],
-        'requiresection': [
-            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\())', Name, 'requiresignature'),
-            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=(\s*::\s*[\w.]+)?\s*,))', Name),
-            (r'[a-z_][\w.]*=?|[-+*/%<>]|==', Name, '#pop'),
-            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
-            (r',', Punctuation),
-            include('whitespacecomments'),
-        ],
-        'requiresignature': [
-            (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
-            (r'\)', Punctuation, '#pop:2'),
-            (r'-?[a-z_][\w.]*', Name.Attribute),
-            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
-            (r'\.\.\.', Name.Builtin.Pseudo),
-            (r'[(,]', Punctuation),
-            include('whitespacecomments'),
-        ],
-    }
-
-    def __init__(self, **options):
-        self.builtinshighlighting = get_bool_opt(
-            options, 'builtinshighlighting', True)
-        self.requiredelimiters = get_bool_opt(
-            options, 'requiredelimiters', False)
-
-        self._builtins = set()
-        if self.builtinshighlighting:
-            from pygments.lexers._lassobuiltins import BUILTINS
-            for key, value in BUILTINS.iteritems():
-                self._builtins.update(value)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        stack = ['root']
-        if self.requiredelimiters:
-            stack.append('delimiters')
-        for index, token, value in \
-            RegexLexer.get_tokens_unprocessed(self, text, stack):
-            if token is Name.Other:
-                if value.lower() in self._builtins:
-                    yield index, Name.Builtin, value
-                    continue
-            yield index, token, value
-
-    def analyse_text(text):
-        rv = 0.0
-        if 'bin/lasso9' in text:
-            rv += 0.8
-        if re.search(r'<\?(=|lasso)', text, re.I):
-            rv += 0.4
-        if re.search(r'local\(', text, re.I):
-            rv += 0.4
-        if re.search(r'\[\n|\?>', text):
-            rv += 0.4
-        return rv
-
-
-class QmlLexer(RegexLexer):
-    """
-    For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
-    *New in Pygments 1.6.*
-    """
-
-    # QML is based on javascript, so much of this is taken from the
-    # JavascriptLexer above.
-
-    name = 'QML'
-    aliases = ['qml', 'Qt Meta Language', 'Qt modeling Language']
-    filenames = ['*.qml',]
-    mimetypes = [ 'application/x-qml',]
-
-
-    # pasted from JavascriptLexer, with some additions
-    flags = re.DOTALL
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'<!--', Comment),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root' : [
-            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-
-            # QML insertions
-            (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*',Keyword.Declaration,
-             'slashstartsregex'),
-            (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:',Keyword, 'slashstartsregex'),
-
-            # the rest from JavascriptLexer
-            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
-             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'this)\b', Keyword, 'slashstartsregex'),
-            (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
-             r'extends|final|float|goto|implements|import|int|interface|long|native|'
-             r'package|private|protected|public|short|static|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Reserved),
-            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
-             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
-             r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
diff --git a/python/ext-libs/pygments/plugin.py b/python/ext-libs/pygments/plugin.py
deleted file mode 100644
index 58662e9..0000000
--- a/python/ext-libs/pygments/plugin.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.plugin
-    ~~~~~~~~~~~~~~~
-
-    Pygments setuptools plugin interface. The methods defined
-    here also work if setuptools isn't installed but they just
-    return nothing.
-
-    lexer plugins::
-
-        [pygments.lexers]
-        yourlexer = yourmodule:YourLexer
-
-    formatter plugins::
-
-        [pygments.formatters]
-        yourformatter = yourformatter:YourFormatter
-        /.ext = yourformatter:YourFormatter
-
-    As you can see, you can define extensions for the formatter
-    with a leading slash.
-
-    syntax plugins::
-
-        [pygments.styles]
-        yourstyle = yourstyle:YourStyle
-
-    filter plugin::
-
-        [pygments.filter]
-        yourfilter = yourfilter:YourFilter
-
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-try:
-    import pkg_resources
-except ImportError:
-    pkg_resources = None
-
-LEXER_ENTRY_POINT = 'pygments.lexers'
-FORMATTER_ENTRY_POINT = 'pygments.formatters'
-STYLE_ENTRY_POINT = 'pygments.styles'
-FILTER_ENTRY_POINT = 'pygments.filters'
-
-
-def find_plugin_lexers():
-    if pkg_resources is None:
-        return
-    for entrypoint in pkg_resources.iter_entry_points(LEXER_ENTRY_POINT):
-        yield entrypoint.load()
-
-
-def find_plugin_formatters():
-    if pkg_resources is None:
-        return
-    for entrypoint in pkg_resources.iter_entry_points(FORMATTER_ENTRY_POINT):
-        yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_styles():
-    if pkg_resources is None:
-        return
-    for entrypoint in pkg_resources.iter_entry_points(STYLE_ENTRY_POINT):
-        yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_filters():
-    if pkg_resources is None:
-        return
-    for entrypoint in pkg_resources.iter_entry_points(FILTER_ENTRY_POINT):
-        yield entrypoint.name, entrypoint.load()
diff --git a/python/ext-libs/pygments/scanner.py b/python/ext-libs/pygments/scanner.py
deleted file mode 100644
index f469e69..0000000
--- a/python/ext-libs/pygments/scanner.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.scanner
-    ~~~~~~~~~~~~~~~~
-
-    This library implements a regex based scanner. Some languages
-    like Pascal are easy to parse but have some keywords that
-    depend on the context. Because of this it's impossible to lex
-    that just by using a regular expression lexer like the
-    `RegexLexer`.
-
-    Have a look at the `DelphiLexer` to get an idea of how to use
-    this scanner.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-import re
-
-
-class EndOfText(RuntimeError):
-    """
-    Raise if end of text is reached and the user
-    tried to call a match function.
-    """
-
-
-class Scanner(object):
-    """
-    Simple scanner
-
-    All method patterns are regular expression strings (not
-    compiled expressions!)
-    """
-
-    def __init__(self, text, flags=0):
-        """
-        :param text:    The text which should be scanned
-        :param flags:   default regular expression flags
-        """
-        self.data = text
-        self.data_length = len(text)
-        self.start_pos = 0
-        self.pos = 0
-        self.flags = flags
-        self.last = None
-        self.match = None
-        self._re_cache = {}
-
-    def eos(self):
-        """`True` if the scanner reached the end of text."""
-        return self.pos >= self.data_length
-    eos = property(eos, eos.__doc__)
-
-    def check(self, pattern):
-        """
-        Apply `pattern` on the current position and return
-        the match object. (Doesn't touch pos). Use this for
-        lookahead.
-        """
-        if self.eos:
-            raise EndOfText()
-        if pattern not in self._re_cache:
-            self._re_cache[pattern] = re.compile(pattern, self.flags)
-        return self._re_cache[pattern].match(self.data, self.pos)
-
-    def test(self, pattern):
-        """Apply a pattern on the current position and check
-        if it patches. Doesn't touch pos."""
-        return self.check(pattern) is not None
-
-    def scan(self, pattern):
-        """
-        Scan the text for the given pattern and update pos/match
-        and related fields. The return value is a boolen that
-        indicates if the pattern matched. The matched value is
-        stored on the instance as ``match``, the last value is
-        stored as ``last``. ``start_pos`` is the position of the
-        pointer before the pattern was matched, ``pos`` is the
-        end position.
-        """
-        if self.eos:
-            raise EndOfText()
-        if pattern not in self._re_cache:
-            self._re_cache[pattern] = re.compile(pattern, self.flags)
-        self.last = self.match
-        m = self._re_cache[pattern].match(self.data, self.pos)
-        if m is None:
-            return False
-        self.start_pos = m.start()
-        self.pos = m.end()
-        self.match = m.group()
-        return True
-
-    def get_char(self):
-        """Scan exactly one char."""
-        self.scan('.')
-
-    def __repr__(self):
-        return '<%s %d/%d>' % (
-            self.__class__.__name__,
-            self.pos,
-            self.data_length
-        )
diff --git a/python/ext-libs/pygments/style.py b/python/ext-libs/pygments/style.py
deleted file mode 100644
index 0fc01b4..0000000
--- a/python/ext-libs/pygments/style.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.style
-    ~~~~~~~~~~~~~~
-
-    Basic style object.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Token, STANDARD_TYPES
-
-
-class StyleMeta(type):
-
-    def __new__(mcs, name, bases, dct):
-        obj = type.__new__(mcs, name, bases, dct)
-        for token in STANDARD_TYPES:
-            if token not in obj.styles:
-                obj.styles[token] = ''
-
-        def colorformat(text):
-            if text[0:1] == '#':
-                col = text[1:]
-                if len(col) == 6:
-                    return col
-                elif len(col) == 3:
-                    return col[0]*2 + col[1]*2 + col[2]*2
-            elif text == '':
-                return ''
-            assert False, "wrong color format %r" % text
-
-        _styles = obj._styles = {}
-
-        for ttype in obj.styles:
-            for token in ttype.split():
-                if token in _styles:
-                    continue
-                ndef = _styles.get(token.parent, None)
-                styledefs = obj.styles.get(token, '').split()
-                if  not ndef or token is None:
-                    ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
-                elif 'noinherit' in styledefs and token is not Token:
-                    ndef = _styles[Token][:]
-                else:
-                    ndef = ndef[:]
-                _styles[token] = ndef
-                for styledef in obj.styles.get(token, '').split():
-                    if styledef == 'noinherit':
-                        pass
-                    elif styledef == 'bold':
-                        ndef[1] = 1
-                    elif styledef == 'nobold':
-                        ndef[1] = 0
-                    elif styledef == 'italic':
-                        ndef[2] = 1
-                    elif styledef == 'noitalic':
-                        ndef[2] = 0
-                    elif styledef == 'underline':
-                        ndef[3] = 1
-                    elif styledef == 'nounderline':
-                        ndef[3] = 0
-                    elif styledef[:3] == 'bg:':
-                        ndef[4] = colorformat(styledef[3:])
-                    elif styledef[:7] == 'border:':
-                        ndef[5] = colorformat(styledef[7:])
-                    elif styledef == 'roman':
-                        ndef[6] = 1
-                    elif styledef == 'sans':
-                        ndef[7] = 1
-                    elif styledef == 'mono':
-                        ndef[8] = 1
-                    else:
-                        ndef[0] = colorformat(styledef)
-
-        return obj
-
-    def style_for_token(cls, token):
-        t = cls._styles[token]
-        return {
-            'color':        t[0] or None,
-            'bold':         bool(t[1]),
-            'italic':       bool(t[2]),
-            'underline':    bool(t[3]),
-            'bgcolor':      t[4] or None,
-            'border':       t[5] or None,
-            'roman':        bool(t[6]) or None,
-            'sans':         bool(t[7]) or None,
-            'mono':         bool(t[8]) or None,
-        }
-
-    def list_styles(cls):
-        return list(cls)
-
-    def styles_token(cls, ttype):
-        return ttype in cls._styles
-
-    def __iter__(cls):
-        for token in cls._styles:
-            yield token, cls.style_for_token(token)
-
-    def __len__(cls):
-        return len(cls._styles)
-
-
-class Style(object):
-    __metaclass__ = StyleMeta
-
-    #: overall background color (``None`` means transparent)
-    background_color = '#ffffff'
-
-    #: highlight background color
-    highlight_color = '#ffffcc'
-
-    #: Style definitions for individual token types.
-    styles = {}
diff --git a/python/ext-libs/pygments/styles/__init__.py b/python/ext-libs/pygments/styles/__init__.py
deleted file mode 100644
index 3d6ef73..0000000
--- a/python/ext-libs/pygments/styles/__init__.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles
-    ~~~~~~~~~~~~~~~
-
-    Contains built-in styles.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.plugin import find_plugin_styles
-from pygments.util import ClassNotFound
-
-
-#: Maps style names to 'submodule::classname'.
-STYLE_MAP = {
-    'default':  'default::DefaultStyle',
-    'emacs':    'emacs::EmacsStyle',
-    'friendly': 'friendly::FriendlyStyle',
-    'colorful': 'colorful::ColorfulStyle',
-    'autumn':   'autumn::AutumnStyle',
-    'murphy':   'murphy::MurphyStyle',
-    'manni':    'manni::ManniStyle',
-    'monokai':  'monokai::MonokaiStyle',
-    'perldoc':  'perldoc::PerldocStyle',
-    'pastie':   'pastie::PastieStyle',
-    'borland':  'borland::BorlandStyle',
-    'trac':     'trac::TracStyle',
-    'native':   'native::NativeStyle',
-    'fruity':   'fruity::FruityStyle',
-    'bw':       'bw::BlackWhiteStyle',
-    'vim':      'vim::VimStyle',
-    'vs':       'vs::VisualStudioStyle',
-    'tango':    'tango::TangoStyle',
-    'rrt':      'rrt::RrtStyle',
-}
-
-
-def get_style_by_name(name):
-    if name in STYLE_MAP:
-        mod, cls = STYLE_MAP[name].split('::')
-        builtin = "yes"
-    else:
-        for found_name, style in find_plugin_styles():
-            if name == found_name:
-                return style
-        # perhaps it got dropped into our styles package
-        builtin = ""
-        mod = name
-        cls = name.title() + "Style"
-
-    try:
-        mod = __import__('pygments.styles.' + mod, None, None, [cls])
-    except ImportError:
-        raise ClassNotFound("Could not find style module %r" % mod +
-                         (builtin and ", though it should be builtin") + ".")
-    try:
-        return getattr(mod, cls)
-    except AttributeError:
-        raise ClassNotFound("Could not find style class %r in style module." % cls)
-
-
-def get_all_styles():
-    """Return an generator for all styles by name,
-    both builtin and plugin."""
-    for name in STYLE_MAP:
-        yield name
-    for name, _ in find_plugin_styles():
-        yield name
diff --git a/python/ext-libs/pygments/styles/autumn.py b/python/ext-libs/pygments/styles/autumn.py
deleted file mode 100644
index 3960536..0000000
--- a/python/ext-libs/pygments/styles/autumn.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.autumn
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    A colorful style, inspired by the terminal highlighting style.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class AutumnStyle(Style):
-    """
-    A colorful style, inspired by the terminal highlighting style.
-    """
-
-    default_style = ""
-
-    styles = {
-        Whitespace:                 '#bbbbbb',
-
-        Comment:                    'italic #aaaaaa',
-        Comment.Preproc:            'noitalic #4c8317',
-        Comment.Special:            'italic #0000aa',
-
-        Keyword:                    '#0000aa',
-        Keyword.Type:               '#00aaaa',
-
-        Operator.Word:              '#0000aa',
-
-        Name.Builtin:               '#00aaaa',
-        Name.Function:              '#00aa00',
-        Name.Class:                 'underline #00aa00',
-        Name.Namespace:             'underline #00aaaa',
-        Name.Variable:              '#aa0000',
-        Name.Constant:              '#aa0000',
-        Name.Entity:                'bold #800',
-        Name.Attribute:             '#1e90ff',
-        Name.Tag:                   'bold #1e90ff',
-        Name.Decorator:             '#888888',
-
-        String:                     '#aa5500',
-        String.Symbol:              '#0000aa',
-        String.Regex:               '#009999',
-
-        Number:                     '#009999',
-
-        Generic.Heading:            'bold #000080',
-        Generic.Subheading:         'bold #800080',
-        Generic.Deleted:            '#aa0000',
-        Generic.Inserted:           '#00aa00',
-        Generic.Error:              '#aa0000',
-        Generic.Emph:               'italic',
-        Generic.Strong:             'bold',
-        Generic.Prompt:             '#555555',
-        Generic.Output:             '#888888',
-        Generic.Traceback:          '#aa0000',
-
-        Error:                      '#F00 bg:#FAA'
-    }
diff --git a/python/ext-libs/pygments/styles/borland.py b/python/ext-libs/pygments/styles/borland.py
deleted file mode 100644
index 9858034..0000000
--- a/python/ext-libs/pygments/styles/borland.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.borland
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Style similar to the style used in the Borland IDEs.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class BorlandStyle(Style):
-    """
-    Style similar to the style used in the borland IDEs.
-    """
-
-    default_style = ''
-
-    styles = {
-        Whitespace:             '#bbbbbb',
-
-        Comment:                'italic #008800',
-        Comment.Preproc:        'noitalic #008080',
-        Comment.Special:        'noitalic bold',
-
-        String:                 '#0000FF',
-        String.Char:            '#800080',
-        Number:                 '#0000FF',
-        Keyword:                'bold #000080',
-        Operator.Word:          'bold',
-        Name.Tag:               'bold #000080',
-        Name.Attribute:         '#FF0000',
-
-        Generic.Heading:        '#999999',
-        Generic.Subheading:     '#aaaaaa',
-        Generic.Deleted:        'bg:#ffdddd #000000',
-        Generic.Inserted:       'bg:#ddffdd #000000',
-        Generic.Error:          '#aa0000',
-        Generic.Emph:           'italic',
-        Generic.Strong:         'bold',
-        Generic.Prompt:         '#555555',
-        Generic.Output:         '#888888',
-        Generic.Traceback:      '#aa0000',
-
-        Error:                  'bg:#e3d2d2 #a61717'
-    }
diff --git a/python/ext-libs/pygments/styles/bw.py b/python/ext-libs/pygments/styles/bw.py
deleted file mode 100644
index 170442a..0000000
--- a/python/ext-libs/pygments/styles/bw.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.bw
-    ~~~~~~~~~~~~~~~~~~
-
-    Simple black/white only style.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Operator, Generic
-
-
-class BlackWhiteStyle(Style):
-
-    background_color = "#ffffff"
-    default_style = ""
-
-    styles = {
-        Comment:                   "italic",
-        Comment.Preproc:           "noitalic",
-
-        Keyword:                   "bold",
-        Keyword.Pseudo:            "nobold",
-        Keyword.Type:              "nobold",
-
-        Operator.Word:             "bold",
-
-        Name.Class:                "bold",
-        Name.Namespace:            "bold",
-        Name.Exception:            "bold",
-        Name.Entity:               "bold",
-        Name.Tag:                  "bold",
-
-        String:                    "italic",
-        String.Interpol:           "bold",
-        String.Escape:             "bold",
-
-        Generic.Heading:           "bold",
-        Generic.Subheading:        "bold",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/styles/colorful.py b/python/ext-libs/pygments/styles/colorful.py
deleted file mode 100644
index eb59546..0000000
--- a/python/ext-libs/pygments/styles/colorful.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.colorful
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    A colorful style, inspired by CodeRay.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class ColorfulStyle(Style):
-    """
-    A colorful style, inspired by CodeRay.
-    """
-
-    default_style = ""
-
-    styles = {
-        Whitespace:                "#bbbbbb",
-
-        Comment:                   "#888",
-        Comment.Preproc:           "#579",
-        Comment.Special:           "bold #cc0000",
-
-        Keyword:                   "bold #080",
-        Keyword.Pseudo:            "#038",
-        Keyword.Type:              "#339",
-
-        Operator:                  "#333",
-        Operator.Word:             "bold #000",
-
-        Name.Builtin:              "#007020",
-        Name.Function:             "bold #06B",
-        Name.Class:                "bold #B06",
-        Name.Namespace:            "bold #0e84b5",
-        Name.Exception:            "bold #F00",
-        Name.Variable:             "#963",
-        Name.Variable.Instance:    "#33B",
-        Name.Variable.Class:       "#369",
-        Name.Variable.Global:      "bold #d70",
-        Name.Constant:             "bold #036",
-        Name.Label:                "bold #970",
-        Name.Entity:               "bold #800",
-        Name.Attribute:            "#00C",
-        Name.Tag:                  "#070",
-        Name.Decorator:            "bold #555",
-
-        String:                    "bg:#fff0f0",
-        String.Char:               "#04D bg:",
-        String.Doc:                "#D42 bg:",
-        String.Interpol:           "bg:#eee",
-        String.Escape:             "bold #666",
-        String.Regex:              "bg:#fff0ff #000",
-        String.Symbol:             "#A60 bg:",
-        String.Other:              "#D20",
-
-        Number:                    "bold #60E",
-        Number.Integer:            "bold #00D",
-        Number.Float:              "bold #60E",
-        Number.Hex:                "bold #058",
-        Number.Oct:                "bold #40E",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#A00000",
-        Generic.Inserted:          "#00A000",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #c65d09",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "#F00 bg:#FAA"
-    }
diff --git a/python/ext-libs/pygments/styles/default.py b/python/ext-libs/pygments/styles/default.py
deleted file mode 100644
index 77bdac0..0000000
--- a/python/ext-libs/pygments/styles/default.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.default
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    The default highlighting style.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class DefaultStyle(Style):
-    """
-    The default style (inspired by Emacs 22).
-    """
-
-    background_color = "#f8f8f8"
-    default_style = ""
-
-    styles = {
-        Whitespace:                "#bbbbbb",
-        Comment:                   "italic #408080",
-        Comment.Preproc:           "noitalic #BC7A00",
-
-        #Keyword:                   "bold #AA22FF",
-        Keyword:                   "bold #008000",
-        Keyword.Pseudo:            "nobold",
-        Keyword.Type:              "nobold #B00040",
-
-        Operator:                  "#666666",
-        Operator.Word:             "bold #AA22FF",
-
-        Name.Builtin:              "#008000",
-        Name.Function:             "#0000FF",
-        Name.Class:                "bold #0000FF",
-        Name.Namespace:            "bold #0000FF",
-        Name.Exception:            "bold #D2413A",
-        Name.Variable:             "#19177C",
-        Name.Constant:             "#880000",
-        Name.Label:                "#A0A000",
-        Name.Entity:               "bold #999999",
-        Name.Attribute:            "#7D9029",
-        Name.Tag:                  "bold #008000",
-        Name.Decorator:            "#AA22FF",
-
-        String:                    "#BA2121",
-        String.Doc:                "italic",
-        String.Interpol:           "bold #BB6688",
-        String.Escape:             "bold #BB6622",
-        String.Regex:              "#BB6688",
-        #String.Symbol:             "#B8860B",
-        String.Symbol:             "#19177C",
-        String.Other:              "#008000",
-        Number:                    "#666666",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#A00000",
-        Generic.Inserted:          "#00A000",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #000080",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/styles/emacs.py b/python/ext-libs/pygments/styles/emacs.py
deleted file mode 100644
index 9f8b407..0000000
--- a/python/ext-libs/pygments/styles/emacs.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.emacs
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    A highlighting style for Pygments, inspired by Emacs.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class EmacsStyle(Style):
-    """
-    The default style (inspired by Emacs 22).
-    """
-
-    background_color = "#f8f8f8"
-    default_style = ""
-
-    styles = {
-        Whitespace:                "#bbbbbb",
-        Comment:                   "italic #008800",
-        Comment.Preproc:           "noitalic",
-        Comment.Special:           "noitalic bold",
-
-        Keyword:                   "bold #AA22FF",
-        Keyword.Pseudo:            "nobold",
-        Keyword.Type:              "bold #00BB00",
-
-        Operator:                  "#666666",
-        Operator.Word:             "bold #AA22FF",
-
-        Name.Builtin:              "#AA22FF",
-        Name.Function:             "#00A000",
-        Name.Class:                "#0000FF",
-        Name.Namespace:            "bold #0000FF",
-        Name.Exception:            "bold #D2413A",
-        Name.Variable:             "#B8860B",
-        Name.Constant:             "#880000",
-        Name.Label:                "#A0A000",
-        Name.Entity:               "bold #999999",
-        Name.Attribute:            "#BB4444",
-        Name.Tag:                  "bold #008000",
-        Name.Decorator:            "#AA22FF",
-
-        String:                    "#BB4444",
-        String.Doc:                "italic",
-        String.Interpol:           "bold #BB6688",
-        String.Escape:             "bold #BB6622",
-        String.Regex:              "#BB6688",
-        String.Symbol:             "#B8860B",
-        String.Other:              "#008000",
-        Number:                    "#666666",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#A00000",
-        Generic.Inserted:          "#00A000",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #000080",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/styles/friendly.py b/python/ext-libs/pygments/styles/friendly.py
deleted file mode 100644
index 732a125..0000000
--- a/python/ext-libs/pygments/styles/friendly.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.friendly
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    A modern style based on the VIM pyte theme.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class FriendlyStyle(Style):
-    """
-    A modern style based on the VIM pyte theme.
-    """
-
-    background_color = "#f0f0f0"
-    default_style = ""
-
-    styles = {
-        Whitespace:                "#bbbbbb",
-        Comment:                   "italic #60a0b0",
-        Comment.Preproc:           "noitalic #007020",
-        Comment.Special:           "noitalic bg:#fff0f0",
-
-        Keyword:                   "bold #007020",
-        Keyword.Pseudo:            "nobold",
-        Keyword.Type:              "nobold #902000",
-
-        Operator:                  "#666666",
-        Operator.Word:             "bold #007020",
-
-        Name.Builtin:              "#007020",
-        Name.Function:             "#06287e",
-        Name.Class:                "bold #0e84b5",
-        Name.Namespace:            "bold #0e84b5",
-        Name.Exception:            "#007020",
-        Name.Variable:             "#bb60d5",
-        Name.Constant:             "#60add5",
-        Name.Label:                "bold #002070",
-        Name.Entity:               "bold #d55537",
-        Name.Attribute:            "#4070a0",
-        Name.Tag:                  "bold #062873",
-        Name.Decorator:            "bold #555555",
-
-        String:                    "#4070a0",
-        String.Doc:                "italic",
-        String.Interpol:           "italic #70a0d0",
-        String.Escape:             "bold #4070a0",
-        String.Regex:              "#235388",
-        String.Symbol:             "#517918",
-        String.Other:              "#c65d09",
-        Number:                    "#40a070",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#A00000",
-        Generic.Inserted:          "#00A000",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #c65d09",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/styles/fruity.py b/python/ext-libs/pygments/styles/fruity.py
deleted file mode 100644
index 4533415..0000000
--- a/python/ext-libs/pygments/styles/fruity.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.fruity
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    pygments version of my "fruity" vim theme.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Comment, Name, Keyword, \
-    Generic, Number, String, Whitespace
-
-class FruityStyle(Style):
-    """
-    Pygments version of the "native" vim theme.
-    """
-
-    background_color = '#111111'
-    highlight_color = '#333333'
-
-    styles = {
-        Whitespace:         '#888888',
-        Token:              '#ffffff',
-        Generic.Output:     '#444444 bg:#222222',
-        Keyword:            '#fb660a bold',
-        Keyword.Pseudo:     'nobold',
-        Number:             '#0086f7 bold',
-        Name.Tag:           '#fb660a bold',
-        Name.Variable:      '#fb660a',
-        Comment:            '#008800 bg:#0f140f italic',
-        Name.Attribute:     '#ff0086 bold',
-        String:             '#0086d2',
-        Name.Function:      '#ff0086 bold',
-        Generic.Heading:    '#ffffff bold',
-        Keyword.Type:       '#cdcaa9 bold',
-        Generic.Subheading: '#ffffff bold',
-        Name.Constant:      '#0086d2',
-        Comment.Preproc:    '#ff0007 bold'
-    }
diff --git a/python/ext-libs/pygments/styles/manni.py b/python/ext-libs/pygments/styles/manni.py
deleted file mode 100644
index 036a212..0000000
--- a/python/ext-libs/pygments/styles/manni.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.manni
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    A colorful style, inspired by the terminal highlighting style.
-
-    This is a port of the style used in the `php port`_ of pygments
-    by Manni. The style is called 'default' there.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class ManniStyle(Style):
-    """
-    A colorful style, inspired by the terminal highlighting style.
-    """
-
-    background_color = '#f0f3f3'
-
-    styles = {
-        Whitespace:         '#bbbbbb',
-        Comment:            'italic #0099FF',
-        Comment.Preproc:    'noitalic #009999',
-        Comment.Special:    'bold',
-
-        Keyword:            'bold #006699',
-        Keyword.Pseudo:     'nobold',
-        Keyword.Type:       '#007788',
-
-        Operator:           '#555555',
-        Operator.Word:      'bold #000000',
-
-        Name.Builtin:       '#336666',
-        Name.Function:      '#CC00FF',
-        Name.Class:         'bold #00AA88',
-        Name.Namespace:     'bold #00CCFF',
-        Name.Exception:     'bold #CC0000',
-        Name.Variable:      '#003333',
-        Name.Constant:      '#336600',
-        Name.Label:         '#9999FF',
-        Name.Entity:        'bold #999999',
-        Name.Attribute:     '#330099',
-        Name.Tag:           'bold #330099',
-        Name.Decorator:     '#9999FF',
-
-        String:             '#CC3300',
-        String.Doc:         'italic',
-        String.Interpol:    '#AA0000',
-        String.Escape:      'bold #CC3300',
-        String.Regex:       '#33AAAA',
-        String.Symbol:      '#FFCC33',
-        String.Other:       '#CC3300',
-
-        Number:             '#FF6600',
-
-        Generic.Heading:    'bold #003300',
-        Generic.Subheading: 'bold #003300',
-        Generic.Deleted:    'border:#CC0000 bg:#FFCCCC',
-        Generic.Inserted:   'border:#00CC00 bg:#CCFFCC',
-        Generic.Error:      '#FF0000',
-        Generic.Emph:       'italic',
-        Generic.Strong:     'bold',
-        Generic.Prompt:     'bold #000099',
-        Generic.Output:     '#AAAAAA',
-        Generic.Traceback:  '#99CC66',
-
-        Error:              'bg:#FFAAAA #AA0000'
-    }
diff --git a/python/ext-libs/pygments/styles/monokai.py b/python/ext-libs/pygments/styles/monokai.py
deleted file mode 100644
index 31dc83b..0000000
--- a/python/ext-libs/pygments/styles/monokai.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.monokai
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Mimic the Monokai color scheme. Based on tango.py.
-
-    http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
-     Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-class MonokaiStyle(Style):
-    """
-    This style mimics the Monokai color scheme.
-    """
-
-    background_color = "#272822"
-    highlight_color = "#49483e"
-
-    styles = {
-        # No corresponding class for the following:
-        Text:                      "#f8f8f2", # class:  ''
-        Whitespace:                "",        # class: 'w'
-        Error:                     "#960050 bg:#1e0010", # class: 'err'
-        Other:                     "",        # class 'x'
-
-        Comment:                   "#75715e", # class: 'c'
-        Comment.Multiline:         "",        # class: 'cm'
-        Comment.Preproc:           "",        # class: 'cp'
-        Comment.Single:            "",        # class: 'c1'
-        Comment.Special:           "",        # class: 'cs'
-
-        Keyword:                   "#66d9ef", # class: 'k'
-        Keyword.Constant:          "",        # class: 'kc'
-        Keyword.Declaration:       "",        # class: 'kd'
-        Keyword.Namespace:         "#f92672", # class: 'kn'
-        Keyword.Pseudo:            "",        # class: 'kp'
-        Keyword.Reserved:          "",        # class: 'kr'
-        Keyword.Type:              "",        # class: 'kt'
-
-        Operator:                  "#f92672", # class: 'o'
-        Operator.Word:             "",        # class: 'ow' - like keywords
-
-        Punctuation:               "#f8f8f2", # class: 'p'
-
-        Name:                      "#f8f8f2", # class: 'n'
-        Name.Attribute:            "#a6e22e", # class: 'na' - to be revised
-        Name.Builtin:              "",        # class: 'nb'
-        Name.Builtin.Pseudo:       "",        # class: 'bp'
-        Name.Class:                "#a6e22e", # class: 'nc' - to be revised
-        Name.Constant:             "#66d9ef", # class: 'no' - to be revised
-        Name.Decorator:            "#a6e22e", # class: 'nd' - to be revised
-        Name.Entity:               "",        # class: 'ni'
-        Name.Exception:            "#a6e22e", # class: 'ne'
-        Name.Function:             "#a6e22e", # class: 'nf'
-        Name.Property:             "",        # class: 'py'
-        Name.Label:                "",        # class: 'nl'
-        Name.Namespace:            "",        # class: 'nn' - to be revised
-        Name.Other:                "#a6e22e", # class: 'nx'
-        Name.Tag:                  "#f92672", # class: 'nt' - like a keyword
-        Name.Variable:             "",        # class: 'nv' - to be revised
-        Name.Variable.Class:       "",        # class: 'vc' - to be revised
-        Name.Variable.Global:      "",        # class: 'vg' - to be revised
-        Name.Variable.Instance:    "",        # class: 'vi' - to be revised
-
-        Number:                    "#ae81ff", # class: 'm'
-        Number.Float:              "",        # class: 'mf'
-        Number.Hex:                "",        # class: 'mh'
-        Number.Integer:            "",        # class: 'mi'
-        Number.Integer.Long:       "",        # class: 'il'
-        Number.Oct:                "",        # class: 'mo'
-
-        Literal:                   "#ae81ff", # class: 'l'
-        Literal.Date:              "#e6db74", # class: 'ld'
-
-        String:                    "#e6db74", # class: 's'
-        String.Backtick:           "",        # class: 'sb'
-        String.Char:               "",        # class: 'sc'
-        String.Doc:                "",        # class: 'sd' - like a comment
-        String.Double:             "",        # class: 's2'
-        String.Escape:             "#ae81ff", # class: 'se'
-        String.Heredoc:            "",        # class: 'sh'
-        String.Interpol:           "",        # class: 'si'
-        String.Other:              "",        # class: 'sx'
-        String.Regex:              "",        # class: 'sr'
-        String.Single:             "",        # class: 's1'
-        String.Symbol:             "",        # class: 'ss'
-
-        Generic:                   "",        # class: 'g'
-        Generic.Deleted:           "",        # class: 'gd',
-        Generic.Emph:              "italic",  # class: 'ge'
-        Generic.Error:             "",        # class: 'gr'
-        Generic.Heading:           "",        # class: 'gh'
-        Generic.Inserted:          "",        # class: 'gi'
-        Generic.Output:            "",        # class: 'go'
-        Generic.Prompt:            "",        # class: 'gp'
-        Generic.Strong:            "bold",    # class: 'gs'
-        Generic.Subheading:        "",        # class: 'gu'
-        Generic.Traceback:         "",        # class: 'gt'
-    }
diff --git a/python/ext-libs/pygments/styles/murphy.py b/python/ext-libs/pygments/styles/murphy.py
deleted file mode 100644
index dbf4eba..0000000
--- a/python/ext-libs/pygments/styles/murphy.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.murphy
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Murphy's style from CodeRay.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class MurphyStyle(Style):
-    """
-    Murphy's style from CodeRay.
-    """
-
-    default_style = ""
-
-    styles = {
-        Whitespace:                "#bbbbbb",
-        Comment:                   "#666 italic",
-        Comment.Preproc:           "#579 noitalic",
-        Comment.Special:           "#c00 bold",
-
-        Keyword:                   "bold #289",
-        Keyword.Pseudo:            "#08f",
-        Keyword.Type:              "#66f",
-
-        Operator:                  "#333",
-        Operator.Word:             "bold #000",
-
-        Name.Builtin:              "#072",
-        Name.Function:             "bold #5ed",
-        Name.Class:                "bold #e9e",
-        Name.Namespace:            "bold #0e84b5",
-        Name.Exception:            "bold #F00",
-        Name.Variable:             "#036",
-        Name.Variable.Instance:    "#aaf",
-        Name.Variable.Class:       "#ccf",
-        Name.Variable.Global:      "#f84",
-        Name.Constant:             "bold #5ed",
-        Name.Label:                "bold #970",
-        Name.Entity:               "#800",
-        Name.Attribute:            "#007",
-        Name.Tag:                  "#070",
-        Name.Decorator:            "bold #555",
-
-        String:                    "bg:#e0e0ff",
-        String.Char:               "#88F bg:",
-        String.Doc:                "#D42 bg:",
-        String.Interpol:           "bg:#eee",
-        String.Escape:             "bold #666",
-        String.Regex:              "bg:#e0e0ff #000",
-        String.Symbol:             "#fc8 bg:",
-        String.Other:              "#f88",
-
-        Number:                    "bold #60E",
-        Number.Integer:            "bold #66f",
-        Number.Float:              "bold #60E",
-        Number.Hex:                "bold #058",
-        Number.Oct:                "bold #40E",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#A00000",
-        Generic.Inserted:          "#00A000",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #c65d09",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "#F00 bg:#FAA"
-    }
diff --git a/python/ext-libs/pygments/styles/native.py b/python/ext-libs/pygments/styles/native.py
deleted file mode 100644
index 0de8438..0000000
--- a/python/ext-libs/pygments/styles/native.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.native
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    pygments version of my "native" vim theme.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Token, Whitespace
-
-
-class NativeStyle(Style):
-    """
-    Pygments version of the "native" vim theme.
-    """
-
-    background_color = '#202020'
-    highlight_color = '#404040'
-
-    styles = {
-        Token:              '#d0d0d0',
-        Whitespace:         '#666666',
-
-        Comment:            'italic #999999',
-        Comment.Preproc:    'noitalic bold #cd2828',
-        Comment.Special:    'noitalic bold #e50808 bg:#520000',
-
-        Keyword:            'bold #6ab825',
-        Keyword.Pseudo:     'nobold',
-        Operator.Word:      'bold #6ab825',
-
-        String:             '#ed9d13',
-        String.Other:       '#ffa500',
-
-        Number:             '#3677a9',
-
-        Name.Builtin:       '#24909d',
-        Name.Variable:      '#40ffff',
-        Name.Constant:      '#40ffff',
-        Name.Class:         'underline #447fcf',
-        Name.Function:      '#447fcf',
-        Name.Namespace:     'underline #447fcf',
-        Name.Exception:     '#bbbbbb',
-        Name.Tag:           'bold #6ab825',
-        Name.Attribute:     '#bbbbbb',
-        Name.Decorator:     '#ffa500',
-
-        Generic.Heading:    'bold #ffffff',
-        Generic.Subheading: 'underline #ffffff',
-        Generic.Deleted:    '#d22323',
-        Generic.Inserted:   '#589819',
-        Generic.Error:      '#d22323',
-        Generic.Emph:       'italic',
-        Generic.Strong:     'bold',
-        Generic.Prompt:     '#aaaaaa',
-        Generic.Output:     '#cccccc',
-        Generic.Traceback:  '#d22323',
-
-        Error:              'bg:#e3d2d2 #a61717'
-    }
diff --git a/python/ext-libs/pygments/styles/pastie.py b/python/ext-libs/pygments/styles/pastie.py
deleted file mode 100644
index 2a2f386..0000000
--- a/python/ext-libs/pygments/styles/pastie.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.pastie
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Style similar to the `pastie`_ default style.
-
-    .. _pastie: http://pastie.caboo.se/
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class PastieStyle(Style):
-    """
-    Style similar to the pastie default style.
-    """
-
-    default_style = ''
-
-    styles = {
-        Whitespace:             '#bbbbbb',
-        Comment:                '#888888',
-        Comment.Preproc:        'bold #cc0000',
-        Comment.Special:        'bg:#fff0f0 bold #cc0000',
-
-        String:                 'bg:#fff0f0 #dd2200',
-        String.Regex:           'bg:#fff0ff #008800',
-        String.Other:           'bg:#f0fff0 #22bb22',
-        String.Symbol:          '#aa6600',
-        String.Interpol:        '#3333bb',
-        String.Escape:          '#0044dd',
-
-        Operator.Word:          '#008800',
-
-        Keyword:                'bold #008800',
-        Keyword.Pseudo:         'nobold',
-        Keyword.Type:           '#888888',
-
-        Name.Class:             'bold #bb0066',
-        Name.Exception:         'bold #bb0066',
-        Name.Function:          'bold #0066bb',
-        Name.Property:          'bold #336699',
-        Name.Namespace:         'bold #bb0066',
-        Name.Builtin:           '#003388',
-        Name.Variable:          '#336699',
-        Name.Variable.Class:    '#336699',
-        Name.Variable.Instance: '#3333bb',
-        Name.Variable.Global:   '#dd7700',
-        Name.Constant:          'bold #003366',
-        Name.Tag:               'bold #bb0066',
-        Name.Attribute:         '#336699',
-        Name.Decorator:         '#555555',
-        Name.Label:             'italic #336699',
-
-        Number:                 'bold #0000DD',
-
-        Generic.Heading:        '#333',
-        Generic.Subheading:     '#666',
-        Generic.Deleted:        'bg:#ffdddd #000000',
-        Generic.Inserted:       'bg:#ddffdd #000000',
-        Generic.Error:          '#aa0000',
-        Generic.Emph:           'italic',
-        Generic.Strong:         'bold',
-        Generic.Prompt:         '#555555',
-        Generic.Output:         '#888888',
-        Generic.Traceback:      '#aa0000',
-
-        Error:                  'bg:#e3d2d2 #a61717'
-    }
diff --git a/python/ext-libs/pygments/styles/perldoc.py b/python/ext-libs/pygments/styles/perldoc.py
deleted file mode 100644
index b8b67b2..0000000
--- a/python/ext-libs/pygments/styles/perldoc.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.perldoc
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Style similar to the style used in the `perldoc`_ code blocks.
-
-    .. _perldoc: http://perldoc.perl.org/
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class PerldocStyle(Style):
-    """
-    Style similar to the style used in the perldoc code blocks.
-    """
-
-    background_color = '#eeeedd'
-    default_style = ''
-
-    styles = {
-        Whitespace:             '#bbbbbb',
-        Comment:                '#228B22',
-        Comment.Preproc:        '#1e889b',
-        Comment.Special:        '#8B008B bold',
-
-        String:                 '#CD5555',
-        String.Heredoc:         '#1c7e71 italic',
-        String.Regex:           '#B452CD',
-        String.Other:           '#cb6c20',
-        String.Regex:           '#1c7e71',
-
-        Number:                 '#B452CD',
-
-        Operator.Word:          '#8B008B',
-
-        Keyword:                '#8B008B bold',
-        Keyword.Type:           '#a7a7a7',
-
-        Name.Class:             '#008b45 bold',
-        Name.Exception:         '#008b45 bold',
-        Name.Function:          '#008b45',
-        Name.Namespace:         '#008b45 underline',
-        Name.Variable:          '#00688B',
-        Name.Constant:          '#00688B',
-        Name.Decorator:         '#707a7c',
-        Name.Tag:               '#8B008B bold',
-        Name.Attribute:         '#658b00',
-        Name.Builtin:           '#658b00',
-
-        Generic.Heading:        'bold #000080',
-        Generic.Subheading:     'bold #800080',
-        Generic.Deleted:        '#aa0000',
-        Generic.Inserted:       '#00aa00',
-        Generic.Error:          '#aa0000',
-        Generic.Emph:           'italic',
-        Generic.Strong:         'bold',
-        Generic.Prompt:         '#555555',
-        Generic.Output:         '#888888',
-        Generic.Traceback:      '#aa0000',
-
-        Error:                  'bg:#e3d2d2 #a61717'
-    }
diff --git a/python/ext-libs/pygments/styles/rrt.py b/python/ext-libs/pygments/styles/rrt.py
deleted file mode 100644
index 1a2fc6a..0000000
--- a/python/ext-libs/pygments/styles/rrt.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.rrt
-    ~~~~~~~~~~~~~~~~~~~
-
-    pygments "rrt" theme, based on Zap and Emacs defaults.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Name, Keyword, String
-
-
-class RrtStyle(Style):
-    """
-    Minimalistic "rrt" theme, based on Zap and Emacs defaults.
-    """
-
-    background_color = '#000000'
-    highlight_color = '#0000ff'
-
-    styles = {
-        Comment:            '#00ff00',
-        Name.Function:      '#ffff00',
-        Name.Variable:      '#eedd82',
-        Name.Constant:      '#7fffd4',
-        Keyword:            '#ff0000',
-        Comment.Preproc:    '#e5e5e5',
-        String:             '#87ceeb',
-        Keyword.Type:       '#ee82ee',
-    }
diff --git a/python/ext-libs/pygments/styles/tango.py b/python/ext-libs/pygments/styles/tango.py
deleted file mode 100644
index 7b1c4f3..0000000
--- a/python/ext-libs/pygments/styles/tango.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.tango
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    The Crunchy default Style inspired from the color palette from
-    the Tango Icon Theme Guidelines.
-
-    http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
-
-    Butter:     #fce94f     #edd400     #c4a000
-    Orange:     #fcaf3e     #f57900     #ce5c00
-    Chocolate:  #e9b96e     #c17d11     #8f5902
-    Chameleon:  #8ae234     #73d216     #4e9a06
-    Sky Blue:   #729fcf     #3465a4     #204a87
-    Plum:       #ad7fa8     #75507b     #5c35cc
-    Scarlet Red:#ef2929     #cc0000     #a40000
-    Aluminium:  #eeeeec     #d3d7cf     #babdb6
-                #888a85     #555753     #2e3436
-
-    Not all of the above colors are used; other colors added:
-        very light grey: #f8f8f8  (for background)
-
-    This style can be used as a template as it includes all the known
-    Token types, unlike most (if not all) of the styles included in the
-    Pygments distribution.
-
-    However, since Crunchy is intended to be used by beginners, we have strived
-    to create a style that gloss over subtle distinctions between different
-    categories.
-
-    Taking Python for example, comments (Comment.*) and docstrings (String.Doc)
-    have been chosen to have the same style.  Similarly, keywords (Keyword.*),
-    and Operator.Word (and, or, in) have been assigned the same style.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-class TangoStyle(Style):
-    """
-    The Crunchy default Style inspired from the color palette from
-    the Tango Icon Theme Guidelines.
-    """
-
-    # work in progress...
-
-    background_color = "#f8f8f8"
-    default_style = ""
-
-    styles = {
-        # No corresponding class for the following:
-        #Text:                     "", # class:  ''
-        Whitespace:                "underline #f8f8f8",      # class: 'w'
-        Error:                     "#a40000 border:#ef2929", # class: 'err'
-        Other:                     "#000000",                # class 'x'
-
-        Comment:                   "italic #8f5902", # class: 'c'
-        Comment.Multiline:         "italic #8f5902", # class: 'cm'
-        Comment.Preproc:           "italic #8f5902", # class: 'cp'
-        Comment.Single:            "italic #8f5902", # class: 'c1'
-        Comment.Special:           "italic #8f5902", # class: 'cs'
-
-        Keyword:                   "bold #204a87",   # class: 'k'
-        Keyword.Constant:          "bold #204a87",   # class: 'kc'
-        Keyword.Declaration:       "bold #204a87",   # class: 'kd'
-        Keyword.Namespace:         "bold #204a87",   # class: 'kn'
-        Keyword.Pseudo:            "bold #204a87",   # class: 'kp'
-        Keyword.Reserved:          "bold #204a87",   # class: 'kr'
-        Keyword.Type:              "bold #204a87",   # class: 'kt'
-
-        Operator:                  "bold #ce5c00",   # class: 'o'
-        Operator.Word:             "bold #204a87",   # class: 'ow' - like keywords
-
-        Punctuation:               "bold #000000",   # class: 'p'
-
-        # because special names such as Name.Class, Name.Function, etc.
-        # are not recognized as such later in the parsing, we choose them
-        # to look the same as ordinary variables.
-        Name:                      "#000000",        # class: 'n'
-        Name.Attribute:            "#c4a000",        # class: 'na' - to be revised
-        Name.Builtin:              "#204a87",        # class: 'nb'
-        Name.Builtin.Pseudo:       "#3465a4",        # class: 'bp'
-        Name.Class:                "#000000",        # class: 'nc' - to be revised
-        Name.Constant:             "#000000",        # class: 'no' - to be revised
-        Name.Decorator:            "bold #5c35cc",   # class: 'nd' - to be revised
-        Name.Entity:               "#ce5c00",        # class: 'ni'
-        Name.Exception:            "bold #cc0000",   # class: 'ne'
-        Name.Function:             "#000000",        # class: 'nf'
-        Name.Property:             "#000000",        # class: 'py'
-        Name.Label:                "#f57900",        # class: 'nl'
-        Name.Namespace:            "#000000",        # class: 'nn' - to be revised
-        Name.Other:                "#000000",        # class: 'nx'
-        Name.Tag:                  "bold #204a87",   # class: 'nt' - like a keyword
-        Name.Variable:             "#000000",        # class: 'nv' - to be revised
-        Name.Variable.Class:       "#000000",        # class: 'vc' - to be revised
-        Name.Variable.Global:      "#000000",        # class: 'vg' - to be revised
-        Name.Variable.Instance:    "#000000",        # class: 'vi' - to be revised
-
-        # since the tango light blue does not show up well in text, we choose
-        # a pure blue instead.
-        Number:                    "bold #0000cf",   # class: 'm'
-        Number.Float:              "bold #0000cf",   # class: 'mf'
-        Number.Hex:                "bold #0000cf",   # class: 'mh'
-        Number.Integer:            "bold #0000cf",   # class: 'mi'
-        Number.Integer.Long:       "bold #0000cf",   # class: 'il'
-        Number.Oct:                "bold #0000cf",   # class: 'mo'
-
-        Literal:                   "#000000",        # class: 'l'
-        Literal.Date:              "#000000",        # class: 'ld'
-
-        String:                    "#4e9a06",        # class: 's'
-        String.Backtick:           "#4e9a06",        # class: 'sb'
-        String.Char:               "#4e9a06",        # class: 'sc'
-        String.Doc:                "italic #8f5902", # class: 'sd' - like a comment
-        String.Double:             "#4e9a06",        # class: 's2'
-        String.Escape:             "#4e9a06",        # class: 'se'
-        String.Heredoc:            "#4e9a06",        # class: 'sh'
-        String.Interpol:           "#4e9a06",        # class: 'si'
-        String.Other:              "#4e9a06",        # class: 'sx'
-        String.Regex:              "#4e9a06",        # class: 'sr'
-        String.Single:             "#4e9a06",        # class: 's1'
-        String.Symbol:             "#4e9a06",        # class: 'ss'
-
-        Generic:                   "#000000",        # class: 'g'
-        Generic.Deleted:           "#a40000",        # class: 'gd'
-        Generic.Emph:              "italic #000000", # class: 'ge'
-        Generic.Error:             "#ef2929",        # class: 'gr'
-        Generic.Heading:           "bold #000080",   # class: 'gh'
-        Generic.Inserted:          "#00A000",        # class: 'gi'
-        Generic.Output:            "italic #000000", # class: 'go'
-        Generic.Prompt:            "#8f5902",        # class: 'gp'
-        Generic.Strong:            "bold #000000",   # class: 'gs'
-        Generic.Subheading:        "bold #800080",   # class: 'gu'
-        Generic.Traceback:         "bold #a40000",   # class: 'gt'
-    }
diff --git a/python/ext-libs/pygments/styles/trac.py b/python/ext-libs/pygments/styles/trac.py
deleted file mode 100644
index 714e36c..0000000
--- a/python/ext-libs/pygments/styles/trac.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.trac
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Port of the default trac highlighter design.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace
-
-
-class TracStyle(Style):
-    """
-    Port of the default trac highlighter design.
-    """
-
-    default_style = ''
-
-    styles = {
-        Whitespace:             '#bbbbbb',
-        Comment:                'italic #999988',
-        Comment.Preproc:        'bold noitalic #999999',
-        Comment.Special:        'bold #999999',
-
-        Operator:               'bold',
-
-        String:                 '#bb8844',
-        String.Regex:           '#808000',
-
-        Number:                 '#009999',
-
-        Keyword:                'bold',
-        Keyword.Type:           '#445588',
-
-        Name.Builtin:           '#999999',
-        Name.Function:          'bold #990000',
-        Name.Class:             'bold #445588',
-        Name.Exception:         'bold #990000',
-        Name.Namespace:         '#555555',
-        Name.Variable:          '#008080',
-        Name.Constant:          '#008080',
-        Name.Tag:               '#000080',
-        Name.Attribute:         '#008080',
-        Name.Entity:            '#800080',
-
-        Generic.Heading:        '#999999',
-        Generic.Subheading:     '#aaaaaa',
-        Generic.Deleted:        'bg:#ffdddd #000000',
-        Generic.Inserted:       'bg:#ddffdd #000000',
-        Generic.Error:          '#aa0000',
-        Generic.Emph:           'italic',
-        Generic.Strong:         'bold',
-        Generic.Prompt:         '#555555',
-        Generic.Output:         '#888888',
-        Generic.Traceback:      '#aa0000',
-
-        Error:                  'bg:#e3d2d2 #a61717'
-    }
diff --git a/python/ext-libs/pygments/styles/vim.py b/python/ext-libs/pygments/styles/vim.py
deleted file mode 100644
index a5462db..0000000
--- a/python/ext-libs/pygments/styles/vim.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.vim
-    ~~~~~~~~~~~~~~~~~~~
-
-    A highlighting style for Pygments, inspired by vim.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Number, Operator, Generic, Whitespace, Token
-
-
-class VimStyle(Style):
-    """
-    Styles somewhat like vim 7.0
-    """
-
-    background_color = "#000000"
-    highlight_color = "#222222"
-    default_style = "#cccccc"
-
-    styles = {
-        Token:                     "#cccccc",
-        Whitespace:                "",
-        Comment:                   "#000080",
-        Comment.Preproc:           "",
-        Comment.Special:           "bold #cd0000",
-
-        Keyword:                   "#cdcd00",
-        Keyword.Declaration:       "#00cd00",
-        Keyword.Namespace:         "#cd00cd",
-        Keyword.Pseudo:            "",
-        Keyword.Type:              "#00cd00",
-
-        Operator:                  "#3399cc",
-        Operator.Word:             "#cdcd00",
-
-        Name:                      "",
-        Name.Class:                "#00cdcd",
-        Name.Builtin:              "#cd00cd",
-        Name.Exception:            "bold #666699",
-        Name.Variable:             "#00cdcd",
-
-        String:                    "#cd0000",
-        Number:                    "#cd00cd",
-
-        Generic.Heading:           "bold #000080",
-        Generic.Subheading:        "bold #800080",
-        Generic.Deleted:           "#cd0000",
-        Generic.Inserted:          "#00cd00",
-        Generic.Error:             "#FF0000",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold #000080",
-        Generic.Output:            "#888",
-        Generic.Traceback:         "#04D",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/styles/vs.py b/python/ext-libs/pygments/styles/vs.py
deleted file mode 100644
index 14a56fa..0000000
--- a/python/ext-libs/pygments/styles/vs.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.styles.vs
-    ~~~~~~~~~~~~~~~~~~
-
-    Simple style with MS Visual Studio colors.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
-     Operator, Generic
-
-
-class VisualStudioStyle(Style):
-
-    background_color = "#ffffff"
-    default_style = ""
-
-    styles = {
-        Comment:                   "#008000",
-        Comment.Preproc:           "#0000ff",
-        Keyword:                   "#0000ff",
-        Operator.Word:             "#0000ff",
-        Keyword.Type:              "#2b91af",
-        Name.Class:                "#2b91af",
-        String:                    "#a31515",
-
-        Generic.Heading:           "bold",
-        Generic.Subheading:        "bold",
-        Generic.Emph:              "italic",
-        Generic.Strong:            "bold",
-        Generic.Prompt:            "bold",
-
-        Error:                     "border:#FF0000"
-    }
diff --git a/python/ext-libs/pygments/token.py b/python/ext-libs/pygments/token.py
deleted file mode 100644
index 19a83f2..0000000
--- a/python/ext-libs/pygments/token.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.token
-    ~~~~~~~~~~~~~~
-
-    Basic token types and the standard tokens.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-class _TokenType(tuple):
-    parent = None
-
-    def split(self):
-        buf = []
-        node = self
-        while node is not None:
-            buf.append(node)
-            node = node.parent
-        buf.reverse()
-        return buf
-
-    def __init__(self, *args):
-        # no need to call super.__init__
-        self.subtypes = set()
-
-    def __contains__(self, val):
-        return self is val or (
-            type(val) is self.__class__ and
-            val[:len(self)] == self
-        )
-
-    def __getattr__(self, val):
-        if not val or not val[0].isupper():
-            return tuple.__getattribute__(self, val)
-        new = _TokenType(self + (val,))
-        setattr(self, val, new)
-        self.subtypes.add(new)
-        new.parent = self
-        return new
-
-    def __repr__(self):
-        return 'Token' + (self and '.' or '') + '.'.join(self)
-
-
-Token       = _TokenType()
-
-# Special token types
-Text        = Token.Text
-Whitespace  = Text.Whitespace
-Error       = Token.Error
-# Text that doesn't belong to this lexer (e.g. HTML in PHP)
-Other       = Token.Other
-
-# Common token types for source code
-Keyword     = Token.Keyword
-Name        = Token.Name
-Literal     = Token.Literal
-String      = Literal.String
-Number      = Literal.Number
-Punctuation = Token.Punctuation
-Operator    = Token.Operator
-Comment     = Token.Comment
-
-# Generic types for non-source code
-Generic     = Token.Generic
-
-# String and some others are not direct childs of Token.
-# alias them:
-Token.Token = Token
-Token.String = String
-Token.Number = Number
-
-
-def is_token_subtype(ttype, other):
-    """
-    Return True if ``ttype`` is a subtype of ``other``.
-
-    exists for backwards compatibility. use ``ttype in other`` now.
-    """
-    return ttype in other
-
-
-def string_to_tokentype(s):
-    """
-    Convert a string into a token type::
-
-        >>> string_to_token('String.Double')
-        Token.Literal.String.Double
-        >>> string_to_token('Token.Literal.Number')
-        Token.Literal.Number
-        >>> string_to_token('')
-        Token
-
-    Tokens that are already tokens are returned unchanged:
-
-        >>> string_to_token(String)
-        Token.Literal.String
-    """
-    if isinstance(s, _TokenType):
-        return s
-    if not s:
-        return Token
-    node = Token
-    for item in s.split('.'):
-        node = getattr(node, item)
-    return node
-
-
-# Map standard token types to short names, used in CSS class naming.
-# If you add a new item, please be sure to run this file to perform
-# a consistency check for duplicate values.
-STANDARD_TYPES = {
-    Token:                         '',
-
-    Text:                          '',
-    Whitespace:                    'w',
-    Error:                         'err',
-    Other:                         'x',
-
-    Keyword:                       'k',
-    Keyword.Constant:              'kc',
-    Keyword.Declaration:           'kd',
-    Keyword.Namespace:             'kn',
-    Keyword.Pseudo:                'kp',
-    Keyword.Reserved:              'kr',
-    Keyword.Type:                  'kt',
-
-    Name:                          'n',
-    Name.Attribute:                'na',
-    Name.Builtin:                  'nb',
-    Name.Builtin.Pseudo:           'bp',
-    Name.Class:                    'nc',
-    Name.Constant:                 'no',
-    Name.Decorator:                'nd',
-    Name.Entity:                   'ni',
-    Name.Exception:                'ne',
-    Name.Function:                 'nf',
-    Name.Property:                 'py',
-    Name.Label:                    'nl',
-    Name.Namespace:                'nn',
-    Name.Other:                    'nx',
-    Name.Tag:                      'nt',
-    Name.Variable:                 'nv',
-    Name.Variable.Class:           'vc',
-    Name.Variable.Global:          'vg',
-    Name.Variable.Instance:        'vi',
-
-    Literal:                       'l',
-    Literal.Date:                  'ld',
-
-    String:                        's',
-    String.Backtick:               'sb',
-    String.Char:                   'sc',
-    String.Doc:                    'sd',
-    String.Double:                 's2',
-    String.Escape:                 'se',
-    String.Heredoc:                'sh',
-    String.Interpol:               'si',
-    String.Other:                  'sx',
-    String.Regex:                  'sr',
-    String.Single:                 's1',
-    String.Symbol:                 'ss',
-
-    Number:                        'm',
-    Number.Float:                  'mf',
-    Number.Hex:                    'mh',
-    Number.Integer:                'mi',
-    Number.Integer.Long:           'il',
-    Number.Oct:                    'mo',
-
-    Operator:                      'o',
-    Operator.Word:                 'ow',
-
-    Punctuation:                   'p',
-
-    Comment:                       'c',
-    Comment.Multiline:             'cm',
-    Comment.Preproc:               'cp',
-    Comment.Single:                'c1',
-    Comment.Special:               'cs',
-
-    Generic:                       'g',
-    Generic.Deleted:               'gd',
-    Generic.Emph:                  'ge',
-    Generic.Error:                 'gr',
-    Generic.Heading:               'gh',
-    Generic.Inserted:              'gi',
-    Generic.Output:                'go',
-    Generic.Prompt:                'gp',
-    Generic.Strong:                'gs',
-    Generic.Subheading:            'gu',
-    Generic.Traceback:             'gt',
-}
diff --git a/python/ext-libs/pygments/unistring.py b/python/ext-libs/pygments/unistring.py
deleted file mode 100644
index e46b11c..0000000
--- a/python/ext-libs/pygments/unistring.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.unistring
-    ~~~~~~~~~~~~~~~~~~
-
-    Strings of all Unicode characters of a certain category.
-    Used for matching in Unicode-aware languages. Run to regenerate.
-
-    Inspired by chartypes_create.py from the MoinMoin project.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-from pygments.util import u_prefix
-
-Cc = u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f'
-
-Cf = u'\xad\u0600\u0601\u0602\u0603\u06dd\u070f\u17b4\u17b5\u200b\u200c\u200d\u200e\u200f\u202a\u202b\u202c\u202d\u202e\u2060\u2061\u2062\u2063\u2064\u206a\u206b\u206c\u206d\u206e\u206f\ufeff\ufff9\ufffa\ufffb'
-
-Cn = u'\u0378\u0379\u037f\u0380\u0381\u0382\u0383\u038b\u038d\u03a2\u0526\u0527\u0528\u0529\u052a\u052b\u052c\u052d\u052e\u052f\u0530\u0557\u0558\u0560\u0588\u058b\u058c\u058d\u058e\u058f\u0590\u05c8\u05c9\u05ca\u05cb\u05cc\u05cd\u05ce\u05cf\u05eb\u05ec\u05ed\u05ee\u05ef\u05f5\u05f6\u05f7\u05f8\u05f9\u05fa\u05fb\u05fc\u05fd\u05fe\u05ff\u0604\u0605\u061c\u061d\u0620\u065f\u070e\u074b\u074c\u07b2\u07b3\u07b4\u07b5\u07b6\u07b7\u07b8\u07b9\u07ba\u07bb\u07bc\u07bd\u07be\u07bf\u07fb\u07fc\u07f [...]
-
-Co = u'\ue000\ue001\ue002\ue003\ue004\ue005\ue006\ue007\ue008\ue009\ue00a\ue00b\ue00c\ue00d\ue00e\ue00f\ue010\ue011\ue012\ue013\ue014\ue015\ue016\ue017\ue018\ue019\ue01a\ue01b\ue01c\ue01d\ue01e\ue01f\ue020\ue021\ue022\ue023\ue024\ue025\ue026\ue027\ue028\ue029\ue02a\ue02b\ue02c\ue02d\ue02e\ue02f\ue030\ue031\ue032\ue033\ue034\ue035\ue036\ue037\ue038\ue039\ue03a\ue03b\ue03c\ue03d\ue03e\ue03f\ue040\ue041\ue042\ue043\ue044\ue045\ue046\ue047\ue048\ue049\ue04a\ue04b\ue04c\ue04d\ue04e\ue04f\ue05 [...]
-
-try:
-    Cs = eval(u_prefix + r"'\ud800\ud801\ud802\ud803\ud804\ud805\ud806\ud807\ud808\ud809\ud80a\ud80b\ud80c\ud80d\ud80e\ud80f\ud810\ud811\ud812\ud813\ud814\ud815\ud816\ud817\ud818\ud819\ud81a\ud81b\ud81c\ud81d\ud81e\ud81f\ud820\ud821\ud822\ud823\ud824\ud825\ud826\ud827\ud828\ud829\ud82a\ud82b\ud82c\ud82d\ud82e\ud82f\ud830\ud831\ud832\ud833\ud834\ud835\ud836\ud837\ud838\ud839\ud83a\ud83b\ud83c\ud83d\ud83e\ud83f\ud840\ud841\ud842\ud843\ud844\ud845\ud846\ud847\ud848\ud849\ud84a\ud84b\ud84c\u [...]
-except UnicodeDecodeError:
-    Cs = '' # Jython can't handle isolated surrogates
-
-Ll = u'abcdefghijklmnopqrstuvwxyz\xaa\xb5\xba\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\ [...]
-
-Lm = u'\u02b0\u02b1\u02b2\u02b3\u02b4\u02b5\u02b6\u02b7\u02b8\u02b9\u02ba\u02bb\u02bc\u02bd\u02be\u02bf\u02c0\u02c1\u02c6\u02c7\u02c8\u02c9\u02ca\u02cb\u02cc\u02cd\u02ce\u02cf\u02d0\u02d1\u02e0\u02e1\u02e2\u02e3\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5\u06e6\u07f4\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78\u1c79\u1c7a\u1c7b\u1c7c\u1c7d\u1d2c\u1d2d\u1d2e\u1d2f\u1d30\u1d31\u1d32\u1d33\u1d34\u1d35\u1d36\u1d37\u1d38\u1d39\u1d3a\u1d3b\u1d3c\u1d3d\u1d3 [...]
-
-Lo = u'\u01bb\u01c0\u01c1\u01c2\u01c3\u0294\u05d0\u05d1\u05d2\u05d3\u05d4\u05d5\u05d6\u05d7\u05d8\u05d9\u05da\u05db\u05dc\u05dd\u05de\u05df\u05e0\u05e1\u05e2\u05e3\u05e4\u05e5\u05e6\u05e7\u05e8\u05e9\u05ea\u05f0\u05f1\u05f2\u0621\u0622\u0623\u0624\u0625\u0626\u0627\u0628\u0629\u062a\u062b\u062c\u062d\u062e\u062f\u0630\u0631\u0632\u0633\u0634\u0635\u0636\u0637\u0638\u0639\u063a\u063b\u063c\u063d\u063e\u063f\u0641\u0642\u0643\u0644\u0645\u0646\u0647\u0648\u0649\u064a\u066e\u066f\u0671\u067 [...]
-
-Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88\u1f89\u1f8a\u1f8b\u1f8c\u1f8d\u1f8e\u1f8f\u1f98\u1f99\u1f9a\u1f9b\u1f9c\u1f9d\u1f9e\u1f9f\u1fa8\u1fa9\u1faa\u1fab\u1fac\u1fad\u1fae\u1faf\u1fbc\u1fcc\u1ffc'
-
-Lu = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0 [...]
-
-Mc = u'\u0903\u093e\u093f\u0940\u0949\u094a\u094b\u094c\u094e\u0982\u0983\u09be\u09bf\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e\u0a3f\u0a40\u0a83\u0abe\u0abf\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6\u0bc7\u0bc8\u0bca\u0bcb\u0bcc\u0bd7\u0c01\u0c02\u0c03\u0c41\u0c42\u0c43\u0c44\u0c82\u0c83\u0cbe\u0cc0\u0cc1\u0cc2\u0cc3\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e\u0d3f\u0d40\u0d46\u0d47\u0d48\u0d4a\u0d4 [...]
-
-Me = u'\u0488\u0489\u06de\u20dd\u20de\u20df\u20e0\u20e2\u20e3\u20e4\ua670\ua671\ua672'
-
-Mn = u'\u0300\u0301\u0302\u0303\u0304\u0305\u0306\u0307\u0308\u0309\u030a\u030b\u030c\u030d\u030e\u030f\u0310\u0311\u0312\u0313\u0314\u0315\u0316\u0317\u0318\u0319\u031a\u031b\u031c\u031d\u031e\u031f\u0320\u0321\u0322\u0323\u0324\u0325\u0326\u0327\u0328\u0329\u032a\u032b\u032c\u032d\u032e\u032f\u0330\u0331\u0332\u0333\u0334\u0335\u0336\u0337\u0338\u0339\u033a\u033b\u033c\u033d\u033e\u033f\u0340\u0341\u0342\u0343\u0344\u0345\u0346\u0347\u0348\u0349\u034a\u034b\u034c\u034d\u034e\u034f\u035 [...]
-
-Nd = u'0123456789\u0660\u0661\u0662\u0663\u0664\u0665\u0666\u0667\u0668\u0669\u06f0\u06f1\u06f2\u06f3\u06f4\u06f5\u06f6\u06f7\u06f8\u06f9\u07c0\u07c1\u07c2\u07c3\u07c4\u07c5\u07c6\u07c7\u07c8\u07c9\u0966\u0967\u0968\u0969\u096a\u096b\u096c\u096d\u096e\u096f\u09e6\u09e7\u09e8\u09e9\u09ea\u09eb\u09ec\u09ed\u09ee\u09ef\u0a66\u0a67\u0a68\u0a69\u0a6a\u0a6b\u0a6c\u0a6d\u0a6e\u0a6f\u0ae6\u0ae7\u0ae8\u0ae9\u0aea\u0aeb\u0aec\u0aed\u0aee\u0aef\u0b66\u0b67\u0b68\u0b69\u0b6a\u0b6b\u0b6c\u0b6d\u0b6e\ [...]
-
-Nl = u'\u16ee\u16ef\u16f0\u2160\u2161\u2162\u2163\u2164\u2165\u2166\u2167\u2168\u2169\u216a\u216b\u216c\u216d\u216e\u216f\u2170\u2171\u2172\u2173\u2174\u2175\u2176\u2177\u2178\u2179\u217a\u217b\u217c\u217d\u217e\u217f\u2180\u2181\u2182\u2185\u2186\u2187\u2188\u3007\u3021\u3022\u3023\u3024\u3025\u3026\u3027\u3028\u3029\u3038\u3039\u303a\ua6e6\ua6e7\ua6e8\ua6e9\ua6ea\ua6eb\ua6ec\ua6ed\ua6ee\ua6ef'
-
-No = u'\xb2\xb3\xb9\xbc\xbd\xbe\u09f4\u09f5\u09f6\u09f7\u09f8\u09f9\u0bf0\u0bf1\u0bf2\u0c78\u0c79\u0c7a\u0c7b\u0c7c\u0c7d\u0c7e\u0d70\u0d71\u0d72\u0d73\u0d74\u0d75\u0f2a\u0f2b\u0f2c\u0f2d\u0f2e\u0f2f\u0f30\u0f31\u0f32\u0f33\u1369\u136a\u136b\u136c\u136d\u136e\u136f\u1370\u1371\u1372\u1373\u1374\u1375\u1376\u1377\u1378\u1379\u137a\u137b\u137c\u17f0\u17f1\u17f2\u17f3\u17f4\u17f5\u17f6\u17f7\u17f8\u17f9\u2070\u2074\u2075\u2076\u2077\u2078\u2079\u2080\u2081\u2082\u2083\u2084\u2085\u2086\u208 [...]
-
-Pc = u'_\u203f\u2040\u2054\ufe33\ufe34\ufe4d\ufe4e\ufe4f\uff3f'
-
-Pd = u'\\-\u058a\u05be\u1400\u1806\u2010\u2011\u2012\u2013\u2014\u2015\u2e17\u2e1a\u301c\u3030\u30a0\ufe31\ufe32\ufe58\ufe63\uff0d'
-
-Pe = u')]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e\u301f\ufd3f\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-
-Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-
-Pi = u'\xab\u2018\u201b\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-
-Po = u'!"#%&\'*,./:;?@\\\\\xa1\xb7\xbf\u037e\u0387\u055a\u055b\u055c\u055d\u055e\u055f\u0589\u05c0\u05c3\u05c6\u05f3\u05f4\u0609\u060a\u060c\u060d\u061b\u061e\u061f\u066a\u066b\u066c\u066d\u06d4\u0700\u0701\u0702\u0703\u0704\u0705\u0706\u0707\u0708\u0709\u070a\u070b\u070c\u070d\u07f7\u07f8\u07f9\u0830\u0831\u0832\u0833\u0834\u0835\u0836\u0837\u0838\u0839\u083a\u083b\u083c\u083d\u083e\u0964\u0965\u0970\u0df4\u0e4f\u0e5a\u0e5b\u0f04\u0f05\u0f06\u0f07\u0f08\u0f09\u0f0a\u0f0b\u0f0c\u0f0d\u0f [...]
-
-Ps = u'([{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3e\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-
-Sc = u'$\xa2\xa3\xa4\xa5\u060b\u09f2\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0\u20a1\u20a2\u20a3\u20a4\u20a5\u20a6\u20a7\u20a8\u20a9\u20aa\u20ab\u20ac\u20ad\u20ae\u20af\u20b0\u20b1\u20b2\u20b3\u20b4\u20b5\u20b6\u20b7\u20b8\ua838\ufdfc\ufe69\uff04\uffe0\uffe1\uffe5\uffe6'
-
-Sk = u'^`\xa8\xaf\xb4\xb8\u02c2\u02c3\u02c4\u02c5\u02d2\u02d3\u02d4\u02d5\u02d6\u02d7\u02d8\u02d9\u02da\u02db\u02dc\u02dd\u02de\u02df\u02e5\u02e6\u02e7\u02e8\u02e9\u02ea\u02eb\u02ed\u02ef\u02f0\u02f1\u02f2\u02f3\u02f4\u02f5\u02f6\u02f7\u02f8\u02f9\u02fa\u02fb\u02fc\u02fd\u02fe\u02ff\u0375\u0384\u0385\u1fbd\u1fbf\u1fc0\u1fc1\u1fcd\u1fce\u1fcf\u1fdd\u1fde\u1fdf\u1fed\u1fee\u1fef\u1ffd\u1ffe\u309b\u309c\ua700\ua701\ua702\ua703\ua704\ua705\ua706\ua707\ua708\ua709\ua70a\ua70b\ua70c\ua70d\ua70 [...]
-
-Sm = u'+<=>|~\xac\xb1\xd7\xf7\u03f6\u0606\u0607\u0608\u2044\u2052\u207a\u207b\u207c\u208a\u208b\u208c\u2140\u2141\u2142\u2143\u2144\u214b\u2190\u2191\u2192\u2193\u2194\u219a\u219b\u21a0\u21a3\u21a6\u21ae\u21ce\u21cf\u21d2\u21d4\u21f4\u21f5\u21f6\u21f7\u21f8\u21f9\u21fa\u21fb\u21fc\u21fd\u21fe\u21ff\u2200\u2201\u2202\u2203\u2204\u2205\u2206\u2207\u2208\u2209\u220a\u220b\u220c\u220d\u220e\u220f\u2210\u2211\u2212\u2213\u2214\u2215\u2216\u2217\u2218\u2219\u221a\u221b\u221c\u221d\u221e\u221f\ [...]
-
-So = u'\xa6\xa7\xa9\xae\xb0\xb6\u0482\u060e\u060f\u06e9\u06fd\u06fe\u07f6\u09fa\u0b70\u0bf3\u0bf4\u0bf5\u0bf6\u0bf7\u0bf8\u0bfa\u0c7f\u0cf1\u0cf2\u0d79\u0f01\u0f02\u0f03\u0f13\u0f14\u0f15\u0f16\u0f17\u0f1a\u0f1b\u0f1c\u0f1d\u0f1e\u0f1f\u0f34\u0f36\u0f38\u0fbe\u0fbf\u0fc0\u0fc1\u0fc2\u0fc3\u0fc4\u0fc5\u0fc7\u0fc8\u0fc9\u0fca\u0fcb\u0fcc\u0fce\u0fcf\u0fd5\u0fd6\u0fd7\u0fd8\u109e\u109f\u1360\u1390\u1391\u1392\u1393\u1394\u1395\u1396\u1397\u1398\u1399\u1940\u19e0\u19e1\u19e2\u19e3\u19e4\u19e [...]
-
-Zl = u'\u2028'
-
-Zp = u'\u2029'
-
-Zs = u' \xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'
-
-cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-
-# Generated from unidata 5.2.0
-
-def combine(*args):
-    return u''.join([globals()[cat] for cat in args])
-
-xid_start = u'\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0621-\u063A\u0640\u0641-\u064A\u066E-\u066F\u0671-\u06D3\u06D5\u06E5-\u06E6\u06EE-\u06EF\u06FA-\u06FC\u06FF\u0 [...]
-
-xid_continue = u'\u0030-\u0039\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00B7\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0300-\u036F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u0483-\u0486\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05B9\u05BB-\u05BD\u05BF\u05C1-\u05C2\u05C4-\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2 [...]
-
-def allexcept(*args):
-    newcats = cats[:]
-    for arg in args:
-        newcats.remove(arg)
-    return u''.join([globals()[cat] for cat in newcats])
-
-if __name__ == '__main__':
-    import unicodedata
-
-    categories = {}
-
-    f = open(__file__)
-    try:
-        content = f.read()
-    finally:
-        f.close()
-
-    header = content[:content.find('Cc =')]
-    footer = content[content.find("def combine("):]
-
-    for code in range(65535):
-        c = unichr(code)
-        cat = unicodedata.category(c)
-        if ord(c) == 0xdc00:
-            # Hack to avoid combining this combining with the preceding high
-            # surrogate, 0xdbff, when doing a repr.
-            c = u'\\' + c
-        elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d):
-            # Escape regex metachars.
-            c = u'\\' + c
-        categories.setdefault(cat, []).append(c)
-
-    f = open(__file__, 'w')
-    f.write(header)
-
-    for cat in sorted(categories):
-        val = u''.join(categories[cat])
-        if cat == 'Cs':
-            # Jython can't handle isolated surrogates
-            f.write("""\
-try:
-    Cs = eval(u_prefix + r"%s")
-except UnicodeDecodeError:
-    Cs = '' # Jython can't handle isolated surrogates\n\n""" % repr(val).lstrip('u'))
-        else:
-            f.write('%s = %r\n\n' % (cat, val))
-    f.write('cats = %r\n\n' % sorted(categories.keys()))
-    f.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
-
-    f.write(footer)
-    f.close()
diff --git a/python/ext-libs/pygments/util.py b/python/ext-libs/pygments/util.py
deleted file mode 100644
index caac114..0000000
--- a/python/ext-libs/pygments/util.py
+++ /dev/null
@@ -1,277 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    pygments.util
-    ~~~~~~~~~~~~~
-
-    Utility functions.
-
-    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import codecs
-
-
-split_path_re = re.compile(r'[/\\ ]')
-doctype_lookup_re = re.compile(r'''(?smx)
-    (<\?.*?\?>)?\s*
-    <!DOCTYPE\s+(
-     [a-zA-Z_][a-zA-Z0-9]*\s+
-     [a-zA-Z_][a-zA-Z0-9]*\s+
-     "[^"]*")
-     [^>]*>
-''')
-tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>(?uism)')
-
-
-class ClassNotFound(ValueError):
-    """
-    If one of the get_*_by_* functions didn't find a matching class.
-    """
-
-
-class OptionError(Exception):
-    pass
-
-
-def get_choice_opt(options, optname, allowed, default=None, normcase=False):
-    string = options.get(optname, default)
-    if normcase:
-        string = string.lower()
-    if string not in allowed:
-        raise OptionError('Value for option %s must be one of %s' %
-                          (optname, ', '.join(map(str, allowed))))
-    return string
-
-
-def get_bool_opt(options, optname, default=None):
-    string = options.get(optname, default)
-    if isinstance(string, bool):
-        return string
-    elif isinstance(string, int):
-        return bool(string)
-    elif not isinstance(string, basestring):
-        raise OptionError('Invalid type %r for option %s; use '
-                          '1/0, yes/no, true/false, on/off' % (
-                          string, optname))
-    elif string.lower() in ('1', 'yes', 'true', 'on'):
-        return True
-    elif string.lower() in ('0', 'no', 'false', 'off'):
-        return False
-    else:
-        raise OptionError('Invalid value %r for option %s; use '
-                          '1/0, yes/no, true/false, on/off' % (
-                          string, optname))
-
-
-def get_int_opt(options, optname, default=None):
-    string = options.get(optname, default)
-    try:
-        return int(string)
-    except TypeError:
-        raise OptionError('Invalid type %r for option %s; you '
-                          'must give an integer value' % (
-                          string, optname))
-    except ValueError:
-        raise OptionError('Invalid value %r for option %s; you '
-                          'must give an integer value' % (
-                          string, optname))
-
-
-def get_list_opt(options, optname, default=None):
-    val = options.get(optname, default)
-    if isinstance(val, basestring):
-        return val.split()
-    elif isinstance(val, (list, tuple)):
-        return list(val)
-    else:
-        raise OptionError('Invalid type %r for option %s; you '
-                          'must give a list value' % (
-                          val, optname))
-
-
-def docstring_headline(obj):
-    if not obj.__doc__:
-        return ''
-    res = []
-    for line in obj.__doc__.strip().splitlines():
-        if line.strip():
-            res.append(" " + line.strip())
-        else:
-            break
-    return ''.join(res).lstrip()
-
-
-def make_analysator(f):
-    """
-    Return a static text analysation function that
-    returns float values.
-    """
-    def text_analyse(text):
-        try:
-            rv = f(text)
-        except Exception:
-            return 0.0
-        if not rv:
-            return 0.0
-        try:
-            return min(1.0, max(0.0, float(rv)))
-        except (ValueError, TypeError):
-            return 0.0
-    text_analyse.__doc__ = f.__doc__
-    return staticmethod(text_analyse)
-
-
-def shebang_matches(text, regex):
-    """
-    Check if the given regular expression matches the last part of the
-    shebang if one exists.
-
-        >>> from pygments.util import shebang_matches
-        >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
-        True
-        >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
-        True
-        >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
-        False
-        >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
-        False
-        >>> shebang_matches('#!/usr/bin/startsomethingwith python',
-        ...                 r'python(2\.\d)?')
-        True
-
-    It also checks for common windows executable file extensions::
-
-        >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
-        True
-
-    Parameters (``'-f'`` or ``'--foo'`` are ignored so ``'perl'`` does
-    the same as ``'perl -e'``)
-
-    Note that this method automatically searches the whole string (eg:
-    the regular expression is wrapped in ``'^$'``)
-    """
-    index = text.find('\n')
-    if index >= 0:
-        first_line = text[:index].lower()
-    else:
-        first_line = text.lower()
-    if first_line.startswith('#!'):
-        try:
-            found = [x for x in split_path_re.split(first_line[2:].strip())
-                     if x and not x.startswith('-')][-1]
-        except IndexError:
-            return False
-        regex = re.compile('^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
-        if regex.search(found) is not None:
-            return True
-    return False
-
-
-def doctype_matches(text, regex):
-    """
-    Check if the doctype matches a regular expression (if present).
-    Note that this method only checks the first part of a DOCTYPE.
-    eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
-    """
-    m = doctype_lookup_re.match(text)
-    if m is None:
-        return False
-    doctype = m.group(2)
-    return re.compile(regex).match(doctype.strip()) is not None
-
-
-def html_doctype_matches(text):
-    """
-    Check if the file looks like it has a html doctype.
-    """
-    return doctype_matches(text, r'html\s+PUBLIC\s+"-//W3C//DTD X?HTML.*')
-
-
-_looks_like_xml_cache = {}
-def looks_like_xml(text):
-    """
-    Check if a doctype exists or if we have some tags.
-    """
-    key = hash(text)
-    try:
-        return _looks_like_xml_cache[key]
-    except KeyError:
-        m = doctype_lookup_re.match(text)
-        if m is not None:
-            return True
-        rv = tag_re.search(text[:1000]) is not None
-        _looks_like_xml_cache[key] = rv
-        return rv
-
-# Python narrow build compatibility
-
-def _surrogatepair(c):
-    return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
-
-def unirange(a, b):
-    """
-    Returns a regular expression string to match the given non-BMP range.
-    """
-    if b < a:
-        raise ValueError("Bad character range")
-    if a < 0x10000 or b < 0x10000:
-        raise ValueError("unirange is only defined for non-BMP ranges")
-
-    if sys.maxunicode > 0xffff:
-        # wide build
-        return u'[%s-%s]' % (unichr(a), unichr(b))
-    else:
-        # narrow build stores surrogates, and the 're' module handles them
-        # (incorrectly) as characters.  Since there is still ordering among
-        # these characters, expand the range to one that it understands.  Some
-        # background in http://bugs.python.org/issue3665 and
-        # http://bugs.python.org/issue12749
-        #
-        # Additionally, the lower constants are using unichr rather than
-        # literals because jython [which uses the wide path] can't load this
-        # file if they are literals.
-        ah, al = _surrogatepair(a)
-        bh, bl = _surrogatepair(b)
-        if ah == bh:
-            return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl))
-        else:
-            buf = []
-            buf.append(u'%s[%s-%s]' %
-                       (unichr(ah), unichr(al),
-                        ah == bh and unichr(bl) or unichr(0xdfff)))
-            if ah - bh > 1:
-                buf.append(u'[%s-%s][%s-%s]' %
-                           unichr(ah+1), unichr(bh-1), unichr(0xdc00), unichr(0xdfff))
-            if ah != bh:
-                buf.append(u'%s[%s-%s]' %
-                           (unichr(bh), unichr(0xdc00), unichr(bl)))
-
-            return u'(?:' + u'|'.join(buf) + u')'
-
-# Python 2/3 compatibility
-
-if sys.version_info < (3,0):
-    b = bytes = str
-    u_prefix = 'u'
-    import StringIO, cStringIO
-    BytesIO = cStringIO.StringIO
-    StringIO = StringIO.StringIO
-    uni_open = codecs.open
-else:
-    import builtins
-    bytes = builtins.bytes
-    u_prefix = ''
-    def b(s):
-        if isinstance(s, str):
-            return bytes(map(ord, s))
-        elif isinstance(s, bytes):
-            return s
-        else:
-            raise TypeError("Invalid argument %r for b()" % (s,))
-    import io
-    BytesIO = io.BytesIO
-    StringIO = io.StringIO
-    uni_open = builtins.open
diff --git a/python/ext-libs/requests-2.10.0.dist-info/DESCRIPTION.rst b/python/ext-libs/requests-2.10.0.dist-info/DESCRIPTION.rst
deleted file mode 100644
index e3f9133..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/DESCRIPTION.rst
+++ /dev/null
@@ -1,1257 +0,0 @@
-Requests: HTTP for Humans
-=========================
-
-.. image:: https://img.shields.io/pypi/v/requests.svg
-    :target: https://pypi.python.org/pypi/requests
-
-.. image:: https://img.shields.io/pypi/dm/requests.svg
-        :target: https://pypi.python.org/pypi/requests
-
-Requests is the only *Non-GMO* HTTP library for Python, safe for human
-consumption.
-
-**Warning:** Recreational use of other HTTP libraries may result in dangerous side-effects,
-including: security vulnerabilities, verbose code, reinventing the wheel,
-constantly reading documentation, depression, headaches, or even death.
-
-Behold, the power of Requests:
-
-.. code-block:: python
-
-    >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))
-    >>> r.status_code
-    200
-    >>> r.headers['content-type']
-    'application/json; charset=utf8'
-    >>> r.encoding
-    'utf-8'
-    >>> r.text
-    u'{"type":"User"...'
-    >>> r.json()
-    {u'disk_usage': 368627, u'private_gists': 484, ...}
-
-See `the similar code, sans Requests <https://gist.github.com/973705>`_.
-
-Requests allows you to send *organic, grass-fed* HTTP/1.1 requests, without the
-need for manual labor. There's no need to manually add query strings to your
-URLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling
-are 100% automatic, powered by `urllib3 <https://github.com/shazow/urllib3>`_,
-which is embedded within Requests.
-
-Besides, all the cool kids are doing it. Requests is one of the most
-downloaded Python packages of all time, pulling in over 7,000,000 downloads
-every month. You don't want to be left out!
-
-Feature Support
----------------
-
-Requests is ready for today's web.
-
-- International Domains and URLs
-- Keep-Alive & Connection Pooling
-- Sessions with Cookie Persistence
-- Browser-style SSL Verification
-- Basic/Digest Authentication
-- Elegant Key/Value Cookies
-- Automatic Decompression
-- Automatic Content Decoding
-- Unicode Response Bodies
-- Multipart File Uploads
-- HTTP(S) Proxy Support
-- Connection Timeouts
-- Streaming Downloads
-- ``.netrc`` Support
-- Chunked Requests
-- Thread-safety
-
-Requests supports Python 2.6 — 3.5, and runs great on PyPy.
-
-Installation
-------------
-
-To install Requests, simply:
-
-.. code-block:: bash
-
-    $ pip install requests
-    ✨🍰✨
-
-Satisfaction, guaranteed.
-
-Documentation
--------------
-
-Fantastic documentation is available at http://docs.python-requests.org/, for a limited time only.
-
-
-How to Contribute
------------------
-
-#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
-#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
-#. Write a test which shows that the bug was fixed or that the feature works as expected.
-#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
-
-.. _`the repository`: http://github.com/kennethreitz/requests
-.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
-.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
-
-
-.. :changelog:
-
-Release History
----------------
-
-2.10.0 (04-29-2016)
-+++++++++++++++++++
-
-**New Features**
-
-- SOCKS Proxy Support! (requires PySocks; $ pip install requests[socks])
-
-**Miscellaneous**
-
-- Updated bundled urllib3 to 1.15.1.
-
-2.9.2 (04-29-2016)
-++++++++++++++++++
-
-**Improvements**
-
-- Change built-in CaseInsensitiveDict (used for headers) to use OrderedDict
-  as its underlying datastore.
-
-**Bugfixes**
-
-- Don't use redirect_cache if allow_redirects=False
-- When passed objects that throw exceptions from ``tell()``, send them via
-  chunked transfer encoding instead of failing.
-- Raise a ProxyError for proxy related connection issues.
-
-2.9.1 (2015-12-21)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Resolve regression introduced in 2.9.0 that made it impossible to send binary
-  strings as bodies in Python 3.
-- Fixed errors when calculating cookie expiration dates in certain locales.
-
-**Miscellaneous**
-
-- Updated bundled urllib3 to 1.13.1.
-
-2.9.0 (2015-12-15)
-++++++++++++++++++
-
-**Minor Improvements** (Backwards compatible)
-
-- The ``verify`` keyword argument now supports being passed a path to a
-  directory of CA certificates, not just a single-file bundle.
-- Warnings are now emitted when sending files opened in text mode.
-- Added the 511 Network Authentication Required status code to the status code
-  registry.
-
-**Bugfixes**
-
-- For file-like objects that are not seeked to the very beginning, we now
-  send the content length for the number of bytes we will actually read, rather
-  than the total size of the file, allowing partial file uploads.
-- When uploading file-like objects, if they are empty or have no obvious
-  content length we set ``Transfer-Encoding: chunked`` rather than
-  ``Content-Length: 0``.
-- We correctly receive the response in buffered mode when uploading chunked
-  bodies.
-- We now handle being passed a query string as a bytestring on Python 3, by
-  decoding it as UTF-8.
-- Sessions are now closed in all cases (exceptional and not) when using the
-  functional API rather than leaking and waiting for the garbage collector to
-  clean them up.
-- Correctly handle digest auth headers with a malformed ``qop`` directive that
-  contains no token, by treating it the same as if no ``qop`` directive was
-  provided at all.
-- Minor performance improvements when removing specific cookies by name.
-
-**Miscellaneous**
-
-- Updated urllib3 to 1.13.
-
-2.8.1 (2015-10-13)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
-  bundle.
-- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
-  ``ConnectionError``
-- When using the PreparedRequest flow, requests will now correctly respect the
-  ``json`` parameter. Broken in 2.8.0.
-- When using the PreparedRequest flow, requests will now correctly handle a
-  Unicode-string method name on Python 2. Broken in 2.8.0.
-
-2.8.0 (2015-10-05)
-++++++++++++++++++
-
-**Minor Improvements** (Backwards Compatible)
-
-- Requests now supports per-host proxies. This allows the ``proxies``
-  dictionary to have entries of the form
-  ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
-  in preference to the previously-supported scheme-specific ones, but the
-  previous syntax will continue to work.
-- ``Response.raise_for_status`` now prints the URL that failed as part of the
-  exception message.
-- ``requests.utils.get_netrc_auth`` now takes an ``raise_errors`` kwarg,
-  defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
-  exceptions to be thrown.
-- Change to bundled projects import logic to make it easier to unbundle
-  requests downstream.
-- Changed the default User-Agent string to avoid leaking data on Linux: now
-  contains only the requests version.
-
-**Bugfixes**
-
-- The ``json`` parameter to ``post()`` and friends will now only be used if
-  neither ``data`` nor ``files`` are present, consistent with the
-  documentation.
-- We now ignore empty fields in the ``NO_PROXY`` environment variable.
-- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
-  ``stream=True`` with ``contextlib.closing``.
-- Prevented bugs where we would attempt to return the same connection back to
-  the connection pool twice when sending a Chunked body.
-- Miscellaneous minor internal changes.
-- Digest Auth support is now thread safe.
-
-**Updates**
-
-- Updated urllib3 to 1.12.
-
-2.7.0 (2015-05-03)
-++++++++++++++++++
-
-This is the first release that follows our new release process. For more, see
-`our documentation
-<http://docs.python-requests.org/en/latest/community/release-process/>`_.
-
-**Bugfixes**
-
-- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
-  encoding and response framing.
-
-2.6.2 (2015-04-23)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Fix regression where compressed data that was sent as chunked data was not
-  properly decompressed. (#2561)
-
-2.6.1 (2015-04-22)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Remove VendorAlias import machinery introduced in v2.5.2.
-
-- Simplify the PreparedRequest.prepare API: We no longer require the user to
-  pass an empty list to the hooks keyword argument. (c.f. #2552)
-
-- Resolve redirects now receives and forwards all of the original arguments to
-  the adapter. (#2503)
-
-- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
-  cannot be encoded in ASCII. (#2540)
-
-- Populate the parsed path of the URI field when performing Digest
-  Authentication. (#2426)
-
-- Copy a PreparedRequest's CookieJar more reliably when it is not an instance
-  of RequestsCookieJar. (#2527)
-
-2.6.0 (2015-03-14)
-++++++++++++++++++
-
-**Bugfixes**
-
-- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
-  without a host value set would use the hostname for the redirected URL
-  exposing requests users to session fixation attacks and potentially cookie
-  stealing. This was disclosed privately by Matthew Daley of
-  `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
-  v2.1.0 to v2.5.3 (inclusive on both ends).
-
-- Fix error when requests is an ``install_requires`` dependency and ``python
-  setup.py test`` is run. (#2462)
-
-- Fix error when urllib3 is unbundled and requests continues to use the
-  vendored import location.
-
-- Include fixes to ``urllib3``'s header handling.
-
-- Requests' handling of unvendored dependencies is now more restrictive.
-
-**Features and Improvements**
-
-- Support bytearrays when passed as parameters in the ``files`` argument.
-  (#2468)
-
-- Avoid data duplication when creating a request with ``str``, ``bytes``, or
-  ``bytearray`` input to the ``files`` argument.
-
-2.5.3 (2015-02-24)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Revert changes to our vendored certificate bundle. For more context see
-  (#2455, #2456, and http://bugs.python.org/issue23476)
-
-2.5.2 (2015-02-23)
-++++++++++++++++++
-
-**Features and Improvements**
-
-- Add sha256 fingerprint support. (`shazow/urllib3#540`_)
-
-- Improve the performance of headers. (`shazow/urllib3#544`_)
-
-**Bugfixes**
-
-- Copy pip's import machinery. When downstream redistributors remove
-  requests.packages.urllib3 the import machinery will continue to let those
-  same symbols work. Example usage in requests' documentation and 3rd-party
-  libraries relying on the vendored copies of urllib3 will work without having
-  to fallback to the system urllib3.
-
-- Attempt to quote parts of the URL on redirect if unquoting and then quoting
-  fails. (#2356)
-
-- Fix filename type check for multipart form-data uploads. (#2411)
-
-- Properly handle the case where a server issuing digest authentication
-  challenges provides both auth and auth-int qop-values. (#2408)
-
-- Fix a socket leak. (`shazow/urllib3#549`_)
-
-- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
-
-- Disable the built-in hostname verification. (`shazow/urllib3#526`_)
-
-- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
-
-**Security**
-
-- Pulled in an updated ``cacert.pem``.
-
-- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
-
-.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
-.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
-.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
-.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
-.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
-.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
-.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
-
-2.5.1 (2014-12-23)
-++++++++++++++++++
-
-**Behavioural Changes**
-
-- Only catch HTTPErrors in raise_for_status (#2382)
-
-**Bugfixes**
-
-- Handle LocationParseError from urllib3 (#2344)
-- Handle file-like object filenames that are not strings (#2379)
-- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
-
-2.5.0 (2014-12-01)
-++++++++++++++++++
-
-**Improvements**
-
-- Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
-- The ``iter_lines`` method on a response now accepts a delimiter with which
-  to split the content (#2295)
-
-**Behavioural Changes**
-
-- Add deprecation warnings to functions in requests.utils that will be removed
-  in 3.0 (#2309)
-- Sessions used by the functional API are always closed (#2326)
-- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
-
-**Bugfixes**
-
-- Only parse the URL once (#2353)
-- Allow Content-Length header to always be overridden (#2332)
-- Properly handle files in HTTPDigestAuth (#2333)
-- Cap redirect_cache size to prevent memory abuse (#2299)
-- Fix HTTPDigestAuth handling of redirects after authenticating successfully
-  (#2253)
-- Fix crash with custom method parameter to Session.request (#2317)
-- Fix how Link headers are parsed using the regular expression library (#2271)
-
-**Documentation**
-
-- Add more references for interlinking (#2348)
-- Update CSS for theme (#2290)
-- Update width of buttons and sidebar (#2289)
-- Replace references of Gittip with Gratipay (#2282)
-- Add link to changelog in sidebar (#2273)
-
-2.4.3 (2014-10-06)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Unicode URL improvements for Python 2.
-- Re-order JSON param for backwards compat.
-- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
-
-
-2.4.2 (2014-10-05)
-++++++++++++++++++
-
-**Improvements**
-
-- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
-- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
-
-**Bugfixes**
-
-- Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
-- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
-
-**Documentation**
-
-- Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
-- Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
-- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
-
-
-
-2.4.1 (2014-09-09)
-++++++++++++++++++
-
-- Now has a "security" package extras set, ``$ pip install requests[security]``
-- Requests will now use Certifi if it is available.
-- Capture and re-raise urllib3 ProtocolError
-- Bugfix for responses that attempt to redirect to themselves forever (wtf?).
-
-
-2.4.0 (2014-08-29)
-++++++++++++++++++
-
-**Behavioral Changes**
-
-- ``Connection: keep-alive`` header is now sent automatically.
-
-**Improvements**
-
-- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts.
-- Allow copying of PreparedRequests without headers/cookies.
-- Updated bundled urllib3 version.
-- Refactored settings loading from environment -- new `Session.merge_environment_settings`.
-- Handle socket errors in iter_content.
-
-
-2.3.0 (2014-05-16)
-++++++++++++++++++
-
-**API Changes**
-
-- New ``Response`` property ``is_redirect``, which is true when the
-  library could have processed this response as a redirection (whether
-  or not it actually did).
-- The ``timeout`` parameter now affects requests with both ``stream=True`` and
-  ``stream=False`` equally.
-- The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
-  Proxy schemes now default to ``http://``.
-- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
-  dictionary when references as string or viewed in the interpreter.
-
-**Bugfixes**
-
-- No longer expose Authorization or Proxy-Authorization headers on redirect.
-  Fix CVE-2014-1829 and CVE-2014-1830 respectively.
-- Authorization is re-evaluated each redirect.
-- On redirect, pass url as native strings.
-- Fall-back to autodetected encoding for JSON when Unicode detection fails.
-- Headers set to ``None`` on the ``Session`` are now correctly not sent.
-- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
-  response.
-- Stop advertising ``compress`` as a supported Content-Encoding.
-- The ``Response.history`` parameter is now always a list.
-- Many, many ``urllib3`` bugfixes.
-
-2.2.1 (2014-01-23)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
-- Assorted urllib3 fixes.
-
-2.2.0 (2014-01-09)
-++++++++++++++++++
-
-**API Changes**
-
-- New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
-  ``DecodeError`` exceptions.
-
-**Bugfixes**
-
-- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
-- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
-- Use the correct pool size for pools of connections to proxies.
-- Fix iteration of ``CookieJar`` objects.
-- Ensure that cookies are persisted over redirect.
-- Switch back to using chardet, since it has merged with charade.
-
-2.1.0 (2013-12-05)
-++++++++++++++++++
-
-- Updated CA Bundle, of course.
-- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
-- Clean up connections when we hit problems during chunked upload, rather than leaking them.
-- Return connections to the pool when a chunked upload is successful, rather than leaking it.
-- Match the HTTPbis recommendation for HTTP 301 redirects.
-- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
-- Values of headers set by Requests are now always the native string type.
-- Fix previously broken SNI support.
-- Fix accessing HTTP proxies using proxy authentication.
-- Unencode HTTP Basic usernames and passwords extracted from URLs.
-- Support for IP address ranges for no_proxy environment variable
-- Parse headers correctly when users override the default ``Host:`` header.
-- Avoid munging the URL in case of case-sensitive servers.
-- Looser URL handling for non-HTTP/HTTPS urls.
-- Accept unicode methods in Python 2.6 and 2.7.
-- More resilient cookie handling.
-- Make ``Response`` objects pickleable.
-- Actually added MD5-sess to Digest Auth instead of pretending to like last time.
-- Updated internal urllib3.
-- Fixed @Lukasa's lack of taste.
-
-2.0.1 (2013-10-24)
-++++++++++++++++++
-
-- Updated included CA Bundle with new mistrusts and automated process for the future
-- Added MD5-sess to Digest Auth
-- Accept per-file headers in multipart file POST messages.
-- Fixed: Don't send the full URL on CONNECT messages.
-- Fixed: Correctly lowercase a redirect scheme.
-- Fixed: Cookies not persisted when set via functional API.
-- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
-- Updated internal urllib3 and chardet.
-
-2.0.0 (2013-09-24)
-++++++++++++++++++
-
-**API Changes:**
-
-- Keys in the Headers dictionary are now native strings on all Python versions,
-  i.e. bytestrings on Python 2, unicode on Python 3.
-- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
-  will be raised if they don't.
-- Timeouts now apply to read time if ``Stream=False``.
-- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
-- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
-- Added new method to ``Session`` objects: ``Session.update_request()``. This
-  method updates a ``Request`` object with the data (e.g. cookies) stored on
-  the ``Session``.
-- Added new method to ``Session`` objects: ``Session.prepare_request()``. This
-  method updates and prepares a ``Request`` object, and returns the
-  corresponding ``PreparedRequest`` object.
-- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
-  This should not be called directly, but improves the subclass interface.
-- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
-  will now raise a Requests ``ChunkedEncodingError`` instead.
-- Invalid percent-escape sequences now cause a Requests ``InvalidURL``
-  exception to be raised.
-- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
-  ``"already_reported"``.
-- HTTP 226 reason added (``"im_used"``).
-
-**Bugfixes:**
-
-- Vastly improved proxy support, including the CONNECT verb. Special thanks to
-  the many contributors who worked towards this improvement.
-- Cookies are now properly managed when 401 authentication responses are
-  received.
-- Chunked encoding fixes.
-- Support for mixed case schemes.
-- Better handling of streaming downloads.
-- Retrieve environment proxies from more locations.
-- Minor cookies fixes.
-- Improved redirect behaviour.
-- Improved streaming behaviour, particularly for compressed data.
-- Miscellaneous small Python 3 text encoding bugs.
-- ``.netrc`` no longer overrides explicit auth.
-- Cookies set by hooks are now correctly persisted on Sessions.
-- Fix problem with cookies that specify port numbers in their host field.
-- ``BytesIO`` can be used to perform streaming uploads.
-- More generous parsing of the ``no_proxy`` environment variable.
-- Non-string objects can be passed in data values alongside files.
-
-1.2.3 (2013-05-25)
-++++++++++++++++++
-
-- Simple packaging fix
-
-
-1.2.2 (2013-05-23)
-++++++++++++++++++
-
-- Simple packaging fix
-
-
-1.2.1 (2013-05-20)
-++++++++++++++++++
-
-- 301 and 302 redirects now change the verb to GET for all verbs, not just
-  POST, improving browser compatibility.
-- Python 3.3.2 compatibility
-- Always percent-encode location headers
-- Fix connection adapter matching to be most-specific first
-- new argument to the default connection adapter for passing a block argument
-- prevent a KeyError when there's no link headers
-
-1.2.0 (2013-03-31)
-++++++++++++++++++
-
-- Fixed cookies on sessions and on requests
-- Significantly change how hooks are dispatched - hooks now receive all the
-  arguments specified by the user when making a request so hooks can make a
-  secondary request with the same parameters. This is especially necessary for
-  authentication handler authors
-- certifi support was removed
-- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
-- Major proxy work thanks to @Lukasa including parsing of proxy authentication
-  from the proxy url
-- Fix DigestAuth handling too many 401s
-- Update vendored urllib3 to include SSL bug fixes
-- Allow keyword arguments to be passed to ``json.loads()`` via the
-  ``Response.json()`` method
-- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
-  requests
-- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
-  took.
-- Fix ``RequestsCookieJar``
-- Sessions and Adapters are now picklable, i.e., can be used with the
-  multiprocessing library
-- Update charade to version 1.0.3
-
-The change in how hooks are dispatched will likely cause a great deal of
-issues.
-
-1.1.0 (2013-01-10)
-++++++++++++++++++
-
-- CHUNKED REQUESTS
-- Support for iterable response bodies
-- Assume servers persist redirect params
-- Allow explicit content types to be specified for file data
-- Make merge_kwargs case-insensitive when looking up keys
-
-1.0.3 (2012-12-18)
-++++++++++++++++++
-
-- Fix file upload encoding bug
-- Fix cookie behavior
-
-1.0.2 (2012-12-17)
-++++++++++++++++++
-
-- Proxy fix for HTTPAdapter.
-
-1.0.1 (2012-12-17)
-++++++++++++++++++
-
-- Cert verification exception bug.
-- Proxy fix for HTTPAdapter.
-
-1.0.0 (2012-12-17)
-++++++++++++++++++
-
-- Massive Refactor and Simplification
-- Switch to Apache 2.0 license
-- Swappable Connection Adapters
-- Mountable Connection Adapters
-- Mutable ProcessedRequest chain
-- /s/prefetch/stream
-- Removal of all configuration
-- Standard library logging
-- Make Response.json() callable, not property.
-- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
-- Removal of all hooks except 'response'
-- Removal of all authentication helpers (OAuth, Kerberos)
-
-This is not a backwards compatible change.
-
-0.14.2 (2012-10-27)
-+++++++++++++++++++
-
-- Improved mime-compatible JSON handling
-- Proxy fixes
-- Path hack fixes
-- Case-Insensitive Content-Encoding headers
-- Support for CJK parameters in form posts
-
-
-0.14.1 (2012-10-01)
-+++++++++++++++++++
-
-- Python 3.3 Compatibility
-- Simply default accept-encoding
-- Bugfixes
-
-
-0.14.0 (2012-09-02)
-++++++++++++++++++++
-
-- No more iter_content errors if already downloaded.
-
-0.13.9 (2012-08-25)
-+++++++++++++++++++
-
-- Fix for OAuth + POSTs
-- Remove exception eating from dispatch_hook
-- General bugfixes
-
-0.13.8 (2012-08-21)
-+++++++++++++++++++
-
-- Incredible Link header support :)
-
-0.13.7 (2012-08-19)
-+++++++++++++++++++
-
-- Support for (key, value) lists everywhere.
-- Digest Authentication improvements.
-- Ensure proxy exclusions work properly.
-- Clearer UnicodeError exceptions.
-- Automatic casting of URLs to strings (fURL and such)
-- Bugfixes.
-
-0.13.6 (2012-08-06)
-+++++++++++++++++++
-
-- Long awaited fix for hanging connections!
-
-0.13.5 (2012-07-27)
-+++++++++++++++++++
-
-- Packaging fix
-
-0.13.4 (2012-07-27)
-+++++++++++++++++++
-
-- GSSAPI/Kerberos authentication!
-- App Engine 2.7 Fixes!
-- Fix leaking connections (from urllib3 update)
-- OAuthlib path hack fix
-- OAuthlib URL parameters fix.
-
-0.13.3 (2012-07-12)
-+++++++++++++++++++
-
-- Use simplejson if available.
-- Do not hide SSLErrors behind Timeouts.
-- Fixed param handling with urls containing fragments.
-- Significantly improved information in User Agent.
-- client certificates are ignored when verify=False
-
-0.13.2 (2012-06-28)
-+++++++++++++++++++
-
-- Zero dependencies (once again)!
-- New: Response.reason
-- Sign querystring parameters in OAuth 1.0
-- Client certificates no longer ignored when verify=False
-- Add openSUSE certificate support
-
-0.13.1 (2012-06-07)
-+++++++++++++++++++
-
-- Allow passing a file or file-like object as data.
-- Allow hooks to return responses that indicate errors.
-- Fix Response.text and Response.json for body-less responses.
-
-0.13.0 (2012-05-29)
-+++++++++++++++++++
-
-- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
-- Allow disabling of cookie persistence.
-- New implementation of safe_mode
-- cookies.get now supports default argument
-- Session cookies not saved when Session.request is called with return_response=False
-- Env: no_proxy support.
-- RequestsCookieJar improvements.
-- Various bug fixes.
-
-0.12.1 (2012-05-08)
-+++++++++++++++++++
-
-- New ``Response.json`` property.
-- Ability to add string file uploads.
-- Fix out-of-range issue with iter_lines.
-- Fix iter_content default size.
-- Fix POST redirects containing files.
-
-0.12.0 (2012-05-02)
-+++++++++++++++++++
-
-- EXPERIMENTAL OAUTH SUPPORT!
-- Proper CookieJar-backed cookies interface with awesome dict-like interface.
-- Speed fix for non-iterated content chunks.
-- Move ``pre_request`` to a more usable place.
-- New ``pre_send`` hook.
-- Lazily encode data, params, files.
-- Load system Certificate Bundle if ``certify`` isn't available.
-- Cleanups, fixes.
-
-0.11.2 (2012-04-22)
-+++++++++++++++++++
-
-- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
-- Infinite digest auth redirect fix.
-- Multi-part file upload improvements.
-- Fix decoding of invalid %encodings in URLs.
-- If there is no content in a response don't throw an error the second time that content is attempted to be read.
-- Upload data on redirects.
-
-0.11.1 (2012-03-30)
-+++++++++++++++++++
-
-* POST redirects now break RFC to do what browsers do: Follow up with a GET.
-* New ``strict_mode`` configuration to disable new redirect behavior.
-
-
-0.11.0 (2012-03-14)
-+++++++++++++++++++
-
-* Private SSL Certificate support
-* Remove select.poll from Gevent monkeypatching
-* Remove redundant generator for chunked transfer encoding
-* Fix: Response.ok raises Timeout Exception in safe_mode
-
-0.10.8 (2012-03-09)
-+++++++++++++++++++
-
-* Generate chunked ValueError fix
-* Proxy configuration by environment variables
-* Simplification of iter_lines.
-* New `trust_env` configuration for disabling system/environment hints.
-* Suppress cookie errors.
-
-0.10.7 (2012-03-07)
-+++++++++++++++++++
-
-* `encode_uri` = False
-
-0.10.6 (2012-02-25)
-+++++++++++++++++++
-
-* Allow '=' in cookies.
-
-0.10.5 (2012-02-25)
-+++++++++++++++++++
-
-* Response body with 0 content-length fix.
-* New async.imap.
-* Don't fail on netrc.
-
-
-0.10.4 (2012-02-20)
-+++++++++++++++++++
-
-* Honor netrc.
-
-0.10.3 (2012-02-20)
-+++++++++++++++++++
-
-* HEAD requests don't follow redirects anymore.
-* raise_for_status() doesn't raise for 3xx anymore.
-* Make Session objects picklable.
-* ValueError for invalid schema URLs.
-
-0.10.2 (2012-01-15)
-+++++++++++++++++++
-
-* Vastly improved URL quoting.
-* Additional allowed cookie key values.
-* Attempted fix for "Too many open files" Error
-* Replace unicode errors on first pass, no need for second pass.
-* Append '/' to bare-domain urls before query insertion.
-* Exceptions now inherit from RuntimeError.
-* Binary uploads + auth fix.
-* Bugfixes.
-
-
-0.10.1 (2012-01-23)
-+++++++++++++++++++
-
-* PYTHON 3 SUPPORT!
-* Dropped 2.5 Support. (*Backwards Incompatible*)
-
-0.10.0 (2012-01-21)
-+++++++++++++++++++
-
-* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
-* New ``Response.text`` is unicode-only.
-* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
-* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
-* Removal of `decode_unicode`. (*Backwards Incompatible*)
-* New multiple-hooks system.
-* New ``Response.register_hook`` for registering hooks within the pipeline.
-* ``Response.url`` is now Unicode.
-
-0.9.3 (2012-01-18)
-++++++++++++++++++
-
-* SSL verify=False bugfix (apparent on windows machines).
-
-0.9.2 (2012-01-18)
-++++++++++++++++++
-
-* Asynchronous async.send method.
-* Support for proper chunk streams with boundaries.
-* session argument for Session classes.
-* Print entire hook tracebacks, not just exception instance.
-* Fix response.iter_lines from pending next line.
-* Fix but in HTTP-digest auth w/ URI having query strings.
-* Fix in Event Hooks section.
-* Urllib3 update.
-
-
-0.9.1 (2012-01-06)
-++++++++++++++++++
-
-* danger_mode for automatic Response.raise_for_status()
-* Response.iter_lines refactor
-
-0.9.0 (2011-12-28)
-++++++++++++++++++
-
-* verify ssl is default.
-
-
-0.8.9 (2011-12-28)
-++++++++++++++++++
-
-* Packaging fix.
-
-
-0.8.8 (2011-12-28)
-++++++++++++++++++
-
-* SSL CERT VERIFICATION!
-* Release of Cerifi: Mozilla's cert list.
-* New 'verify' argument for SSL requests.
-* Urllib3 update.
-
-0.8.7 (2011-12-24)
-++++++++++++++++++
-
-* iter_lines last-line truncation fix
-* Force safe_mode for async requests
-* Handle safe_mode exceptions more consistently
-* Fix iteration on null responses in safe_mode
-
-0.8.6 (2011-12-18)
-++++++++++++++++++
-
-* Socket timeout fixes.
-* Proxy Authorization support.
-
-0.8.5 (2011-12-14)
-++++++++++++++++++
-
-* Response.iter_lines!
-
-0.8.4 (2011-12-11)
-++++++++++++++++++
-
-* Prefetch bugfix.
-* Added license to installed version.
-
-0.8.3 (2011-11-27)
-++++++++++++++++++
-
-* Converted auth system to use simpler callable objects.
-* New session parameter to API methods.
-* Display full URL while logging.
-
-0.8.2 (2011-11-19)
-++++++++++++++++++
-
-* New Unicode decoding system, based on over-ridable `Response.encoding`.
-* Proper URL slash-quote handling.
-* Cookies with ``[``, ``]``, and ``_`` allowed.
-
-0.8.1 (2011-11-15)
-++++++++++++++++++
-
-* URL Request path fix
-* Proxy fix.
-* Timeouts fix.
-
-0.8.0 (2011-11-13)
-++++++++++++++++++
-
-* Keep-alive support!
-* Complete removal of Urllib2
-* Complete removal of Poster
-* Complete removal of CookieJars
-* New ConnectionError raising
-* Safe_mode for error catching
-* prefetch parameter for request methods
-* OPTION method
-* Async pool size throttling
-* File uploads send real names
-* Vendored in urllib3
-
-0.7.6 (2011-11-07)
-++++++++++++++++++
-
-* Digest authentication bugfix (attach query data to path)
-
-0.7.5 (2011-11-04)
-++++++++++++++++++
-
-* Response.content = None if there was an invalid response.
-* Redirection auth handling.
-
-0.7.4 (2011-10-26)
-++++++++++++++++++
-
-* Session Hooks fix.
-
-0.7.3 (2011-10-23)
-++++++++++++++++++
-
-* Digest Auth fix.
-
-
-0.7.2 (2011-10-23)
-++++++++++++++++++
-
-* PATCH Fix.
-
-
-0.7.1 (2011-10-23)
-++++++++++++++++++
-
-* Move away from urllib2 authentication handling.
-* Fully Remove AuthManager, AuthObject, &c.
-* New tuple-based auth system with handler callbacks.
-
-
-0.7.0 (2011-10-22)
-++++++++++++++++++
-
-* Sessions are now the primary interface.
-* Deprecated InvalidMethodException.
-* PATCH fix.
-* New config system (no more global settings).
-
-
-0.6.6 (2011-10-19)
-++++++++++++++++++
-
-* Session parameter bugfix (params merging).
-
-
-0.6.5 (2011-10-18)
-++++++++++++++++++
-
-* Offline (fast) test suite.
-* Session dictionary argument merging.
-
-
-0.6.4 (2011-10-13)
-++++++++++++++++++
-
-* Automatic decoding of unicode, based on HTTP Headers.
-* New ``decode_unicode`` setting.
-* Removal of ``r.read/close`` methods.
-* New ``r.faw`` interface for advanced response usage.*
-* Automatic expansion of parameterized headers.
-
-
-0.6.3 (2011-10-13)
-++++++++++++++++++
-
-* Beautiful ``requests.async`` module, for making async requests w/ gevent.
-
-
-0.6.2 (2011-10-09)
-++++++++++++++++++
-
-* GET/HEAD obeys allow_redirects=False.
-
-
-0.6.1 (2011-08-20)
-++++++++++++++++++
-
-* Enhanced status codes experience ``\o/``
-* Set a maximum number of redirects (``settings.max_redirects``)
-* Full Unicode URL support
-* Support for protocol-less redirects.
-* Allow for arbitrary request types.
-* Bugfixes
-
-
-0.6.0 (2011-08-17)
-++++++++++++++++++
-
-* New callback hook system
-* New persistent sessions object and context manager
-* Transparent Dict-cookie handling
-* Status code reference object
-* Removed Response.cached
-* Added Response.request
-* All args are kwargs
-* Relative redirect support
-* HTTPError handling improvements
-* Improved https testing
-* Bugfixes
-
-
-0.5.1 (2011-07-23)
-++++++++++++++++++
-
-* International Domain Name Support!
-* Access headers without fetching entire body (``read()``)
-* Use lists as dicts for parameters
-* Add Forced Basic Authentication
-* Forced Basic is default authentication type
-* ``python-requests.org`` default User-Agent header
-* CaseInsensitiveDict lower-case caching
-* Response.history bugfix
-
-
-0.5.0 (2011-06-21)
-++++++++++++++++++
-
-* PATCH Support
-* Support for Proxies
-* HTTPBin Test Suite
-* Redirect Fixes
-* settings.verbose stream writing
-* Querystrings for all methods
-* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised
-  ``r.requests.get('hwe://blah'); r.raise_for_status()``
-
-
-0.4.1 (2011-05-22)
-++++++++++++++++++
-
-* Improved Redirection Handling
-* New 'allow_redirects' param for following non-GET/HEAD Redirects
-* Settings module refactoring
-
-
-0.4.0 (2011-05-15)
-++++++++++++++++++
-
-* Response.history: list of redirected responses
-* Case-Insensitive Header Dictionaries!
-* Unicode URLs
-
-
-0.3.4 (2011-05-14)
-++++++++++++++++++
-
-* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
-* Internal Refactor
-* Bytes data upload Bugfix
-
-
-
-0.3.3 (2011-05-12)
-++++++++++++++++++
-
-* Request timeouts
-* Unicode url-encoded data
-* Settings context manager and module
-
-
-0.3.2 (2011-04-15)
-++++++++++++++++++
-
-* Automatic Decompression of GZip Encoded Content
-* AutoAuth Support for Tupled HTTP Auth
-
-
-0.3.1 (2011-04-01)
-++++++++++++++++++
-
-* Cookie Changes
-* Response.read()
-* Poster fix
-
-
-0.3.0 (2011-02-25)
-++++++++++++++++++
-
-* Automatic Authentication API Change
-* Smarter Query URL Parameterization
-* Allow file uploads and POST data together
-* New Authentication Manager System
-    - Simpler Basic HTTP System
-    - Supports all build-in urllib2 Auths
-    - Allows for custom Auth Handlers
-
-
-0.2.4 (2011-02-19)
-++++++++++++++++++
-
-* Python 2.5 Support
-* PyPy-c v1.4 Support
-* Auto-Authentication tests
-* Improved Request object constructor
-
-0.2.3 (2011-02-15)
-++++++++++++++++++
-
-* New HTTPHandling Methods
-    - Response.__nonzero__ (false if bad HTTP Status)
-    - Response.ok (True if expected HTTP Status)
-    - Response.error (Logged HTTPError if bad HTTP Status)
-    - Response.raise_for_status() (Raises stored HTTPError)
-
-
-0.2.2 (2011-02-14)
-++++++++++++++++++
-
-* Still handles request in the event of an HTTPError. (Issue #2)
-* Eventlet and Gevent Monkeypatch support.
-* Cookie Support (Issue #1)
-
-
-0.2.1 (2011-02-14)
-++++++++++++++++++
-
-* Added file attribute to POST and PUT requests for multipart-encode file uploads.
-* Added Request.url attribute for context and redirects
-
-
-0.2.0 (2011-02-14)
-++++++++++++++++++
-
-* Birth!
-
-
-0.0.1 (2011-02-13)
-++++++++++++++++++
-
-* Frustration
-* Conception
-
-
-
diff --git a/python/ext-libs/requests-2.10.0.dist-info/METADATA b/python/ext-libs/requests-2.10.0.dist-info/METADATA
deleted file mode 100644
index 7fe90bf..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/METADATA
+++ /dev/null
@@ -1,1286 +0,0 @@
-Metadata-Version: 2.0
-Name: requests
-Version: 2.10.0
-Summary: Python HTTP for Humans.
-Home-page: http://python-requests.org
-Author: Kenneth Reitz
-Author-email: me at kennethreitz.com
-License: Apache 2.0
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: Natural Language :: English
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Provides-Extra: security
-Requires-Dist: ndg-httpsclient; extra == 'security'
-Requires-Dist: pyOpenSSL (>=0.13); extra == 'security'
-Requires-Dist: pyasn1; extra == 'security'
-Provides-Extra: socks
-Requires-Dist: PySocks (>=1.5.6); extra == 'socks'
-
-Requests: HTTP for Humans
-=========================
-
-.. image:: https://img.shields.io/pypi/v/requests.svg
-    :target: https://pypi.python.org/pypi/requests
-
-.. image:: https://img.shields.io/pypi/dm/requests.svg
-        :target: https://pypi.python.org/pypi/requests
-
-Requests is the only *Non-GMO* HTTP library for Python, safe for human
-consumption.
-
-**Warning:** Recreational use of other HTTP libraries may result in dangerous side-effects,
-including: security vulnerabilities, verbose code, reinventing the wheel,
-constantly reading documentation, depression, headaches, or even death.
-
-Behold, the power of Requests:
-
-.. code-block:: python
-
-    >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))
-    >>> r.status_code
-    200
-    >>> r.headers['content-type']
-    'application/json; charset=utf8'
-    >>> r.encoding
-    'utf-8'
-    >>> r.text
-    u'{"type":"User"...'
-    >>> r.json()
-    {u'disk_usage': 368627, u'private_gists': 484, ...}
-
-See `the similar code, sans Requests <https://gist.github.com/973705>`_.
-
-Requests allows you to send *organic, grass-fed* HTTP/1.1 requests, without the
-need for manual labor. There's no need to manually add query strings to your
-URLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling
-are 100% automatic, powered by `urllib3 <https://github.com/shazow/urllib3>`_,
-which is embedded within Requests.
-
-Besides, all the cool kids are doing it. Requests is one of the most
-downloaded Python packages of all time, pulling in over 7,000,000 downloads
-every month. You don't want to be left out!
-
-Feature Support
----------------
-
-Requests is ready for today's web.
-
-- International Domains and URLs
-- Keep-Alive & Connection Pooling
-- Sessions with Cookie Persistence
-- Browser-style SSL Verification
-- Basic/Digest Authentication
-- Elegant Key/Value Cookies
-- Automatic Decompression
-- Automatic Content Decoding
-- Unicode Response Bodies
-- Multipart File Uploads
-- HTTP(S) Proxy Support
-- Connection Timeouts
-- Streaming Downloads
-- ``.netrc`` Support
-- Chunked Requests
-- Thread-safety
-
-Requests supports Python 2.6 — 3.5, and runs great on PyPy.
-
-Installation
-------------
-
-To install Requests, simply:
-
-.. code-block:: bash
-
-    $ pip install requests
-    ✨🍰✨
-
-Satisfaction, guaranteed.
-
-Documentation
--------------
-
-Fantastic documentation is available at http://docs.python-requests.org/, for a limited time only.
-
-
-How to Contribute
------------------
-
-#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
-#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
-#. Write a test which shows that the bug was fixed or that the feature works as expected.
-#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
-
-.. _`the repository`: http://github.com/kennethreitz/requests
-.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
-.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
-
-
-.. :changelog:
-
-Release History
----------------
-
-2.10.0 (04-29-2016)
-+++++++++++++++++++
-
-**New Features**
-
-- SOCKS Proxy Support! (requires PySocks; $ pip install requests[socks])
-
-**Miscellaneous**
-
-- Updated bundled urllib3 to 1.15.1.
-
-2.9.2 (04-29-2016)
-++++++++++++++++++
-
-**Improvements**
-
-- Change built-in CaseInsensitiveDict (used for headers) to use OrderedDict
-  as its underlying datastore.
-
-**Bugfixes**
-
-- Don't use redirect_cache if allow_redirects=False
-- When passed objects that throw exceptions from ``tell()``, send them via
-  chunked transfer encoding instead of failing.
-- Raise a ProxyError for proxy related connection issues.
-
-2.9.1 (2015-12-21)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Resolve regression introduced in 2.9.0 that made it impossible to send binary
-  strings as bodies in Python 3.
-- Fixed errors when calculating cookie expiration dates in certain locales.
-
-**Miscellaneous**
-
-- Updated bundled urllib3 to 1.13.1.
-
-2.9.0 (2015-12-15)
-++++++++++++++++++
-
-**Minor Improvements** (Backwards compatible)
-
-- The ``verify`` keyword argument now supports being passed a path to a
-  directory of CA certificates, not just a single-file bundle.
-- Warnings are now emitted when sending files opened in text mode.
-- Added the 511 Network Authentication Required status code to the status code
-  registry.
-
-**Bugfixes**
-
-- For file-like objects that are not seeked to the very beginning, we now
-  send the content length for the number of bytes we will actually read, rather
-  than the total size of the file, allowing partial file uploads.
-- When uploading file-like objects, if they are empty or have no obvious
-  content length we set ``Transfer-Encoding: chunked`` rather than
-  ``Content-Length: 0``.
-- We correctly receive the response in buffered mode when uploading chunked
-  bodies.
-- We now handle being passed a query string as a bytestring on Python 3, by
-  decoding it as UTF-8.
-- Sessions are now closed in all cases (exceptional and not) when using the
-  functional API rather than leaking and waiting for the garbage collector to
-  clean them up.
-- Correctly handle digest auth headers with a malformed ``qop`` directive that
-  contains no token, by treating it the same as if no ``qop`` directive was
-  provided at all.
-- Minor performance improvements when removing specific cookies by name.
-
-**Miscellaneous**
-
-- Updated urllib3 to 1.13.
-
-2.8.1 (2015-10-13)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
-  bundle.
-- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
-  ``ConnectionError``
-- When using the PreparedRequest flow, requests will now correctly respect the
-  ``json`` parameter. Broken in 2.8.0.
-- When using the PreparedRequest flow, requests will now correctly handle a
-  Unicode-string method name on Python 2. Broken in 2.8.0.
-
-2.8.0 (2015-10-05)
-++++++++++++++++++
-
-**Minor Improvements** (Backwards Compatible)
-
-- Requests now supports per-host proxies. This allows the ``proxies``
-  dictionary to have entries of the form
-  ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
-  in preference to the previously-supported scheme-specific ones, but the
-  previous syntax will continue to work.
-- ``Response.raise_for_status`` now prints the URL that failed as part of the
-  exception message.
-- ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
-  defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
-  exceptions to be thrown.
-- Change to bundled projects import logic to make it easier to unbundle
-  requests downstream.
-- Changed the default User-Agent string to avoid leaking data on Linux: now
-  contains only the requests version.
-
-**Bugfixes**
-
-- The ``json`` parameter to ``post()`` and friends will now only be used if
-  neither ``data`` nor ``files`` are present, consistent with the
-  documentation.
-- We now ignore empty fields in the ``NO_PROXY`` environment variable.
-- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
-  ``stream=True`` with ``contextlib.closing``.
-- Prevented bugs where we would attempt to return the same connection back to
-  the connection pool twice when sending a Chunked body.
-- Miscellaneous minor internal changes.
-- Digest Auth support is now thread safe.
-
-**Updates**
-
-- Updated urllib3 to 1.12.
-
-2.7.0 (2015-05-03)
-++++++++++++++++++
-
-This is the first release that follows our new release process. For more, see
-`our documentation
-<http://docs.python-requests.org/en/latest/community/release-process/>`_.
-
-**Bugfixes**
-
-- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
-  encoding and response framing.
-
-2.6.2 (2015-04-23)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Fix regression where compressed data that was sent as chunked data was not
-  properly decompressed. (#2561)
-
-2.6.1 (2015-04-22)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Remove VendorAlias import machinery introduced in v2.5.2.
-
-- Simplify the PreparedRequest.prepare API: We no longer require the user to
-  pass an empty list to the hooks keyword argument. (c.f. #2552)
-
-- Resolve redirects now receives and forwards all of the original arguments to
-  the adapter. (#2503)
-
-- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
-  cannot be encoded in ASCII. (#2540)
-
-- Populate the parsed path of the URI field when performing Digest
-  Authentication. (#2426)
-
-- Copy a PreparedRequest's CookieJar more reliably when it is not an instance
-  of RequestsCookieJar. (#2527)
-
-2.6.0 (2015-03-14)
-++++++++++++++++++
-
-**Bugfixes**
-
-- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
-  without a host value set would use the hostname for the redirected URL
-  exposing requests users to session fixation attacks and potentially cookie
-  stealing. This was disclosed privately by Matthew Daley of
-  `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
-  v2.1.0 to v2.5.3 (inclusive on both ends).
-
-- Fix error when requests is an ``install_requires`` dependency and ``python
-  setup.py test`` is run. (#2462)
-
-- Fix error when urllib3 is unbundled and requests continues to use the
-  vendored import location.
-
-- Include fixes to ``urllib3``'s header handling.
-
-- Requests' handling of unvendored dependencies is now more restrictive.
-
-**Features and Improvements**
-
-- Support bytearrays when passed as parameters in the ``files`` argument.
-  (#2468)
-
-- Avoid data duplication when creating a request with ``str``, ``bytes``, or
-  ``bytearray`` input to the ``files`` argument.
-
-2.5.3 (2015-02-24)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Revert changes to our vendored certificate bundle. For more context see
-  (#2455, #2456, and http://bugs.python.org/issue23476)
-
-2.5.2 (2015-02-23)
-++++++++++++++++++
-
-**Features and Improvements**
-
-- Add sha256 fingerprint support. (`shazow/urllib3#540`_)
-
-- Improve the performance of headers. (`shazow/urllib3#544`_)
-
-**Bugfixes**
-
-- Copy pip's import machinery. When downstream redistributors remove
-  requests.packages.urllib3 the import machinery will continue to let those
-  same symbols work. Example usage in requests' documentation and 3rd-party
-  libraries relying on the vendored copies of urllib3 will work without having
-  to fallback to the system urllib3.
-
-- Attempt to quote parts of the URL on redirect if unquoting and then quoting
-  fails. (#2356)
-
-- Fix filename type check for multipart form-data uploads. (#2411)
-
-- Properly handle the case where a server issuing digest authentication
-  challenges provides both auth and auth-int qop-values. (#2408)
-
-- Fix a socket leak. (`shazow/urllib3#549`_)
-
-- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
-
-- Disable the built-in hostname verification. (`shazow/urllib3#526`_)
-
-- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
-
-**Security**
-
-- Pulled in an updated ``cacert.pem``.
-
-- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
-
-.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
-.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
-.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
-.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
-.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
-.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
-.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
-
-2.5.1 (2014-12-23)
-++++++++++++++++++
-
-**Behavioural Changes**
-
-- Only catch HTTPErrors in raise_for_status (#2382)
-
-**Bugfixes**
-
-- Handle LocationParseError from urllib3 (#2344)
-- Handle file-like object filenames that are not strings (#2379)
-- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
-
-2.5.0 (2014-12-01)
-++++++++++++++++++
-
-**Improvements**
-
-- Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
-- The ``iter_lines`` method on a response now accepts a delimiter with which
-  to split the content (#2295)
-
-**Behavioural Changes**
-
-- Add deprecation warnings to functions in requests.utils that will be removed
-  in 3.0 (#2309)
-- Sessions used by the functional API are always closed (#2326)
-- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
-
-**Bugfixes**
-
-- Only parse the URL once (#2353)
-- Allow Content-Length header to always be overridden (#2332)
-- Properly handle files in HTTPDigestAuth (#2333)
-- Cap redirect_cache size to prevent memory abuse (#2299)
-- Fix HTTPDigestAuth handling of redirects after authenticating successfully
-  (#2253)
-- Fix crash with custom method parameter to Session.request (#2317)
-- Fix how Link headers are parsed using the regular expression library (#2271)
-
-**Documentation**
-
-- Add more references for interlinking (#2348)
-- Update CSS for theme (#2290)
-- Update width of buttons and sidebar (#2289)
-- Replace references of Gittip with Gratipay (#2282)
-- Add link to changelog in sidebar (#2273)
-
-2.4.3 (2014-10-06)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Unicode URL improvements for Python 2.
-- Re-order JSON param for backwards compat.
-- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
-
-
-2.4.2 (2014-10-05)
-++++++++++++++++++
-
-**Improvements**
-
-- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
-- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
-
-**Bugfixes**
-
-- Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
-- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
-
-**Documentation**
-
-- Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
-- Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
-- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
-
-
-
-2.4.1 (2014-09-09)
-++++++++++++++++++
-
-- Now has a "security" package extras set, ``$ pip install requests[security]``
-- Requests will now use Certifi if it is available.
-- Capture and re-raise urllib3 ProtocolError
-- Bugfix for responses that attempt to redirect to themselves forever (wtf?).
-
-
-2.4.0 (2014-08-29)
-++++++++++++++++++
-
-**Behavioral Changes**
-
-- ``Connection: keep-alive`` header is now sent automatically.
-
-**Improvements**
-
-- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts.
-- Allow copying of PreparedRequests without headers/cookies.
-- Updated bundled urllib3 version.
-- Refactored settings loading from environment -- new `Session.merge_environment_settings`.
-- Handle socket errors in iter_content.
-
-
-2.3.0 (2014-05-16)
-++++++++++++++++++
-
-**API Changes**
-
-- New ``Response`` property ``is_redirect``, which is true when the
-  library could have processed this response as a redirection (whether
-  or not it actually did).
-- The ``timeout`` parameter now affects requests with both ``stream=True`` and
-  ``stream=False`` equally.
-- The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
-  Proxy schemes now default to ``http://``.
-- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
-  dictionary when referenced as a string or viewed in the interpreter.
-
-**Bugfixes**
-
-- No longer expose Authorization or Proxy-Authorization headers on redirect.
-  Fix CVE-2014-1829 and CVE-2014-1830 respectively.
-- Authorization is re-evaluated each redirect.
-- On redirect, pass url as native strings.
-- Fall-back to autodetected encoding for JSON when Unicode detection fails.
-- Headers set to ``None`` on the ``Session`` are now correctly not sent.
-- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
-  response.
-- Stop advertising ``compress`` as a supported Content-Encoding.
-- The ``Response.history`` parameter is now always a list.
-- Many, many ``urllib3`` bugfixes.
-
-2.2.1 (2014-01-23)
-++++++++++++++++++
-
-**Bugfixes**
-
-- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
-- Assorted urllib3 fixes.
-
-2.2.0 (2014-01-09)
-++++++++++++++++++
-
-**API Changes**
-
-- New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
-  ``DecodeError`` exceptions.
-
-**Bugfixes**
-
-- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
-- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
-- Use the correct pool size for pools of connections to proxies.
-- Fix iteration of ``CookieJar`` objects.
-- Ensure that cookies are persisted over redirect.
-- Switch back to using chardet, since it has merged with charade.
-
-2.1.0 (2013-12-05)
-++++++++++++++++++
-
-- Updated CA Bundle, of course.
-- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
-- Clean up connections when we hit problems during chunked upload, rather than leaking them.
-- Return connections to the pool when a chunked upload is successful, rather than leaking it.
-- Match the HTTPbis recommendation for HTTP 301 redirects.
-- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
-- Values of headers set by Requests are now always the native string type.
-- Fix previously broken SNI support.
-- Fix accessing HTTP proxies using proxy authentication.
-- Unencode HTTP Basic usernames and passwords extracted from URLs.
-- Support for IP address ranges for no_proxy environment variable
-- Parse headers correctly when users override the default ``Host:`` header.
-- Avoid munging the URL in case of case-sensitive servers.
-- Looser URL handling for non-HTTP/HTTPS urls.
-- Accept unicode methods in Python 2.6 and 2.7.
-- More resilient cookie handling.
-- Make ``Response`` objects pickleable.
-- Actually added MD5-sess to Digest Auth instead of pretending to like last time.
-- Updated internal urllib3.
-- Fixed @Lukasa's lack of taste.
-
-2.0.1 (2013-10-24)
-++++++++++++++++++
-
-- Updated included CA Bundle with new mistrusts and automated process for the future
-- Added MD5-sess to Digest Auth
-- Accept per-file headers in multipart file POST messages.
-- Fixed: Don't send the full URL on CONNECT messages.
-- Fixed: Correctly lowercase a redirect scheme.
-- Fixed: Cookies not persisted when set via functional API.
-- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
-- Updated internal urllib3 and chardet.
-
-2.0.0 (2013-09-24)
-++++++++++++++++++
-
-**API Changes:**
-
-- Keys in the Headers dictionary are now native strings on all Python versions,
-  i.e. bytestrings on Python 2, unicode on Python 3.
-- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
-  will be raised if they don't.
-- Timeouts now apply to read time if ``Stream=False``.
-- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
-- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
-- Added new method to ``Session`` objects: ``Session.update_request()``. This
-  method updates a ``Request`` object with the data (e.g. cookies) stored on
-  the ``Session``.
-- Added new method to ``Session`` objects: ``Session.prepare_request()``. This
-  method updates and prepares a ``Request`` object, and returns the
-  corresponding ``PreparedRequest`` object.
-- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
-  This should not be called directly, but improves the subclass interface.
-- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
-  will now raise a Requests ``ChunkedEncodingError`` instead.
-- Invalid percent-escape sequences now cause a Requests ``InvalidURL``
-  exception to be raised.
-- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
-  ``"already_reported"``.
-- HTTP 226 reason added (``"im_used"``).
-
-**Bugfixes:**
-
-- Vastly improved proxy support, including the CONNECT verb. Special thanks to
-  the many contributors who worked towards this improvement.
-- Cookies are now properly managed when 401 authentication responses are
-  received.
-- Chunked encoding fixes.
-- Support for mixed case schemes.
-- Better handling of streaming downloads.
-- Retrieve environment proxies from more locations.
-- Minor cookies fixes.
-- Improved redirect behaviour.
-- Improved streaming behaviour, particularly for compressed data.
-- Miscellaneous small Python 3 text encoding bugs.
-- ``.netrc`` no longer overrides explicit auth.
-- Cookies set by hooks are now correctly persisted on Sessions.
-- Fix problem with cookies that specify port numbers in their host field.
-- ``BytesIO`` can be used to perform streaming uploads.
-- More generous parsing of the ``no_proxy`` environment variable.
-- Non-string objects can be passed in data values alongside files.
-
-1.2.3 (2013-05-25)
-++++++++++++++++++
-
-- Simple packaging fix
-
-
-1.2.2 (2013-05-23)
-++++++++++++++++++
-
-- Simple packaging fix
-
-
-1.2.1 (2013-05-20)
-++++++++++++++++++
-
-- 301 and 302 redirects now change the verb to GET for all verbs, not just
-  POST, improving browser compatibility.
-- Python 3.3.2 compatibility
-- Always percent-encode location headers
-- Fix connection adapter matching to be most-specific first
-- new argument to the default connection adapter for passing a block argument
-- prevent a KeyError when there's no link headers
-
-1.2.0 (2013-03-31)
-++++++++++++++++++
-
-- Fixed cookies on sessions and on requests
-- Significantly change how hooks are dispatched - hooks now receive all the
-  arguments specified by the user when making a request so hooks can make a
-  secondary request with the same parameters. This is especially necessary for
-  authentication handler authors
-- certifi support was removed
-- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
-- Major proxy work thanks to @Lukasa including parsing of proxy authentication
-  from the proxy url
-- Fix DigestAuth handling too many 401s
-- Update vendored urllib3 to include SSL bug fixes
-- Allow keyword arguments to be passed to ``json.loads()`` via the
-  ``Response.json()`` method
-- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
-  requests
-- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
-  took.
-- Fix ``RequestsCookieJar``
-- Sessions and Adapters are now picklable, i.e., can be used with the
-  multiprocessing library
-- Update charade to version 1.0.3
-
-The change in how hooks are dispatched will likely cause a great deal of
-issues.
-
-1.1.0 (2013-01-10)
-++++++++++++++++++
-
-- CHUNKED REQUESTS
-- Support for iterable response bodies
-- Assume servers persist redirect params
-- Allow explicit content types to be specified for file data
-- Make merge_kwargs case-insensitive when looking up keys
-
-1.0.3 (2012-12-18)
-++++++++++++++++++
-
-- Fix file upload encoding bug
-- Fix cookie behavior
-
-1.0.2 (2012-12-17)
-++++++++++++++++++
-
-- Proxy fix for HTTPAdapter.
-
-1.0.1 (2012-12-17)
-++++++++++++++++++
-
-- Cert verification exception bug.
-- Proxy fix for HTTPAdapter.
-
-1.0.0 (2012-12-17)
-++++++++++++++++++
-
-- Massive Refactor and Simplification
-- Switch to Apache 2.0 license
-- Swappable Connection Adapters
-- Mountable Connection Adapters
-- Mutable ProcessedRequest chain
-- /s/prefetch/stream
-- Removal of all configuration
-- Standard library logging
-- Make Response.json() callable, not property.
-- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
-- Removal of all hooks except 'response'
-- Removal of all authentication helpers (OAuth, Kerberos)
-
-This is not a backwards compatible change.
-
-0.14.2 (2012-10-27)
-+++++++++++++++++++
-
-- Improved mime-compatible JSON handling
-- Proxy fixes
-- Path hack fixes
-- Case-Insensitive Content-Encoding headers
-- Support for CJK parameters in form posts
-
-
-0.14.1 (2012-10-01)
-+++++++++++++++++++
-
-- Python 3.3 Compatibility
-- Simplify default accept-encoding
-- Bugfixes
-
-
-0.14.0 (2012-09-02)
-++++++++++++++++++++
-
-- No more iter_content errors if already downloaded.
-
-0.13.9 (2012-08-25)
-+++++++++++++++++++
-
-- Fix for OAuth + POSTs
-- Remove exception eating from dispatch_hook
-- General bugfixes
-
-0.13.8 (2012-08-21)
-+++++++++++++++++++
-
-- Incredible Link header support :)
-
-0.13.7 (2012-08-19)
-+++++++++++++++++++
-
-- Support for (key, value) lists everywhere.
-- Digest Authentication improvements.
-- Ensure proxy exclusions work properly.
-- Clearer UnicodeError exceptions.
-- Automatic casting of URLs to strings (fURL and such)
-- Bugfixes.
-
-0.13.6 (2012-08-06)
-+++++++++++++++++++
-
-- Long awaited fix for hanging connections!
-
-0.13.5 (2012-07-27)
-+++++++++++++++++++
-
-- Packaging fix
-
-0.13.4 (2012-07-27)
-+++++++++++++++++++
-
-- GSSAPI/Kerberos authentication!
-- App Engine 2.7 Fixes!
-- Fix leaking connections (from urllib3 update)
-- OAuthlib path hack fix
-- OAuthlib URL parameters fix.
-
-0.13.3 (2012-07-12)
-+++++++++++++++++++
-
-- Use simplejson if available.
-- Do not hide SSLErrors behind Timeouts.
-- Fixed param handling with urls containing fragments.
-- Significantly improved information in User Agent.
-- client certificates are ignored when verify=False
-
-0.13.2 (2012-06-28)
-+++++++++++++++++++
-
-- Zero dependencies (once again)!
-- New: Response.reason
-- Sign querystring parameters in OAuth 1.0
-- Client certificates no longer ignored when verify=False
-- Add openSUSE certificate support
-
-0.13.1 (2012-06-07)
-+++++++++++++++++++
-
-- Allow passing a file or file-like object as data.
-- Allow hooks to return responses that indicate errors.
-- Fix Response.text and Response.json for body-less responses.
-
-0.13.0 (2012-05-29)
-+++++++++++++++++++
-
-- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
-- Allow disabling of cookie persistence.
-- New implementation of safe_mode
-- cookies.get now supports default argument
-- Session cookies not saved when Session.request is called with return_response=False
-- Env: no_proxy support.
-- RequestsCookieJar improvements.
-- Various bug fixes.
-
-0.12.1 (2012-05-08)
-+++++++++++++++++++
-
-- New ``Response.json`` property.
-- Ability to add string file uploads.
-- Fix out-of-range issue with iter_lines.
-- Fix iter_content default size.
-- Fix POST redirects containing files.
-
-0.12.0 (2012-05-02)
-+++++++++++++++++++
-
-- EXPERIMENTAL OAUTH SUPPORT!
-- Proper CookieJar-backed cookies interface with awesome dict-like interface.
-- Speed fix for non-iterated content chunks.
-- Move ``pre_request`` to a more usable place.
-- New ``pre_send`` hook.
-- Lazily encode data, params, files.
-- Load system Certificate Bundle if ``certify`` isn't available.
-- Cleanups, fixes.
-
-0.11.2 (2012-04-22)
-+++++++++++++++++++
-
-- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
-- Infinite digest auth redirect fix.
-- Multi-part file upload improvements.
-- Fix decoding of invalid %encodings in URLs.
-- If there is no content in a response don't throw an error the second time that content is attempted to be read.
-- Upload data on redirects.
-
-0.11.1 (2012-03-30)
-+++++++++++++++++++
-
-* POST redirects now break RFC to do what browsers do: Follow up with a GET.
-* New ``strict_mode`` configuration to disable new redirect behavior.
-
-
-0.11.0 (2012-03-14)
-+++++++++++++++++++
-
-* Private SSL Certificate support
-* Remove select.poll from Gevent monkeypatching
-* Remove redundant generator for chunked transfer encoding
-* Fix: Response.ok raises Timeout Exception in safe_mode
-
-0.10.8 (2012-03-09)
-+++++++++++++++++++
-
-* Generate chunked ValueError fix
-* Proxy configuration by environment variables
-* Simplification of iter_lines.
-* New `trust_env` configuration for disabling system/environment hints.
-* Suppress cookie errors.
-
-0.10.7 (2012-03-07)
-+++++++++++++++++++
-
-* `encode_uri` = False
-
-0.10.6 (2012-02-25)
-+++++++++++++++++++
-
-* Allow '=' in cookies.
-
-0.10.5 (2012-02-25)
-+++++++++++++++++++
-
-* Response body with 0 content-length fix.
-* New async.imap.
-* Don't fail on netrc.
-
-
-0.10.4 (2012-02-20)
-+++++++++++++++++++
-
-* Honor netrc.
-
-0.10.3 (2012-02-20)
-+++++++++++++++++++
-
-* HEAD requests don't follow redirects anymore.
-* raise_for_status() doesn't raise for 3xx anymore.
-* Make Session objects picklable.
-* ValueError for invalid schema URLs.
-
-0.10.2 (2012-01-15)
-+++++++++++++++++++
-
-* Vastly improved URL quoting.
-* Additional allowed cookie key values.
-* Attempted fix for "Too many open files" Error
-* Replace unicode errors on first pass, no need for second pass.
-* Append '/' to bare-domain urls before query insertion.
-* Exceptions now inherit from RuntimeError.
-* Binary uploads + auth fix.
-* Bugfixes.
-
-
-0.10.1 (2012-01-23)
-+++++++++++++++++++
-
-* PYTHON 3 SUPPORT!
-* Dropped 2.5 Support. (*Backwards Incompatible*)
-
-0.10.0 (2012-01-21)
-+++++++++++++++++++
-
-* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
-* New ``Response.text`` is unicode-only.
-* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
-* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
-* Removal of `decode_unicode`. (*Backwards Incompatible*)
-* New multiple-hooks system.
-* New ``Response.register_hook`` for registering hooks within the pipeline.
-* ``Response.url`` is now Unicode.
-
-0.9.3 (2012-01-18)
-++++++++++++++++++
-
-* SSL verify=False bugfix (apparent on windows machines).
-
-0.9.2 (2012-01-18)
-++++++++++++++++++
-
-* Asynchronous async.send method.
-* Support for proper chunk streams with boundaries.
-* session argument for Session classes.
-* Print entire hook tracebacks, not just exception instance.
-* Fix response.iter_lines from pending next line.
-* Fix but in HTTP-digest auth w/ URI having query strings.
-* Fix in Event Hooks section.
-* Urllib3 update.
-
-
-0.9.1 (2012-01-06)
-++++++++++++++++++
-
-* danger_mode for automatic Response.raise_for_status()
-* Response.iter_lines refactor
-
-0.9.0 (2011-12-28)
-++++++++++++++++++
-
-* verify ssl is default.
-
-
-0.8.9 (2011-12-28)
-++++++++++++++++++
-
-* Packaging fix.
-
-
-0.8.8 (2011-12-28)
-++++++++++++++++++
-
-* SSL CERT VERIFICATION!
-* Release of Cerifi: Mozilla's cert list.
-* New 'verify' argument for SSL requests.
-* Urllib3 update.
-
-0.8.7 (2011-12-24)
-++++++++++++++++++
-
-* iter_lines last-line truncation fix
-* Force safe_mode for async requests
-* Handle safe_mode exceptions more consistently
-* Fix iteration on null responses in safe_mode
-
-0.8.6 (2011-12-18)
-++++++++++++++++++
-
-* Socket timeout fixes.
-* Proxy Authorization support.
-
-0.8.5 (2011-12-14)
-++++++++++++++++++
-
-* Response.iter_lines!
-
-0.8.4 (2011-12-11)
-++++++++++++++++++
-
-* Prefetch bugfix.
-* Added license to installed version.
-
-0.8.3 (2011-11-27)
-++++++++++++++++++
-
-* Converted auth system to use simpler callable objects.
-* New session parameter to API methods.
-* Display full URL while logging.
-
-0.8.2 (2011-11-19)
-++++++++++++++++++
-
-* New Unicode decoding system, based on over-ridable `Response.encoding`.
-* Proper URL slash-quote handling.
-* Cookies with ``[``, ``]``, and ``_`` allowed.
-
-0.8.1 (2011-11-15)
-++++++++++++++++++
-
-* URL Request path fix
-* Proxy fix.
-* Timeouts fix.
-
-0.8.0 (2011-11-13)
-++++++++++++++++++
-
-* Keep-alive support!
-* Complete removal of Urllib2
-* Complete removal of Poster
-* Complete removal of CookieJars
-* New ConnectionError raising
-* Safe_mode for error catching
-* prefetch parameter for request methods
-* OPTION method
-* Async pool size throttling
-* File uploads send real names
-* Vendored in urllib3
-
-0.7.6 (2011-11-07)
-++++++++++++++++++
-
-* Digest authentication bugfix (attach query data to path)
-
-0.7.5 (2011-11-04)
-++++++++++++++++++
-
-* Response.content = None if there was an invalid response.
-* Redirection auth handling.
-
-0.7.4 (2011-10-26)
-++++++++++++++++++
-
-* Session Hooks fix.
-
-0.7.3 (2011-10-23)
-++++++++++++++++++
-
-* Digest Auth fix.
-
-
-0.7.2 (2011-10-23)
-++++++++++++++++++
-
-* PATCH Fix.
-
-
-0.7.1 (2011-10-23)
-++++++++++++++++++
-
-* Move away from urllib2 authentication handling.
-* Fully Remove AuthManager, AuthObject, &c.
-* New tuple-based auth system with handler callbacks.
-
-
-0.7.0 (2011-10-22)
-++++++++++++++++++
-
-* Sessions are now the primary interface.
-* Deprecated InvalidMethodException.
-* PATCH fix.
-* New config system (no more global settings).
-
-
-0.6.6 (2011-10-19)
-++++++++++++++++++
-
-* Session parameter bugfix (params merging).
-
-
-0.6.5 (2011-10-18)
-++++++++++++++++++
-
-* Offline (fast) test suite.
-* Session dictionary argument merging.
-
-
-0.6.4 (2011-10-13)
-++++++++++++++++++
-
-* Automatic decoding of unicode, based on HTTP Headers.
-* New ``decode_unicode`` setting.
-* Removal of ``r.read/close`` methods.
-* New ``r.faw`` interface for advanced response usage.*
-* Automatic expansion of parameterized headers.
-
-
-0.6.3 (2011-10-13)
-++++++++++++++++++
-
-* Beautiful ``requests.async`` module, for making async requests w/ gevent.
-
-
-0.6.2 (2011-10-09)
-++++++++++++++++++
-
-* GET/HEAD obeys allow_redirects=False.
-
-
-0.6.1 (2011-08-20)
-++++++++++++++++++
-
-* Enhanced status codes experience ``\o/``
-* Set a maximum number of redirects (``settings.max_redirects``)
-* Full Unicode URL support
-* Support for protocol-less redirects.
-* Allow for arbitrary request types.
-* Bugfixes
-
-
-0.6.0 (2011-08-17)
-++++++++++++++++++
-
-* New callback hook system
-* New persistent sessions object and context manager
-* Transparent Dict-cookie handling
-* Status code reference object
-* Removed Response.cached
-* Added Response.request
-* All args are kwargs
-* Relative redirect support
-* HTTPError handling improvements
-* Improved https testing
-* Bugfixes
-
-
-0.5.1 (2011-07-23)
-++++++++++++++++++
-
-* International Domain Name Support!
-* Access headers without fetching entire body (``read()``)
-* Use lists as dicts for parameters
-* Add Forced Basic Authentication
-* Forced Basic is default authentication type
-* ``python-requests.org`` default User-Agent header
-* CaseInsensitiveDict lower-case caching
-* Response.history bugfix
-
-
-0.5.0 (2011-06-21)
-++++++++++++++++++
-
-* PATCH Support
-* Support for Proxies
-* HTTPBin Test Suite
-* Redirect Fixes
-* settings.verbose stream writing
-* Querystrings for all methods
-* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised
-  ``r.requests.get('hwe://blah'); r.raise_for_status()``
-
-
-0.4.1 (2011-05-22)
-++++++++++++++++++
-
-* Improved Redirection Handling
-* New 'allow_redirects' param for following non-GET/HEAD Redirects
-* Settings module refactoring
-
-
-0.4.0 (2011-05-15)
-++++++++++++++++++
-
-* Response.history: list of redirected responses
-* Case-Insensitive Header Dictionaries!
-* Unicode URLs
-
-
-0.3.4 (2011-05-14)
-++++++++++++++++++
-
-* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
-* Internal Refactor
-* Bytes data upload Bugfix
-
-
-
-0.3.3 (2011-05-12)
-++++++++++++++++++
-
-* Request timeouts
-* Unicode url-encoded data
-* Settings context manager and module
-
-
-0.3.2 (2011-04-15)
-++++++++++++++++++
-
-* Automatic Decompression of GZip Encoded Content
-* AutoAuth Support for Tupled HTTP Auth
-
-
-0.3.1 (2011-04-01)
-++++++++++++++++++
-
-* Cookie Changes
-* Response.read()
-* Poster fix
-
-
-0.3.0 (2011-02-25)
-++++++++++++++++++
-
-* Automatic Authentication API Change
-* Smarter Query URL Parameterization
-* Allow file uploads and POST data together
-* New Authentication Manager System
-    - Simpler Basic HTTP System
-    - Supports all build-in urllib2 Auths
-    - Allows for custom Auth Handlers
-
-
-0.2.4 (2011-02-19)
-++++++++++++++++++
-
-* Python 2.5 Support
-* PyPy-c v1.4 Support
-* Auto-Authentication tests
-* Improved Request object constructor
-
-0.2.3 (2011-02-15)
-++++++++++++++++++
-
-* New HTTPHandling Methods
-    - Response.__nonzero__ (false if bad HTTP Status)
-    - Response.ok (True if expected HTTP Status)
-    - Response.error (Logged HTTPError if bad HTTP Status)
-    - Response.raise_for_status() (Raises stored HTTPError)
-
-
-0.2.2 (2011-02-14)
-++++++++++++++++++
-
-* Still handles request in the event of an HTTPError. (Issue #2)
-* Eventlet and Gevent Monkeypatch support.
-* Cookie Support (Issue #1)
-
-
-0.2.1 (2011-02-14)
-++++++++++++++++++
-
-* Added file attribute to POST and PUT requests for multipart-encode file uploads.
-* Added Request.url attribute for context and redirects
-
-
-0.2.0 (2011-02-14)
-++++++++++++++++++
-
-* Birth!
-
-
-0.0.1 (2011-02-13)
-++++++++++++++++++
-
-* Frustration
-* Conception
-
-
-
diff --git a/python/ext-libs/requests-2.10.0.dist-info/RECORD b/python/ext-libs/requests-2.10.0.dist-info/RECORD
deleted file mode 100644
index d04913d..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/RECORD
+++ /dev/null
@@ -1,169 +0,0 @@
-requests/__init__.py,sha256=ZaQ9ZpESi8Zhi-SHxMJwAqs_7En4GlvjEa-uJOQaQxc,2215
-requests/adapters.py,sha256=eKKxnQoL8yFVb3Hh607POK0dBm2gu50fgQTtSX-lJy8,18680
-requests/api.py,sha256=MCiVT9TG56jThCzrfy1pLQVOCXxaTOzNHj8HNKDu1P4,5795
-requests/auth.py,sha256=YBFhR0KH8og250hDRTYgC9KBqMVxh9yMjpZh0-kXefI,8075
-requests/cacert.pem,sha256=5xzWFRrSP0ZsXiW6emg8UQ_w497lT4qWCv32OO8R1ME,344712
-requests/certs.py,sha256=RX5H1cSiB52Hbjh_qv3eMW8hqHEF_r4Qiv_4AwfziuU,613
-requests/compat.py,sha256=hq7CKHoykNs8yzKPAJiOkHQJPoNp9A89MufTdhlCniY,1469
-requests/cookies.py,sha256=ShZ9lzb0qhTV03nhafCMQXrnugBGr32M30FEo5u6rkQ,17564
-requests/exceptions.py,sha256=lyzK5I-zkNCN9zfYGJgkDMvtt3akjw0QUq4q8pYI4wA,2776
-requests/hooks.py,sha256=jSdmZjB5oRJ6xmKM3VtqkYkq8oFTwx6gqG9AaUWpAlw,767
-requests/models.py,sha256=17T7cPYBk4JggKQxaeGMWWDB5KqLG8u6QiBXaH7IFYY,29530
-requests/sessions.py,sha256=K6tPEgZ4giLZ5sQg3QymC26lcsmFvwtZ044g2hrY3WE,24897
-requests/status_codes.py,sha256=QCHM7hbByBJCcPxnN63tCBYkJLjE04umd5m5DSZk-FE,3315
-requests/structures.py,sha256=TJ-VyA0GJwzMtLw2A9AqiHWsjvnzjf0s1AHGYc7n-pM,3017
-requests/utils.py,sha256=KQwDsJdE5DMGmSdRmBVHsdIVDak4rmK41gKOYR9WhfU,22228
-requests/packages/__init__.py,sha256=CVheqNRcXIkAi5037RhxeqbAqd0QhrK1o9R9kS2xvuI,1384
-requests/packages/chardet/__init__.py,sha256=XuTKCYOR7JwsoHxqZTYH86LVyMDbDI3s1s0W_qoGEBM,1295
-requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594
-requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684
-requests/packages/chardet/chardetect.py,sha256=f4299UZG6uWd3i3r_N0OdrFj2sA9JFI54PAmDLAFmWA,2504
-requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226
-requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791
-requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902
-requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318
-requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157
-requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335
-requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782
-requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187
-requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839
-requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678
-requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978
-requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675
-requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872
-requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676
-requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011
-requests/packages/chardet/gb2312prober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681
-requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359
-requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315
-requests/packages/chardet/jpcntx.py,sha256=yftmp0QaF6RJO5SJs8I7LU5AF4rwP23ebeCQL4BM1OY,19348
-requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784
-requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725
-requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628
-requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318
-requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536
-requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275
-requests/packages/chardet/latin1prober.py,sha256=238JHOxH8aRudJY2NmeSv5s7i0Qe3GuklIU3HlYybvg,5232
-requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268
-requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967
-requests/packages/chardet/mbcssm.py,sha256=IKwJXyxu34n6NojmxVxC60MLFtJKm-hIfxaFEnb3uBA,19590
-requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793
-requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291
-requests/packages/chardet/sjisprober.py,sha256=UYOmiMDzttYIkSDoOB08UEagivJpUXz4tuWiWzTiOr8,3764
-requests/packages/chardet/universaldetector.py,sha256=h-E2x6XSCzlNjycYWG0Fe4Cf1SGdaIzUNu2HCphpMZA,6840
-requests/packages/chardet/utf8prober.py,sha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652
-requests/packages/urllib3/__init__.py,sha256=dgnv71-LhuNA7j-ov1A7TL7h4Ajq5JT1fONyGO8ivCg,2854
-requests/packages/urllib3/_collections.py,sha256=RP-cHyTx4AgYwvoETK8q1IVRbWFJnE0VV692ZHSbU68,10553
-requests/packages/urllib3/connection.py,sha256=QCmkelYgtbc06DfJtgs22na78kRTLCTbLb-OSWLbt-A,11617
-requests/packages/urllib3/connectionpool.py,sha256=ws48c_kVg09RUxuqpusWg_oOXIRTf25y3SLVN6yuCcQ,32713
-requests/packages/urllib3/exceptions.py,sha256=zGjhZCR1wefEnCN5b7WouQ3UhXesJ2bRKYIeWusaFJs,5599
-requests/packages/urllib3/fields.py,sha256=WUMvCLvnw7XemBq6AmCgNPJwyIJL_vWaMHaA2FLlscM,5931
-requests/packages/urllib3/filepost.py,sha256=NvLlFsdt8ih_Q4S2ekQF3CJG0nOXs32YI-G04_AdT2g,2320
-requests/packages/urllib3/poolmanager.py,sha256=W09uewCGoKSzezei0DwaTXT7kuvsF2elO2wUXWfiAco,9614
-requests/packages/urllib3/request.py,sha256=jET7OvA3FSjxABBRGhCyMdPvM9XuJA6df9gRhkJiJiY,5988
-requests/packages/urllib3/response.py,sha256=Xx9-heNZOrcx5suX-I1PYT_uEbhVtqOHqkui309g9XY,18542
-requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-requests/packages/urllib3/contrib/appengine.py,sha256=VP10uoVbNpH0kYVbOFd7dN5dtDcVfEytMoriKsDBBuI,7938
-requests/packages/urllib3/contrib/ntlmpool.py,sha256=r-vMDMXAGbix9a7-IhbKVTATmAst-5g4hKYOLf8Kd5M,4531
-requests/packages/urllib3/contrib/pyopenssl.py,sha256=JsdAh0gL4XvQzhOEBRoFtJN91qLf1LFIDEFZs95445I,11778
-requests/packages/urllib3/contrib/socks.py,sha256=mAHrChjtbElhGBoHOGq5CqRjtmdFiS_YjnTr2HlUNYU,5669
-requests/packages/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109
-requests/packages/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935
-requests/packages/urllib3/packages/six.py,sha256=U-rO-WBrFS8PxHeamSl6okKCjqPF18NhiZb0qPZ67XM,11628
-requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460
-requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778
-requests/packages/urllib3/util/__init__.py,sha256=n2QE9_0Bb6u8tf7LUc4qKe8V-Hz9G8lEOc9j_30Q8d0,892
-requests/packages/urllib3/util/connection.py,sha256=6PvDBlK_6QDLHzEDT-uEMhqKcDoSuRO43Vtb4IXfkzQ,3380
-requests/packages/urllib3/util/request.py,sha256=ZMDewRK-mjlK72szGIIjzYnLIn-zPP0WgJUMjKeZ6Tg,2128
-requests/packages/urllib3/util/response.py,sha256=1UFd5TIp9MyBp4xgnZoyQZscZVPPr0tWRaXNR5w_vds,2165
-requests/packages/urllib3/util/retry.py,sha256=WjO8iuwSR-TFJUPW1OhuuhvNReZHJ8v_QEPThdUzOLE,10350
-requests/packages/urllib3/util/ssl_.py,sha256=eVPo9L7d8x8g1wnBi2swWM49AXu5I2pKXKAIHs2ar_k,11624
-requests/packages/urllib3/util/timeout.py,sha256=ioAIYptFyBG7eU_r8_ZmO45hpj1dJE6WCvrGR9dNFjs,9596
-requests/packages/urllib3/util/url.py,sha256=EcX4ZfmgKWcqM4sY9FlC-yN4y_snuURPV0TpUPHNjnc,5879
-requests-2.10.0.dist-info/DESCRIPTION.rst,sha256=t-6boSsqDF3EA_HZ30bzY_sH_azB0ecWOP9LHFplx8Y,36859
-requests-2.10.0.dist-info/METADATA,sha256=4W_oYsxZeTaYSe89rVL-S81mWftPyk-qyA5l417Ls3M,38011
-requests-2.10.0.dist-info/RECORD,,
-requests-2.10.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110
-requests-2.10.0.dist-info/metadata.json,sha256=x_llR_pnCzf5Knf36VDDh6QjUdUkW0lHbR21hdFihhs,1281
-requests-2.10.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
-requests/packages/chardet/sbcsgroupprober.pyc,,
-requests/status_codes.pyc,,
-requests/packages/chardet/mbcsgroupprober.pyc,,
-requests/certs.pyc,,
-requests/packages/chardet/langhebrewmodel.pyc,,
-requests/packages/urllib3/contrib/pyopenssl.pyc,,
-requests/packages/urllib3/util/__init__.pyc,,
-requests/packages/chardet/latin1prober.pyc,,
-requests/api.pyc,,
-requests/packages/chardet/big5prober.pyc,,
-requests/packages/urllib3/contrib/__init__.pyc,,
-requests/packages/chardet/constants.pyc,,
-requests/packages/chardet/euckrfreq.pyc,,
-requests/packages/chardet/codingstatemachine.pyc,,
-requests/compat.pyc,,
-requests/packages/urllib3/packages/six.pyc,,
-requests/structures.pyc,,
-requests/sessions.pyc,,
-requests/models.pyc,,
-requests/packages/chardet/universaldetector.pyc,,
-requests/packages/chardet/escsm.pyc,,
-requests/packages/chardet/charsetprober.pyc,,
-requests/packages/urllib3/util/retry.pyc,,
-requests/packages/chardet/chardetect.pyc,,
-requests/packages/chardet/gb2312freq.pyc,,
-requests/packages/urllib3/util/ssl_.pyc,,
-requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyc,,
-requests/packages/urllib3/poolmanager.pyc,,
-requests/packages/chardet/euctwfreq.pyc,,
-requests/packages/chardet/jisfreq.pyc,,
-requests/auth.pyc,,
-requests/adapters.pyc,,
-requests/packages/chardet/jpcntx.pyc,,
-requests/packages/urllib3/request.pyc,,
-requests/packages/urllib3/fields.pyc,,
-requests/packages/urllib3/util/url.pyc,,
-requests/packages/chardet/chardistribution.pyc,,
-requests/hooks.pyc,,
-requests/packages/urllib3/filepost.pyc,,
-requests/packages/chardet/langthaimodel.pyc,,
-requests/packages/chardet/charsetgroupprober.pyc,,
-requests/packages/urllib3/packages/__init__.pyc,,
-requests/packages/urllib3/contrib/socks.pyc,,
-requests/packages/urllib3/util/request.pyc,,
-requests/packages/chardet/compat.pyc,,
-requests/packages/chardet/utf8prober.pyc,,
-requests/packages/urllib3/util/response.pyc,,
-requests/packages/chardet/sbcharsetprober.pyc,,
-requests/packages/chardet/gb2312prober.pyc,,
-requests/packages/chardet/mbcharsetprober.pyc,,
-requests/packages/chardet/langbulgarianmodel.pyc,,
-requests/utils.pyc,,
-requests/packages/__init__.pyc,,
-requests/packages/chardet/__init__.pyc,,
-requests/packages/urllib3/connection.pyc,,
-requests/packages/urllib3/util/connection.pyc,,
-requests/packages/chardet/sjisprober.pyc,,
-requests/packages/chardet/langcyrillicmodel.pyc,,
-requests/packages/urllib3/contrib/appengine.pyc,,
-requests/packages/chardet/langgreekmodel.pyc,,
-requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc,,
-requests/packages/urllib3/util/timeout.pyc,,
-requests/packages/chardet/big5freq.pyc,,
-requests/packages/chardet/langhungarianmodel.pyc,,
-requests/packages/urllib3/_collections.pyc,,
-requests/packages/chardet/euckrprober.pyc,,
-requests/packages/chardet/cp949prober.pyc,,
-requests/packages/chardet/mbcssm.pyc,,
-requests/packages/urllib3/packages/ordered_dict.pyc,,
-requests/packages/urllib3/contrib/ntlmpool.pyc,,
-requests/cookies.pyc,,
-requests/packages/chardet/eucjpprober.pyc,,
-requests/packages/chardet/hebrewprober.pyc,,
-requests/packages/urllib3/exceptions.pyc,,
-requests/packages/urllib3/__init__.pyc,,
-requests/packages/chardet/escprober.pyc,,
-requests/packages/urllib3/response.pyc,,
-requests/packages/urllib3/connectionpool.pyc,,
-requests/__init__.pyc,,
-requests/exceptions.pyc,,
-requests/packages/chardet/euctwprober.pyc,,
diff --git a/python/ext-libs/requests-2.10.0.dist-info/WHEEL b/python/ext-libs/requests-2.10.0.dist-info/WHEEL
deleted file mode 100644
index 8b6dd1b..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/WHEEL
+++ /dev/null
@@ -1,6 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.29.0)
-Root-Is-Purelib: true
-Tag: py2-none-any
-Tag: py3-none-any
-
diff --git a/python/ext-libs/requests-2.10.0.dist-info/metadata.json b/python/ext-libs/requests-2.10.0.dist-info/metadata.json
deleted file mode 100644
index c4cd13d..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Natural Language :: English", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Pytho [...]
\ No newline at end of file
diff --git a/python/ext-libs/requests-2.10.0.dist-info/top_level.txt b/python/ext-libs/requests-2.10.0.dist-info/top_level.txt
deleted file mode 100644
index f229360..0000000
--- a/python/ext-libs/requests-2.10.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-requests
diff --git a/python/ext-libs/requests/__init__.py b/python/ext-libs/requests/__init__.py
deleted file mode 100644
index 82c0f78..0000000
--- a/python/ext-libs/requests/__init__.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#   __
-#  /__)  _  _     _   _ _/   _
-# / (   (- (/ (/ (- _)  /  _)
-#          /
-
-"""
-Requests HTTP library
-~~~~~~~~~~~~~~~~~~~~~
-
-Requests is an HTTP library, written in Python, for human beings. Basic GET
-usage:
-
-   >>> import requests
-   >>> r = requests.get('https://www.python.org')
-   >>> r.status_code
-   200
-   >>> 'Python is a programming language' in r.content
-   True
-
-... or POST:
-
-   >>> payload = dict(key1='value1', key2='value2')
-   >>> r = requests.post('http://httpbin.org/post', data=payload)
-   >>> print(r.text)
-   {
-     ...
-     "form": {
-       "key2": "value2",
-       "key1": "value1"
-     },
-     ...
-   }
-
-The other HTTP methods are supported - see `requests.api`. Full documentation
-is at <http://python-requests.org>.
-
-:copyright: (c) 2016 by Kenneth Reitz.
-:license: Apache 2.0, see LICENSE for more details.
-
-"""
-
-__title__ = 'requests'
-__version__ = '2.10.0'
-__build__ = 0x021000
-__author__ = 'Kenneth Reitz'
-__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2016 Kenneth Reitz'
-
-# Attempt to enable urllib3's SNI support, if possible
-try:
-    from .packages.urllib3.contrib import pyopenssl
-    pyopenssl.inject_into_urllib3()
-except ImportError:
-    pass
-
-import warnings
-
-# urllib3's DependencyWarnings should be silenced.
-from .packages.urllib3.exceptions import DependencyWarning
-warnings.simplefilter('ignore', DependencyWarning)
-
-from . import utils
-from .models import Request, Response, PreparedRequest
-from .api import request, get, head, post, patch, put, delete, options
-from .sessions import session, Session
-from .status_codes import codes
-from .exceptions import (
-    RequestException, Timeout, URLRequired,
-    TooManyRedirects, HTTPError, ConnectionError,
-    FileModeWarning, ConnectTimeout, ReadTimeout
-)
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-try:  # Python 2.7+
-    from logging import NullHandler
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-
-logging.getLogger(__name__).addHandler(NullHandler())
-
-import warnings
-
-# FileModeWarnings go off per the default.
-warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/python/ext-libs/requests/adapters.py b/python/ext-libs/requests/adapters.py
deleted file mode 100644
index 23e448f..0000000
--- a/python/ext-libs/requests/adapters.py
+++ /dev/null
@@ -1,483 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.adapters
-~~~~~~~~~~~~~~~~~
-
-This module contains the transport adapters that Requests uses to define
-and maintain connections.
-"""
-
-import os.path
-import socket
-
-from .models import Response
-from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
-from .packages.urllib3.response import HTTPResponse
-from .packages.urllib3.util import Timeout as TimeoutSauce
-from .packages.urllib3.util.retry import Retry
-from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
-                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
-                    select_proxy, to_native_string)
-from .structures import CaseInsensitiveDict
-from .packages.urllib3.exceptions import ClosedPoolError
-from .packages.urllib3.exceptions import ConnectTimeoutError
-from .packages.urllib3.exceptions import HTTPError as _HTTPError
-from .packages.urllib3.exceptions import MaxRetryError
-from .packages.urllib3.exceptions import NewConnectionError
-from .packages.urllib3.exceptions import ProxyError as _ProxyError
-from .packages.urllib3.exceptions import ProtocolError
-from .packages.urllib3.exceptions import ReadTimeoutError
-from .packages.urllib3.exceptions import SSLError as _SSLError
-from .packages.urllib3.exceptions import ResponseError
-from .cookies import extract_cookies_to_jar
-from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
-                         ProxyError, RetryError, InvalidSchema)
-from .auth import _basic_auth_str
-
-try:
-    from .packages.urllib3.contrib.socks import SOCKSProxyManager
-except ImportError:
-    def SOCKSProxyManager(*args, **kwargs):
-        raise InvalidSchema("Missing dependencies for SOCKS support.")
-
-DEFAULT_POOLBLOCK = False
-DEFAULT_POOLSIZE = 10
-DEFAULT_RETRIES = 0
-DEFAULT_POOL_TIMEOUT = None
-
-
-class BaseAdapter(object):
-    """The Base Transport Adapter"""
-
-    def __init__(self):
-        super(BaseAdapter, self).__init__()
-
-    def send(self):
-        raise NotImplementedError
-
-    def close(self):
-        raise NotImplementedError
-
-
-class HTTPAdapter(BaseAdapter):
-    """The built-in HTTP Adapter for urllib3.
-
-    Provides a general-case interface for Requests sessions to contact HTTP and
-    HTTPS urls by implementing the Transport Adapter interface. This class will
-    usually be created by the :class:`Session <Session>` class under the
-    covers.
-
-    :param pool_connections: The number of urllib3 connection pools to cache.
-    :param pool_maxsize: The maximum number of connections to save in the pool.
-    :param max_retries: The maximum number of retries each connection
-        should attempt. Note, this applies only to failed DNS lookups, socket
-        connections and connection timeouts, never to requests where data has
-        made it to the server. By default, Requests does not retry failed
-        connections. If you need granular control over the conditions under
-        which we retry a request, import urllib3's ``Retry`` class and pass
-        that instead.
-    :param pool_block: Whether the connection pool should block for connections.
-
-    Usage::
-
-      >>> import requests
-      >>> s = requests.Session()
-      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
-      >>> s.mount('http://', a)
-    """
-    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
-                 '_pool_block']
-
-    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
-                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
-                 pool_block=DEFAULT_POOLBLOCK):
-        if max_retries == DEFAULT_RETRIES:
-            self.max_retries = Retry(0, read=False)
-        else:
-            self.max_retries = Retry.from_int(max_retries)
-        self.config = {}
-        self.proxy_manager = {}
-
-        super(HTTPAdapter, self).__init__()
-
-        self._pool_connections = pool_connections
-        self._pool_maxsize = pool_maxsize
-        self._pool_block = pool_block
-
-        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
-
-    def __getstate__(self):
-        return dict((attr, getattr(self, attr, None)) for attr in
-                    self.__attrs__)
-
-    def __setstate__(self, state):
-        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
-        # self.poolmanager uses a lambda function, which isn't pickleable.
-        self.proxy_manager = {}
-        self.config = {}
-
-        for attr, value in state.items():
-            setattr(self, attr, value)
-
-        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
-                              block=self._pool_block)
-
-    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
-        """Initializes a urllib3 PoolManager.
-
-        This method should not be called from user code, and is only
-        exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param connections: The number of urllib3 connection pools to cache.
-        :param maxsize: The maximum number of connections to save in the pool.
-        :param block: Block when no free connections are available.
-        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
-        """
-        # save these values for pickling
-        self._pool_connections = connections
-        self._pool_maxsize = maxsize
-        self._pool_block = block
-
-        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
-                                       block=block, strict=True, **pool_kwargs)
-
-    def proxy_manager_for(self, proxy, **proxy_kwargs):
-        """Return urllib3 ProxyManager for the given proxy.
-
-        This method should not be called from user code, and is only
-        exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param proxy: The proxy to return a urllib3 ProxyManager for.
-        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
-        :returns: ProxyManager
-        """
-        if proxy in self.proxy_manager:
-            manager = self.proxy_manager[proxy]
-        elif proxy.lower().startswith('socks'):
-            username, password = get_auth_from_url(proxy)
-            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
-                proxy,
-                username=username,
-                password=password,
-                num_pools=self._pool_connections,
-                maxsize=self._pool_maxsize,
-                block=self._pool_block,
-                **proxy_kwargs
-            )
-        else:
-            proxy_headers = self.proxy_headers(proxy)
-            manager = self.proxy_manager[proxy] = proxy_from_url(
-                proxy,
-                proxy_headers=proxy_headers,
-                num_pools=self._pool_connections,
-                maxsize=self._pool_maxsize,
-                block=self._pool_block,
-                **proxy_kwargs)
-
-        return manager
-
-    def cert_verify(self, conn, url, verify, cert):
-        """Verify a SSL certificate. This method should not be called from user
-        code, and is only exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param conn: The urllib3 connection object associated with the cert.
-        :param url: The requested URL.
-        :param verify: Whether we should actually verify the certificate.
-        :param cert: The SSL certificate to verify.
-        """
-        if url.lower().startswith('https') and verify:
-
-            cert_loc = None
-
-            # Allow self-specified cert location.
-            if verify is not True:
-                cert_loc = verify
-
-            if not cert_loc:
-                cert_loc = DEFAULT_CA_BUNDLE_PATH
-
-            if not cert_loc:
-                raise Exception("Could not find a suitable SSL CA certificate bundle.")
-
-            conn.cert_reqs = 'CERT_REQUIRED'
-
-            if not os.path.isdir(cert_loc):
-                conn.ca_certs = cert_loc
-            else:
-                conn.ca_cert_dir = cert_loc
-        else:
-            conn.cert_reqs = 'CERT_NONE'
-            conn.ca_certs = None
-            conn.ca_cert_dir = None
-
-        if cert:
-            if not isinstance(cert, basestring):
-                conn.cert_file = cert[0]
-                conn.key_file = cert[1]
-            else:
-                conn.cert_file = cert
-
-    def build_response(self, req, resp):
-        """Builds a :class:`Response <requests.Response>` object from a urllib3
-        response. This should not be called from user code, and is only exposed
-        for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
-
-        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
-        :param resp: The urllib3 response object.
-        """
-        response = Response()
-
-        # Fallback to None if there's no status_code, for whatever reason.
-        response.status_code = getattr(resp, 'status', None)
-
-        # Make headers case-insensitive.
-        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
-
-        # Set encoding.
-        response.encoding = get_encoding_from_headers(response.headers)
-        response.raw = resp
-        response.reason = response.raw.reason
-
-        if isinstance(req.url, bytes):
-            response.url = req.url.decode('utf-8')
-        else:
-            response.url = req.url
-
-        # Add new cookies from the server.
-        extract_cookies_to_jar(response.cookies, req, resp)
-
-        # Give the Response some context.
-        response.request = req
-        response.connection = self
-
-        return response
-
-    def get_connection(self, url, proxies=None):
-        """Returns a urllib3 connection for the given URL. This should not be
-        called from user code, and is only exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param url: The URL to connect to.
-        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
-        """
-        proxy = select_proxy(url, proxies)
-
-        if proxy:
-            proxy = prepend_scheme_if_needed(proxy, 'http')
-            proxy_manager = self.proxy_manager_for(proxy)
-            conn = proxy_manager.connection_from_url(url)
-        else:
-            # Only scheme should be lower case
-            parsed = urlparse(url)
-            url = parsed.geturl()
-            conn = self.poolmanager.connection_from_url(url)
-
-        return conn
-
-    def close(self):
-        """Disposes of any internal state.
-
-        Currently, this closes the PoolManager and any active ProxyManager,
-        which closes any pooled connections.
-        """
-        self.poolmanager.clear()
-        for proxy in self.proxy_manager.values():
-            proxy.clear()
-
-    def request_url(self, request, proxies):
-        """Obtain the url to use when making the final request.
-
-        If the message is being sent through a HTTP proxy, the full URL has to
-        be used. Otherwise, we should only use the path portion of the URL.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
-        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
-        """
-        proxy = select_proxy(request.url, proxies)
-        scheme = urlparse(request.url).scheme
-
-        is_proxied_http_request = (proxy and scheme != 'https')
-        using_socks_proxy = False
-        if proxy:
-            proxy_scheme = urlparse(proxy).scheme.lower()
-            using_socks_proxy = proxy_scheme.startswith('socks')
-
-        url = request.path_url
-        if is_proxied_http_request and not using_socks_proxy:
-            url = urldefragauth(request.url)
-
-        return url
-
-    def add_headers(self, request, **kwargs):
-        """Add any headers needed by the connection. As of v2.0 this does
-        nothing by default, but is left for overriding by users that subclass
-        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
-        :param kwargs: The keyword arguments from the call to send().
-        """
-        pass
-
-    def proxy_headers(self, proxy):
-        """Returns a dictionary of the headers to add to any request sent
-        through a proxy. This works with urllib3 magic to ensure that they are
-        correctly sent to the proxy, rather than in a tunnelled request if
-        CONNECT is being used.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param proxies: The url of the proxy being used for this request.
-        """
-        headers = {}
-        username, password = get_auth_from_url(proxy)
-
-        if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username,
-                                                             password)
-
-        return headers
-
-    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
-        """Sends PreparedRequest object. Returns Response object.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
-        :param stream: (optional) Whether to stream the request content.
-        :param timeout: (optional) How long to wait for the server to send
-            data before giving up, as a float, or a :ref:`(connect timeout,
-            read timeout) <timeouts>` tuple.
-        :type timeout: float or tuple
-        :param verify: (optional) Whether to verify SSL certificates.
-        :param cert: (optional) Any user-provided SSL certificate to be trusted.
-        :param proxies: (optional) The proxies dictionary to apply to the request.
-        """
-
-        conn = self.get_connection(request.url, proxies)
-
-        self.cert_verify(conn, request.url, verify, cert)
-        url = self.request_url(request, proxies)
-        self.add_headers(request)
-
-        chunked = not (request.body is None or 'Content-Length' in request.headers)
-
-        if isinstance(timeout, tuple):
-            try:
-                connect, read = timeout
-                timeout = TimeoutSauce(connect=connect, read=read)
-            except ValueError as e:
-                # this may raise a string formatting error.
-                err = ("Invalid timeout {0}. Pass a (connect, read) "
-                       "timeout tuple, or a single float to set "
-                       "both timeouts to the same value".format(timeout))
-                raise ValueError(err)
-        else:
-            timeout = TimeoutSauce(connect=timeout, read=timeout)
-
-        try:
-            if not chunked:
-                resp = conn.urlopen(
-                    method=request.method,
-                    url=url,
-                    body=request.body,
-                    headers=request.headers,
-                    redirect=False,
-                    assert_same_host=False,
-                    preload_content=False,
-                    decode_content=False,
-                    retries=self.max_retries,
-                    timeout=timeout
-                )
-
-            # Send the request.
-            else:
-                if hasattr(conn, 'proxy_pool'):
-                    conn = conn.proxy_pool
-
-                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
-
-                try:
-                    low_conn.putrequest(request.method,
-                                        url,
-                                        skip_accept_encoding=True)
-
-                    for header, value in request.headers.items():
-                        low_conn.putheader(header, value)
-
-                    low_conn.endheaders()
-
-                    for i in request.body:
-                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
-                        low_conn.send(b'\r\n')
-                        low_conn.send(i)
-                        low_conn.send(b'\r\n')
-                    low_conn.send(b'0\r\n\r\n')
-
-                    # Receive the response from the server
-                    try:
-                        # For Python 2.7+ versions, use buffering of HTTP
-                        # responses
-                        r = low_conn.getresponse(buffering=True)
-                    except TypeError:
-                        # For compatibility with Python 2.6 versions and back
-                        r = low_conn.getresponse()
-
-                    resp = HTTPResponse.from_httplib(
-                        r,
-                        pool=conn,
-                        connection=low_conn,
-                        preload_content=False,
-                        decode_content=False
-                    )
-                except:
-                    # If we hit any problems here, clean up the connection.
-                    # Then, reraise so that we can handle the actual exception.
-                    low_conn.close()
-                    raise
-
-        except (ProtocolError, socket.error) as err:
-            raise ConnectionError(err, request=request)
-
-        except MaxRetryError as e:
-            if isinstance(e.reason, ConnectTimeoutError):
-                # TODO: Remove this in 3.0.0: see #2811
-                if not isinstance(e.reason, NewConnectionError):
-                    raise ConnectTimeout(e, request=request)
-
-            if isinstance(e.reason, ResponseError):
-                raise RetryError(e, request=request)
-
-            if isinstance(e.reason, _ProxyError):
-                raise ProxyError(e, request=request)
-
-            raise ConnectionError(e, request=request)
-
-        except ClosedPoolError as e:
-            raise ConnectionError(e, request=request)
-
-        except _ProxyError as e:
-            raise ProxyError(e)
-
-        except (_SSLError, _HTTPError) as e:
-            if isinstance(e, _SSLError):
-                raise SSLError(e, request=request)
-            elif isinstance(e, ReadTimeoutError):
-                raise ReadTimeout(e, request=request)
-            else:
-                raise
-
-        return self.build_response(request, resp)
diff --git a/python/ext-libs/requests/api.py b/python/ext-libs/requests/api.py
deleted file mode 100644
index c2068d0..0000000
--- a/python/ext-libs/requests/api.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.api
-~~~~~~~~~~~~
-
-This module implements the Requests API.
-
-:copyright: (c) 2012 by Kenneth Reitz.
-:license: Apache2, see LICENSE for more details.
-
-"""
-
-from . import sessions
-
-
-def request(method, url, **kwargs):
-    """Constructs and sends a :class:`Request <Request>`.
-
-    :param method: method for the new :class:`Request` object.
-    :param url: URL for the new :class:`Request` object.
-    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param json: (optional) json data to send in the body of the :class:`Request`.
-    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
-    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
-    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
-        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
-        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
-        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
-        to add for the file.
-    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
-    :param timeout: (optional) How long to wait for the server to send data
-        before giving up, as a float, or a :ref:`(connect timeout, read
-        timeout) <timeouts>` tuple.
-    :type timeout: float or tuple
-    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
-    :type allow_redirects: bool
-    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
-    :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
-    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
-    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.request('GET', 'http://httpbin.org/get')
-      <Response [200]>
-    """
-
-    # By using the 'with' statement we are sure the session is closed, thus we
-    # avoid leaving sockets open which can trigger a ResourceWarning in some
-    # cases, and look like a memory leak in others.
-    with sessions.Session() as session:
-        return session.request(method=method, url=url, **kwargs)
-
-
-def get(url, params=None, **kwargs):
-    """Sends a GET request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', True)
-    return request('get', url, params=params, **kwargs)
-
-
-def options(url, **kwargs):
-    """Sends a OPTIONS request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', True)
-    return request('options', url, **kwargs)
-
-
-def head(url, **kwargs):
-    """Sends a HEAD request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', False)
-    return request('head', url, **kwargs)
-
-
-def post(url, data=None, json=None, **kwargs):
-    """Sends a POST request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param json: (optional) json data to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('post', url, data=data, json=json, **kwargs)
-
-
-def put(url, data=None, **kwargs):
-    """Sends a PUT request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('put', url, data=data, **kwargs)
-
-
-def patch(url, data=None, **kwargs):
-    """Sends a PATCH request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('patch', url,  data=data, **kwargs)
-
-
-def delete(url, **kwargs):
-    """Sends a DELETE request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('delete', url, **kwargs)
diff --git a/python/ext-libs/requests/auth.py b/python/ext-libs/requests/auth.py
deleted file mode 100644
index 73f8e9d..0000000
--- a/python/ext-libs/requests/auth.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.auth
-~~~~~~~~~~~~~
-
-This module contains the authentication handlers for Requests.
-"""
-
-import os
-import re
-import time
-import hashlib
-import threading
-
-from base64 import b64encode
-
-from .compat import urlparse, str
-from .cookies import extract_cookies_to_jar
-from .utils import parse_dict_header, to_native_string
-from .status_codes import codes
-
-CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
-CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
-
-
-def _basic_auth_str(username, password):
-    """Returns a Basic Auth string."""
-
-    authstr = 'Basic ' + to_native_string(
-        b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
-    )
-
-    return authstr
-
-
-class AuthBase(object):
-    """Base class that all auth implementations derive from"""
-
-    def __call__(self, r):
-        raise NotImplementedError('Auth hooks must be callable.')
-
-
-class HTTPBasicAuth(AuthBase):
-    """Attaches HTTP Basic Authentication to the given Request object."""
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
-
-    def __eq__(self, other):
-        return all([
-            self.username == getattr(other, 'username', None),
-            self.password == getattr(other, 'password', None)
-        ])
-
-    def __ne__(self, other):
-        return not self == other
-
-    def __call__(self, r):
-        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
-        return r
-
-
-class HTTPProxyAuth(HTTPBasicAuth):
-    """Attaches HTTP Proxy Authentication to a given Request object."""
-    def __call__(self, r):
-        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
-        return r
-
-
-class HTTPDigestAuth(AuthBase):
-    """Attaches HTTP Digest Authentication to the given Request object."""
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
-        # Keep state in per-thread local storage
-        self._thread_local = threading.local()
-
-    def init_per_thread_state(self):
-        # Ensure state is initialized just once per-thread
-        if not hasattr(self._thread_local, 'init'):
-            self._thread_local.init = True
-            self._thread_local.last_nonce = ''
-            self._thread_local.nonce_count = 0
-            self._thread_local.chal = {}
-            self._thread_local.pos = None
-            self._thread_local.num_401_calls = None
-
-    def build_digest_header(self, method, url):
-
-        realm = self._thread_local.chal['realm']
-        nonce = self._thread_local.chal['nonce']
-        qop = self._thread_local.chal.get('qop')
-        algorithm = self._thread_local.chal.get('algorithm')
-        opaque = self._thread_local.chal.get('opaque')
-        hash_utf8 = None
-
-        if algorithm is None:
-            _algorithm = 'MD5'
-        else:
-            _algorithm = algorithm.upper()
-        # lambdas assume digest modules are imported at the top level
-        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
-            def md5_utf8(x):
-                if isinstance(x, str):
-                    x = x.encode('utf-8')
-                return hashlib.md5(x).hexdigest()
-            hash_utf8 = md5_utf8
-        elif _algorithm == 'SHA':
-            def sha_utf8(x):
-                if isinstance(x, str):
-                    x = x.encode('utf-8')
-                return hashlib.sha1(x).hexdigest()
-            hash_utf8 = sha_utf8
-
-        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
-
-        if hash_utf8 is None:
-            return None
-
-        # XXX not implemented yet
-        entdig = None
-        p_parsed = urlparse(url)
-        #: path is request-uri defined in RFC 2616 which should not be empty
-        path = p_parsed.path or "/"
-        if p_parsed.query:
-            path += '?' + p_parsed.query
-
-        A1 = '%s:%s:%s' % (self.username, realm, self.password)
-        A2 = '%s:%s' % (method, path)
-
-        HA1 = hash_utf8(A1)
-        HA2 = hash_utf8(A2)
-
-        if nonce == self._thread_local.last_nonce:
-            self._thread_local.nonce_count += 1
-        else:
-            self._thread_local.nonce_count = 1
-        ncvalue = '%08x' % self._thread_local.nonce_count
-        s = str(self._thread_local.nonce_count).encode('utf-8')
-        s += nonce.encode('utf-8')
-        s += time.ctime().encode('utf-8')
-        s += os.urandom(8)
-
-        cnonce = (hashlib.sha1(s).hexdigest()[:16])
-        if _algorithm == 'MD5-SESS':
-            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
-
-        if not qop:
-            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
-        elif qop == 'auth' or 'auth' in qop.split(','):
-            noncebit = "%s:%s:%s:%s:%s" % (
-                nonce, ncvalue, cnonce, 'auth', HA2
-                )
-            respdig = KD(HA1, noncebit)
-        else:
-            # XXX handle auth-int.
-            return None
-
-        self._thread_local.last_nonce = nonce
-
-        # XXX should the partial digests be encoded too?
-        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
-               'response="%s"' % (self.username, realm, nonce, path, respdig)
-        if opaque:
-            base += ', opaque="%s"' % opaque
-        if algorithm:
-            base += ', algorithm="%s"' % algorithm
-        if entdig:
-            base += ', digest="%s"' % entdig
-        if qop:
-            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
-
-        return 'Digest %s' % (base)
-
-    def handle_redirect(self, r, **kwargs):
-        """Reset num_401_calls counter on redirects."""
-        if r.is_redirect:
-            self._thread_local.num_401_calls = 1
-
-    def handle_401(self, r, **kwargs):
-        """Takes the given response and tries digest-auth, if needed."""
-
-        if self._thread_local.pos is not None:
-            # Rewind the file position indicator of the body to where
-            # it was to resend the request.
-            r.request.body.seek(self._thread_local.pos)
-        s_auth = r.headers.get('www-authenticate', '')
-
-        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
-
-            self._thread_local.num_401_calls += 1
-            pat = re.compile(r'digest ', flags=re.IGNORECASE)
-            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
-
-            # Consume content and release the original connection
-            # to allow our new request to reuse the same one.
-            r.content
-            r.close()
-            prep = r.request.copy()
-            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
-            prep.prepare_cookies(prep._cookies)
-
-            prep.headers['Authorization'] = self.build_digest_header(
-                prep.method, prep.url)
-            _r = r.connection.send(prep, **kwargs)
-            _r.history.append(r)
-            _r.request = prep
-
-            return _r
-
-        self._thread_local.num_401_calls = 1
-        return r
-
-    def __call__(self, r):
-        # Initialize per-thread state, if needed
-        self.init_per_thread_state()
-        # If we have a saved nonce, skip the 401
-        if self._thread_local.last_nonce:
-            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
-        try:
-            self._thread_local.pos = r.body.tell()
-        except AttributeError:
-            # In the case of HTTPDigestAuth being reused and the body of
-            # the previous request was a file-like object, pos has the
-            # file position of the previous body. Ensure it's set to
-            # None.
-            self._thread_local.pos = None
-        r.register_hook('response', self.handle_401)
-        r.register_hook('response', self.handle_redirect)
-        self._thread_local.num_401_calls = 1
-
-        return r
-
-    def __eq__(self, other):
-        return all([
-            self.username == getattr(other, 'username', None),
-            self.password == getattr(other, 'password', None)
-        ])
-
-    def __ne__(self, other):
-        return not self == other
diff --git a/python/ext-libs/requests/cacert.pem b/python/ext-libs/requests/cacert.pem
deleted file mode 100644
index 6a66daa..0000000
--- a/python/ext-libs/requests/cacert.pem
+++ /dev/null
@@ -1,5616 +0,0 @@
-
-# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
-# Subject: O=Equifax OU=Equifax Secure Certificate Authority
-# Label: "Equifax Secure CA"
-# Serial: 903804111
-# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
-# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
-# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Label: "GlobalSign Root CA - R2"
-# Serial: 4835703278459682885658125
-# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
-# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
-# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
-# Serial: 206684696279472310254277870180966723415
-# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
-# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
-# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
-# Serial: 314531972711909413743075096039378935511
-# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
-# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
-# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946069240
-# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
-# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
-# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
-MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
-LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
-YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
-A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
-K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
-sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
-MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
-XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
-HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
-4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
-HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
-j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
-U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
-zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
-u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
-bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
-fF6adulZkMV8gzURZVE=
------END CERTIFICATE-----
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
-RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
-VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
-DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
-ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
-VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
-mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
-IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
-dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
-jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
-BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
-DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
-jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
-Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
-R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Low-Value Services Root"
-# Serial: 1
-# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
-# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
-# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
------BEGIN CERTIFICATE-----
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
-QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
-VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
-CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
-tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
-dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
-PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
-+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
-BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
-MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
-ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
-IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
-7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
-43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
-eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
-pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
-WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Label: "AddTrust External Root"
-# Serial: 1
-# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
-# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
-# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
------BEGIN CERTIFICATE-----
-MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
-IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
-bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
-dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
-H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
-uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
-mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
-a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
-E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
-WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
-IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
-AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
-YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
-6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
-Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
-c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
-mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Public Services Root"
-# Serial: 1
-# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
-# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
-# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
-MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
-ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
-BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
-6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
-GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
-dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
-1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
-62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
-BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
-MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
-cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
-b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
-IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
-iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
-4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
-XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Qualified Certificates Root"
-# Serial: 1
-# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
-# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
-# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
------BEGIN CERTIFICATE-----
-MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
-MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
-EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
-BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
-xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
-87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
-2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
-WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
-0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
-A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
-pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
-ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
-hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
-hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
-dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
-P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
-iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
-xqE=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Label: "Entrust Root Certification Authority"
-# Serial: 1164660820
-# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
-# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
-# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
------BEGIN CERTIFICATE-----
-MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
-Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
-KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
-cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
-NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
-NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
-ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
-BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
-Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
-4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
-KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
-rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
-94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
-sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
-gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
-kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
-vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
-A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
-O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
-AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
-9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
-eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
-0vdXcDazv/wor3ElhVsT/h5/WrQ8
------END CERTIFICATE-----
-
-# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3
-# Subject: O=RSA Security Inc OU=RSA Security 2048 V3
-# Label: "RSA Security 2048 v3"
-# Serial: 13297492616345471454730593562152402946
-# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e
-# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42
-# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c
------BEGIN CERTIFICATE-----
-MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
-MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
-dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
-BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
-MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
-eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
-/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
-wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
-AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
-PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
-AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
-MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
-HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
-Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
-f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
-rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
-6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
-7CAFYd4=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Label: "GeoTrust Global CA"
-# Serial: 144470
-# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
-# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
-# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
------BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Global CA 2"
-# Serial: 1
-# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
-# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
-# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
------BEGIN CERTIFICATE-----
-MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
-IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
-R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
-PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
-Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
-TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
-5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
-S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
-2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
-FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
-EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
-EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
-/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
-A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
-abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
-I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
-4iIprn2DQKi6bA==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA"
-# Serial: 1
-# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
-# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
-# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
------BEGIN CERTIFICATE-----
-MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
-BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
-IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
-VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
-cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
-QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
-F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
-c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
-mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
-VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
-teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
-f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
-Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
-nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
-/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
-MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
-9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
-aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
-IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
-ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
-uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
-QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
-ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
-DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
-bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA 2"
-# Serial: 1
-# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
-# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
-# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
------BEGIN CERTIFICATE-----
-MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
-VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
-c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
-WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
-FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
-XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
-se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
-KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
-IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
-y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
-hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
-QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
-Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
-HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
-KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
-L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
-Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
-ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
-T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
-GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
-1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
-OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
-6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
-QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
------END CERTIFICATE-----
-
-# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Label: "Visa eCommerce Root"
-# Serial: 25952180776285836048024890241505565794
-# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
-# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
-# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
------BEGIN CERTIFICATE-----
-MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
-MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
-cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
-bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
-CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
-dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
-cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
-2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
-lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
-ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
-299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
-vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
-dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
-AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
-AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
-zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
-LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
-7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
-++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
-398znM/jra6O1I7mT1GvFpLgXPYHDw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.
-# Subject: CN=Certum CA O=Unizeto Sp. z o.o.
-# Label: "Certum Root CA"
-# Serial: 65568
-# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9
-# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18
-# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24
------BEGIN CERTIFICATE-----
-MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
-jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
-ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
-ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
-Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
-AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
-HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
-uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
-TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
-xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
-CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
-O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
-6GAqm4VKQPNriiTsBhYscw==
------END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
-YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
-BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
-3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
-rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
-oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
-MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
-QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
-AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
-GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
-Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
-G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
-l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
-smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
-# Subject: CN=Secure Certificate Services O=Comodo CA Limited
-# Label: "Comodo Secure Services root"
-# Serial: 1
-# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
-# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
-# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
------BEGIN CERTIFICATE-----
-MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
-ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
-fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
-BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
-cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
-HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
-CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
-3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
-6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
-HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
-Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
-Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
-DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
-5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
-Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
-gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
-izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
------END CERTIFICATE-----
-
-# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
-# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
-# Label: "Comodo Trusted Services root"
-# Serial: 1
-# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
-# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
-# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Label: "QuoVadis Root CA"
-# Serial: 985026699
-# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
-# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
-# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
------BEGIN CERTIFICATE-----
-MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
-MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
-IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
-dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
-li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
-rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
-WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
-F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
-xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
-Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
-dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
-ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
-IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
-c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
-ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
-Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
-KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
-KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
-y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
-dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
-VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
-MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
-7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
-cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
-mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
-xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
-SnQ2+Q==
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2"
-# Serial: 1289
-# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
-# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
-# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
------BEGIN CERTIFICATE-----
-MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
-GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
-Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
-WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
-rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
-+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
-ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
-Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
-PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
-/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
-oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
-yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
-EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
-A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
-MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
-ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
-BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
-g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
-fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
-WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
-B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
-hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
-TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
-mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
-ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
-4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
-8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3"
-# Serial: 1478
-# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
-# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
-# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
------BEGIN CERTIFICATE-----
-MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
-V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
-4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
-H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
-8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
-vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
-mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
-btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
-T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
-WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
-c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
-4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
-VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
-CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
-aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
-aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
-dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
-czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
-A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
-Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
-7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
-d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
-+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
-4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
-t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
-DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
-k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
-zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
-Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
-mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
-4SVhM7JZG+Ju1zdXtg2pEto=
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
-# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
-# Label: "Security Communication Root CA"
-# Serial: 0
-# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
-# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
-# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
------BEGIN CERTIFICATE-----
-MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
-MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
-dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
-WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
-VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
-9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
-DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
-Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
-QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
-xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
-A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
-Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
-Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
-JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
-RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
------END CERTIFICATE-----
-
-# Issuer: CN=Sonera Class2 CA O=Sonera
-# Subject: CN=Sonera Class2 CA O=Sonera
-# Label: "Sonera Class 2 Root CA"
-# Serial: 29
-# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
-# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
-# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
------BEGIN CERTIFICATE-----
-MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
-MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
-MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
-BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
-Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
-5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
-3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
-vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
-8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
-DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
-MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
-zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
-3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
-FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
-Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
-ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA"
-# Serial: 10000010
-# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0
-# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04
-# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
-TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
-MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
-ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
-ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
-9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
-hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
-tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
-BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
-SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
-OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
-cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
-7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
-/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
-eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
-u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
-7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
-iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
------END CERTIFICATE-----
-
-# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN DATACorp SGC Root CA"
-# Serial: 91374294542884689855167577680241077609
-# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
-# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
-# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
------BEGIN CERTIFICATE-----
-MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
-kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
-IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
-EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
-VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
-dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
-E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
-D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
-4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
-o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
-MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
-LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
-BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
-AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
-Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
-j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
-KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
-2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
-mfnGV/TJVTl4uix5yaaIK/QI
------END CERTIFICATE-----
-
-# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN USERFirst Hardware Root CA"
-# Serial: 91374294542884704022267039221184531197
-# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
-# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
-# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
------BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Chambers of Commerce Root"
-# Serial: 0
-# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84
-# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1
-# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
-b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
-MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
-ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
-IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
-AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
-unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
-BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
-7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
-0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
-roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
-A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
-aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
-26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
-BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
-EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
-BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
-aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
-AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
-p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
-1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
-XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
-eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
-tGWaIZDgqtCYvDi1czyL+Nw=
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Global Chambersign Root"
-# Serial: 0
-# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19
-# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9
-# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed
------BEGIN CERTIFICATE-----
-MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
-YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
-MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
-NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
-A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
-A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
-Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
-QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
-eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
-B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
-z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
-AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
-ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
-TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
-MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
-VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
-VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
-bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
-AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
-bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
-ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
-VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
-ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
-AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Notary (Class A) Root"
-# Serial: 259
-# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
-# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
-# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
------BEGIN CERTIFICATE-----
-MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
-MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
-TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
-dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
-KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
-N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
-dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
-MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
-b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
-zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
-3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
-WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
-Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
-NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
-ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
-QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
-YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
-aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
-IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
-ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
-ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
-amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
-IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
-Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
-ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
-YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
-dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
-b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
-CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
-xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
-0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
-QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
-f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
-8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
-# Issuer: O=Government Root Certification Authority
-# Subject: O=Government Root Certification Authority
-# Label: "Taiwan GRCA"
-# Serial: 42023070807708724159991140556527066870
-# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
-# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
-# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
------BEGIN CERTIFICATE-----
-MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
-MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
-PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
-IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
-gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
-yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
-F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
-jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
-ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
-VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
-YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
-EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
-Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
-DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
-MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
-TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
-qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
-ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
-JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
-hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
-EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
-nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
-udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
-ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
-LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
-pYYsfPQS
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 1"
-# Serial: 122348795730808398873664200247279986742
-# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9
-# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51
-# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
-m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
-FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
-TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
-EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
-kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
-HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
-vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
-19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
-L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
-bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
-JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
-BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
-K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
-ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
-Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
-sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
-3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
-ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
-mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
-b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
-rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
-hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
-zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
-MBr1mmz0DlP5OlvRHA==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root CA"
-# Serial: 17154717934120587862167794914071425081
-# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
-# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
-# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root CA"
-# Serial: 10944719598952040374951832963794454346
-# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
-# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
-# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
------BEGIN CERTIFICATE-----
-MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
-CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
-43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
-T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
-gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
-DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
-hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
-06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
-PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
-YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
-CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert High Assurance EV Root CA"
-# Serial: 3553400076410547919724730734378100087
-# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
-# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
-# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
-ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
-LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
-RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
-+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
-PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
-Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
-hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
-EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
-FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
-eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
-hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
-Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
-vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
-+OkuE6N36B9K
------END CERTIFICATE-----
-
-# Issuer: CN=Class 2 Primary CA O=Certplus
-# Subject: CN=Class 2 Primary CA O=Certplus
-# Label: "Certplus Class 2 Primary CA"
-# Serial: 177770208045934040241468760488327595043
-# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
-# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
-# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
------BEGIN CERTIFICATE-----
-MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
-PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
-MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
-IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
-ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
-VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
-kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
-EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
-H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
-HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
-DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
-QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
-Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
-AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
-yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
-FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
-ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
-kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
-l7+ijrRU
------END CERTIFICATE-----
-
-# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Label: "DST Root CA X3"
-# Serial: 91299735575339953335919266965803778155
-# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
-# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
-# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
------BEGIN CERTIFICATE-----
-MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
-MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
-DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
-PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
-Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
-rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
-OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
-xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
-7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
-aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
-SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
-ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
-AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
-R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
-JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
-Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
------END CERTIFICATE-----
-
-# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Label: "DST ACES CA X6"
-# Serial: 17771143917277623872238992636097467865
-# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8
-# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d
-# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40
------BEGIN CERTIFICATE-----
-MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
-ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
-MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
-VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
-FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
-ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
-gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
-fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
-ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
-ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
-c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
-dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
-aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
-hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
-QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
-h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
-nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
-rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
-9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Label: "TURKTRUST Certificate Services Provider Root 2"
-# Serial: 1
-# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00
-# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7
-# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6
------BEGIN CERTIFICATE-----
-MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
-WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
-bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
-UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
-bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
-LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
-J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
-R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
-Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
-JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
-zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
-Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
-KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
-ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
-Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
-gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
-uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
-y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Label: "SwissSign Gold CA - G2"
-# Serial: 13492815561806991280
-# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
-# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
-# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
------BEGIN CERTIFICATE-----
-MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
-BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
-biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
-d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
-CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
-76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
-bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
-6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
-emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
-MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
-MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
-MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
-FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
-aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
-gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
-qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
-lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
-8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
-L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
-45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
-UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
-O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
-bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
-GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
-77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
-hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
-92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
-Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
-ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
-Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Label: "SwissSign Silver CA - G2"
-# Serial: 5700383053117599563
-# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
-# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
-# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
------BEGIN CERTIFICATE-----
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
-BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
-IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
-RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
-U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
-MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
-Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
-YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
-nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
-6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
-eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
-c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
-MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
-jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
-5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
-rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
-F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
-cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
-AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
-WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
-xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
-2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
-IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
-aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
-em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
-dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
-OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
-hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
-tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Label: "GeoTrust Primary Certification Authority"
-# Serial: 32798226551256963324313806436981982369
-# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
-# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
-# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
------BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA"
-# Serial: 69529181992039203566298953787712940909
-# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
-# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
-# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
-# Serial: 33037644167568058970164719475676101450
-# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
-# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
-# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
-# Subject: CN=SecureTrust CA O=SecureTrust Corporation
-# Label: "SecureTrust CA"
-# Serial: 17199774589125277788362757014266862032
-# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
-# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
-# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
------BEGIN CERTIFICATE-----
-MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
-MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
-cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
-Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
-0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
-wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
-7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
-8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
-BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
-NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
-3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
-D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
-CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
-3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Global CA O=SecureTrust Corporation
-# Subject: CN=Secure Global CA O=SecureTrust Corporation
-# Label: "Secure Global CA"
-# Serial: 9751836167731051554232119481456978597
-# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
-# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
-# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
------BEGIN CERTIFICATE-----
-MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
-MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
-Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
-iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
-/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
-jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
-HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
-sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
-gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
-KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
-AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
-URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
-H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
-I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
-iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
-f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
-# Label: "COMODO Certification Authority"
-# Serial: 104350513648249232941998508985834464573
-# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
-# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
-# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
------BEGIN CERTIFICATE-----
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
-gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
-BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
-MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
-YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
-RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
-UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
-2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
-+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
-nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
-/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
-PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
-SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
-RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
-zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
-BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
-ZQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Label: "Network Solutions Certificate Authority"
-# Serial: 116697915152937497490437556386812487904
-# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
-# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
-# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
------BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
------END CERTIFICATE-----
-
-# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Label: "WellsSecure Public Root Certificate Authority"
-# Serial: 1
-# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36
-# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee
-# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
-IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
-cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
-dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
-MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
-bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
-DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
-WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
-Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
-HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
-z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
-SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
-AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
-KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
-AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
-BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
-VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
-ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
-Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
-ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
-/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
-A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
-k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
-iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
-2G0xffX8oRAHh84vWdw+WNs=
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Label: "COMODO ECC Certification Authority"
-# Serial: 41578283867086692638256921589707938090
-# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
-# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
-# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
------BEGIN CERTIFICATE-----
-MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
-MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
-ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
-T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
-biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
-FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
-cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
-BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
-fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
-GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
------END CERTIFICATE-----
-
-# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Label: "IGC/A"
-# Serial: 245102874772
-# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37
-# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c
-# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32
------BEGIN CERTIFICATE-----
-MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
-AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
-TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
-9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
-MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
-BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
-MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
-LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
-s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
-xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
-u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
-F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
-Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
-PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
-HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
-NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
-AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
-L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
-YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
-Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
-NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
-0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Label: "Security Communication EV RootCA1"
-# Serial: 0
-# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3
-# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d
-# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37
------BEGIN CERTIFICATE-----
-MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
-MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
-IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
-bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
-RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
-zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
-bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
-MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
-VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
-OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
-tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
-q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
-EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
-Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
-VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
------END CERTIFICATE-----
-
-# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Label: "OISTE WISeKey Global Root GA CA"
-# Serial: 86718877871133159090080555911823548314
-# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
-# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
-# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
------BEGIN CERTIFICATE-----
-MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
-ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
-aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
-ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
-NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
-A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
-VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
-SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
-VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
-w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
-mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
-4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
-4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
-EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
-SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
-ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
-vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
-hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
-Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
-/L7fCg0=
------END CERTIFICATE-----
-
-# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Label: "Microsec e-Szigno Root CA"
-# Serial: 272122594155480254301341951808045322001
-# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5
-# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d
-# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0
------BEGIN CERTIFICATE-----
-MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
-cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
-b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
-ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
-NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
-TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
-Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
-uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
-LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
-vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
-Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
-62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
-AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
-LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
-BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
-AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
-MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
-ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
-AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
-AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
-ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
-AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
-AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
-bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
-Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
-PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
-Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
-EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
-w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
-cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
-HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
-VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
-BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
-b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
-8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
-ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
-7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
-86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
-hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
-MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
------END CERTIFICATE-----
-
-# Issuer: CN=Certigna O=Dhimyotis
-# Subject: CN=Certigna O=Dhimyotis
-# Label: "Certigna"
-# Serial: 18364802974209362175
-# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
-# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
-# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
------BEGIN CERTIFICATE-----
-MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
-BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
-DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
-BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
-gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
-zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
-130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
-JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
-ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
-AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
-AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
-bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
-fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
-HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
-t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
-WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
------END CERTIFICATE-----
-
-# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Label: "Deutsche Telekom Root CA 2"
-# Serial: 38
-# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
-# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
-# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
------BEGIN CERTIFICATE-----
-MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
-MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
-IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
-IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
-RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
-U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
-IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
-ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
-QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
-rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
-NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
-QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
-txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
-BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
-tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
-IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
-6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
-xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
-Cm26OWMohpLzGITY+9HPBVZkVw==
------END CERTIFICATE-----
-
-# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Label: "Cybertrust Global Root"
-# Serial: 4835703278459682877484360
-# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
-# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
-# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
------BEGIN CERTIFICATE-----
-MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
-A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
-bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
-b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
-7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
-J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
-HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
-t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
-FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
-XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
-MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
-hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Label: "ePKI Root Certification Authority"
-# Serial: 28956088682735189655030529057352760477
-# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
-# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
-# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
------BEGIN CERTIFICATE-----
-MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
-MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
-ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
-IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
-SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
-SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
-ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
-DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
-TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
-fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
-sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
-WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
-nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
-dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
-NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
-AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
-MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
-ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
-uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
-PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
-JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
-gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
-j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
-5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
-o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
-/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
-W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
-hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
-# Serial: 17
-# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
-# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
-# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
------BEGIN CERTIFICATE-----
-MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
-MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
-bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
-VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
-YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
-dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
-ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
-Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
-aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
-QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
-xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
-aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
-IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
-gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
-O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
-fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
-lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
-hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
-AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
-NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
-wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
-7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
-gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
-oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
-yZyQ2uypQjyttgI=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Label: "Buypass Class 2 CA 1"
-# Serial: 1
-# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
-# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
-# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
------BEGIN CERTIFICATE-----
-MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
-Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
-MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
-VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
-ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
-l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
-HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
-5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
-WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
-AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
-gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
-DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
-BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
-h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
-LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
------END CERTIFICATE-----
-
-# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
-# Serial: 5525761995591021570
-# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37
-# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58
-# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2
------BEGIN CERTIFICATE-----
-MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
-BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
-ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
-MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
-a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
-4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
-tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
-tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
-dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
-c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
-TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
-+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
-Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
-OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
-fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
-l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
-/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
-FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
-8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
-6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
-TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
-wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
-Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
-xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
-DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
-Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
-hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
-7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
-QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
------END CERTIFICATE-----
-
-# Issuer: O=certSIGN OU=certSIGN ROOT CA
-# Subject: O=certSIGN OU=certSIGN ROOT CA
-# Label: "certSIGN ROOT CA"
-# Serial: 35210227249154
-# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
-# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
-# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
------BEGIN CERTIFICATE-----
-MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
-QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
-MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
-ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
-0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
-UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
-RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
-AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
-BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
-LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
-MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
-44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
-Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
-i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
-9u6wWk5JRFRYX0KD
------END CERTIFICATE-----
-
-# Issuer: CN=CNNIC ROOT O=CNNIC
-# Subject: CN=CNNIC ROOT O=CNNIC
-# Label: "CNNIC ROOT"
-# Serial: 1228079105
-# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19
-# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f
-# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7
------BEGIN CERTIFICATE-----
-MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
-TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
-MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
-Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
-DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
-IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
-dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
-V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
-GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
-v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
-AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
-Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
-76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
-OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
-ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
-yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
-buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
-2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
------END CERTIFICATE-----
-
-# Issuer: O=Japanese Government OU=ApplicationCA
-# Subject: O=Japanese Government OU=ApplicationCA
-# Label: "ApplicationCA - Japanese Government"
-# Serial: 49
-# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6
-# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74
-# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19
------BEGIN CERTIFICATE-----
-MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
-MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
-b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
-AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
-aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
-j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
-f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
-IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
-FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
-QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
-/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
-k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
-MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
-seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
-hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
-eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
-DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
-B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
-rosot4LKGAfmt1t06SAZf7IbiVQ=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
-# Serial: 80544274841616
-# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
-# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
-# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
-EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
-MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
-cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
-dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
-pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
-b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
-aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
-IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
-lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
-AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
-VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
-ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
-BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
-AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
-U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
-bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
-+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
-bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
-uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
-XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G2"
-# Serial: 10000012
-# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
-# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
-# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
------BEGIN CERTIFICATE-----
-MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
-DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
-qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
-uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
-Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
-pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
-5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
-UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
-GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
-5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
-6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
-eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
-B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
-L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
-SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
-CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
-5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
-IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
-gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
-+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
-vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
-bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
-N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
-Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
-ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig O=Disig a.s.
-# Subject: CN=CA Disig O=Disig a.s.
-# Label: "CA Disig"
-# Serial: 1
-# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6
-# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41
-# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
-MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
-AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
-CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
-YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
-Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
-mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
-XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
-S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
-FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
-AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
-ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
-ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
-Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
-DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
-yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
-EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
-CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
-EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
-PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
------END CERTIFICATE-----
-
-# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Label: "Juur-SK"
-# Serial: 999181308
-# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55
-# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89
-# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39
------BEGIN CERTIFICATE-----
-MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
-AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
-dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
-MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
-CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
-MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
-SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
-ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
-LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
-PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
-2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
-ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
-MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
-AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
-AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
-AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
-AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
-BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
-FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
-P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
-CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
-kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
-HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
-na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
-qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
-TbvGRNs2yyqcjg==
------END CERTIFICATE-----
-
-# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Label: "Hongkong Post Root CA 1"
-# Serial: 1000
-# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
-# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
-# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
------BEGIN CERTIFICATE-----
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
-FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
-Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
-b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
-jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
-PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
-ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
-nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
-q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
-MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
-mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
-7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
-oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
-EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
-fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
-AmvZWg==
------END CERTIFICATE-----
-
-# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Label: "SecureSign RootCA11"
-# Serial: 1
-# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
-# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
-# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
------BEGIN CERTIFICATE-----
-MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
-MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
-A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
-MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
-Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
-QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
-i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
-h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
-MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
-UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
-8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
-h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
-VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
-KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
-X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
-QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
-pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
-QSdJQO7e5iNEOdyhIta6A/I=
------END CERTIFICATE-----
-
-# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Label: "ACEDICOM Root"
-# Serial: 7029493972724711941
-# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6
-# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84
-# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a
------BEGIN CERTIFICATE-----
-MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
-AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
-CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
-MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
-RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
-AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
-09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
-XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
-Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
-t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
-X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
-MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
-fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
-2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
-K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
-ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
-BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
-MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
-RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
-bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
-fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
-gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
-I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
-5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
-ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
-MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
-o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
-zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
-GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
-r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
-Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
------END CERTIFICATE-----
-
-# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Label: "Microsec e-Szigno Root CA 2009"
-# Serial: 14014712776195784473
-# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
-# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
-# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
------BEGIN CERTIFICATE-----
-MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
-VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
-ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
-CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
-OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
-FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
-Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
-dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
-kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
-cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
-fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
-N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
-xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
-+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
-A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
-Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
-SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
-mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
-ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
-tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
-2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
-HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Label: "GlobalSign Root CA - R3"
-# Serial: 4835703278459759426209954
-# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
-# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
-# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
------BEGIN CERTIFICATE-----
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
-MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
-RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
-gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
-QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
-XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
-LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
-RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
-jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
-6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
-mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
-Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
-WD9f
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
-# Serial: 6047274297262753887
-# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
-# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
-# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
------BEGIN CERTIFICATE-----
-MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
-BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
-cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
-MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
-Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
-thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
-cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
-L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
-NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
-X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
-m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
-Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
-EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
-KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
-6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
-OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
-VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
-VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
-ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
-AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
-661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
-am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
-ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
-PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
-3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
-SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
-3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
-ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
-StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
-Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
-jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
------END CERTIFICATE-----
-
-# Issuer: CN=Izenpe.com O=IZENPE S.A.
-# Subject: CN=Izenpe.com O=IZENPE S.A.
-# Label: "Izenpe.com"
-# Serial: 917563065490389241595536686991402621
-# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
-# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
-# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
------BEGIN CERTIFICATE-----
-MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
-MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
-ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
-VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
-b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
-scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
-xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
-LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
-uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
-yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
-JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
-rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
-BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
-hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
-QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
-HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
-Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
-QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
-BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
-MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
-A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
-laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
-awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
-JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
-LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
-VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
-LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
-UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
-QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
-naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
-QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Label: "Chambers of Commerce Root - 2008"
-# Serial: 11806822484801597146
-# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
-# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
-# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
------BEGIN CERTIFICATE-----
-MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
-IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
-MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
-dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
-EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
-MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
-28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
-VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
-DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
-5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
-ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
-Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
-UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
-+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
-Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
-ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
-hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
-HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
-+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
-YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
-L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
-ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
-IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
-HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
-DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
-PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
-5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
-glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
-FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
-pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
-xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
-tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
-jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
-fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
-OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
-d0jQ
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Label: "Global Chambersign Root - 2008"
-# Serial: 14541511773111788494
-# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
-# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
-# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
------BEGIN CERTIFICATE-----
-MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
-aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
-MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
-cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
-A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
-BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
-KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
-G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
-zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
-ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
-HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
-Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
-yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
-beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
-6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
-wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
-zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
-BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
-ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
-ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
-cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
-YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
-CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
-KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
-hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
-UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
-X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
-fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
-a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
-Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
-SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
-AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
-M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
-v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
-09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
------END CERTIFICATE-----
-
-# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Label: "Go Daddy Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
-# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
-# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
-# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
-# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
-ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
-MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
-b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
-aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
-Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
-nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
-HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
-Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
-dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
-HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
-CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
-sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
-4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
-8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
-pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
-mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Services Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
-# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
-# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
-ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
-MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
-VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
-ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
-dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
-OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
-8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
-Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
-hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
-6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
-AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
-bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
-ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
-qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
-iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
-0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
-sSi6
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
-# Subject: CN=AffirmTrust Commercial O=AffirmTrust
-# Label: "AffirmTrust Commercial"
-# Serial: 8608355977964138876
-# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
-# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
-# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
-Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
-ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
-MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
-yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
-VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
-nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
-XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
-vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
-Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
-N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
-nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Networking O=AffirmTrust
-# Subject: CN=AffirmTrust Networking O=AffirmTrust
-# Label: "AffirmTrust Networking"
-# Serial: 8957382827206547757
-# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
-# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
-# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
-YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
-kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
-QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
-6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
-yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
-QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
-tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
-QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
-Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
-olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
-x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium O=AffirmTrust
-# Subject: CN=AffirmTrust Premium O=AffirmTrust
-# Label: "AffirmTrust Premium"
-# Serial: 7893706540734352110
-# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
-# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
-# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
-dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
-A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
-qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
-JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
-+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
-s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
-HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
-70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
-V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
-qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
-5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
-C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
-OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
-FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
-KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
-Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
-8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
-MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
-0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
-u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
-YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
-GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
-RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
-KeC2uAloGRwYQw==
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Label: "AffirmTrust Premium ECC"
-# Serial: 8401224907861490260
-# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
-# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
-# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
------BEGIN CERTIFICATE-----
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
-VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
-cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
-BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
-VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
-0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
-ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
-A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
-aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
-flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Label: "Certum Trusted Network CA"
-# Serial: 279744
-# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
-# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
-# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
------BEGIN CERTIFICATE-----
-MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
-MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
-ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
-cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
-WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
-Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
-IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
-UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
-TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
-BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
-kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
-AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
-HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
-sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
-I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
-J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
-VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
-03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
------END CERTIFICATE-----
-
-# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Autorité Racine"
-# Serial: 1
-# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a
-# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3
-# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17
------BEGIN CERTIFICATE-----
-MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
-BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
-Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
-cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
-aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
-ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
-F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
-8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
-rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
-/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
-7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
-28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
-lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
-nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
-0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
-5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
-WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
-jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
-KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
-ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
-OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
-619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
-2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
-o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
-nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
-5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
-pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
-dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
-BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
------END CERTIFICATE-----
-
-# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Label: "Root CA Generalitat Valenciana"
-# Serial: 994436456
-# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2
-# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46
-# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e
------BEGIN CERTIFICATE-----
-MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
-UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
-R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
-MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
-A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
-JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
-WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
-SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
-u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
-A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
-Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
-MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
-aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
-IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
-cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
-YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
-bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
-bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
-aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
-aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
-ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
-YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
-ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
-LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
-Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
-eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
-CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
-A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
-Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
-lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
-b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
-9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
-ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
-IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
------END CERTIFICATE-----
-
-# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Label: "A-Trust-nQual-03"
-# Serial: 93214
-# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53
-# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2
-# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb
------BEGIN CERTIFICATE-----
-MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
-VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
-bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
-dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
-MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
-dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
-ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
-EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
-lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
-znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
-2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
-k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
-2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
-VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
-KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
-8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
-FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
-mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
-DNuxUCAKGkq6ahq97BvIxYSazQ==
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Root Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
-# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
-# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
-MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
-V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
-WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
-LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
-AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
-K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
-RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
-rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
-3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
-hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
-MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
-XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
-lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
-aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
-YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Label: "Security Communication RootCA2"
-# Serial: 0
-# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
-# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
-# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
-DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
-dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
-YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
-OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
-zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
-VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
-hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
-ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
-awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
-OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
-DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
-coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
-okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
-t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
-1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
-SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
------END CERTIFICATE-----
-
-# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Label: "Hellenic Academic and Research Institutions RootCA 2011"
-# Serial: 0
-# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
-# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
-# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
------BEGIN CERTIFICATE-----
-MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
-RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
-dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
-YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
-NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
-EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
-cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
-c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
-dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
-fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
-bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
-75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
-FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
-HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
-5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
-b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
-A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
-6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
-TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
-dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
-Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
-l7WdmplNsDz4SgCbZN2fOUvRJ9e4
------END CERTIFICATE-----
-
-# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Label: "Actalis Authentication Root CA"
-# Serial: 6271844772424770508
-# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
-# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
-# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
------BEGIN CERTIFICATE-----
-MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
-BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
-MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
-IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
-SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
-ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
-UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
-4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
-KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
-gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
-rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
-51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
-be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
-KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
-v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
-fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
-ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
-ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
-e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
-WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
-SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
-pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
-X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
-fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
-K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
-ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
-LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
-LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
------END CERTIFICATE-----
-
-# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
-# Subject: O=Trustis Limited OU=Trustis FPS Root CA
-# Label: "Trustis FPS Root CA"
-# Serial: 36053640375399034304724988975563710553
-# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
-# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
-# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
------BEGIN CERTIFICATE-----
-MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
-ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
-MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
-MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
-AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
-iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
-vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
-0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
-OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
-BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
-FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
-GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
-zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
-1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
-f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
-jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
-ZetX2fNXlrtIzYE=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 45
-# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
-# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
-# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
------BEGIN CERTIFICATE-----
-MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
-F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
-ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
-ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
-aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
-c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
-d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
-CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
-dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
-wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
-Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
-0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
-pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
-CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
-P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
-1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
-KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
-JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
-8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
-fyWl8kgAwKQB2j8=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Label: "StartCom Certification Authority G2"
-# Serial: 59
-# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
-# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
-# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
------BEGIN CERTIFICATE-----
-MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
-OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
-A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
-JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
-vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
-D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
-Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
-RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
-HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
-nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
-0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
-UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
-Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
-TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
-BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
-2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
-UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
-6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
-9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
-HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
-XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
-IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
-hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
-so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 2 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
-# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
-# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
-L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
-1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
-MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
-QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
-arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
-Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
-FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
-P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
-9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
-uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
-9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
-A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
-OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
-+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
-KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
-DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
-H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
-I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
-5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
-3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
-Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 3 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
-# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
-# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
-ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
-N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
-tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
-0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
-/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
-KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
-zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
-O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
-34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
-K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
-Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
-QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
-cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
-IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
-HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
-O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
-dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
-kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
-3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
-u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
-4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 3"
-# Serial: 1
-# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
-# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
-# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
-8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
-RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
-hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
-ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
-EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
-A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
-WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
-6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
-91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
-e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
-TpPDpFQUWw==
------END CERTIFICATE-----
-
-# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Label: "EE Certification Centre Root CA"
-# Serial: 112324828676200291871926431888494945866
-# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
-# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
-# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
------BEGIN CERTIFICATE-----
-MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
-MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
-czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
-CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
-MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
-ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
-b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
-euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
-bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
-MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
-1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
-VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
-zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
-BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
-BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
-v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
-E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
-uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
-iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
-GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Label: "TURKTRUST Certificate Services Provider Root 2007"
-# Serial: 1
-# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72
-# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33
-# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50
------BEGIN CERTIFICATE-----
-MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx
-OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry
-b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC
-VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE
-sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F
-ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY
-KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG
-+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG
-HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P
-IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M
-733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk
-Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW
-AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I
-aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5
-mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa
-XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ
-qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 2009"
-# Serial: 623603
-# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
-# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
-# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
------BEGIN CERTIFICATE-----
-MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
-ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
-HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
-UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
-tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
-ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
-lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
-/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
-A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
-A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
-dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
-MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
-cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
-L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
-BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
-acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
-o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
-zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
-PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
-Johw1+qRzT65ysCQblrGXnRl11z+o+I=
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
-# Serial: 623604
-# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
-# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
-# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
-NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
-ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
-3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
-qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
-p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
-HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
-HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
-Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
-c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
-RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
-dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
-Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
-3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
-nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
-CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
-xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
-KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica
-# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT
-# Label: "PSCProcert"
-# Serial: 11
-# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec
-# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74
-# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0
------BEGIN CERTIFICATE-----
-MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1
-dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s
-YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz
-dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0
-aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh
-IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ
-KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw
-MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy
-b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx
-KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG
-A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u
-aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9
-7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74
-BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G
-ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9
-JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0
-PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2
-0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH
-0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/
-6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m
-v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7
-K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev
-bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw
-MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w
-MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD
-gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0
-b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh
-bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0
-cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp
-ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg
-ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq
-hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD
-AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w
-MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag
-RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t
-UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl
-cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v
-Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG
-AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN
-AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS
-1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB
-3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv
-Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh
-HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm
-pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz
-sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE
-qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb
-mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9
-opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H
-YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km
------END CERTIFICATE-----
-
-# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Label: "China Internet Network Information Center EV Certificates Root"
-# Serial: 1218379777
-# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15
-# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e
-# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7
------BEGIN CERTIFICATE-----
-MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC
-Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g
-Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0
-aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa
-Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg
-SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo
-aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp
-ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z
-7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//
-DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx
-zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8
-hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs
-4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u
-gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY
-NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
-FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3
-j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG
-52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB
-echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws
-ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI
-zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy
-wy39FCqQmbkHzJ8=
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 2"
-# Serial: 40698052477090394928831521023204026294
-# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19
-# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec
-# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr
-jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r
-0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f
-2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP
-ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF
-y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA
-tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL
-6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0
-uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL
-acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh
-k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q
-VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O
-BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh
-b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R
-fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv
-/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI
-REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx
-srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv
-aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT
-woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n
-Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W
-t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N
-8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2
-9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5
-wSsSnqaeG8XmDtkx2Q==
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root EV CA 2"
-# Serial: 322973295377129385374608406479535262296
-# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec
-# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b
-# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d
------BEGIN CERTIFICATE-----
-MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw
-ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp
-dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290
-IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD
-VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy
-dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg
-MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx
-UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD
-1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH
-oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR
-HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/
-5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv
-idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL
-OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC
-NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f
-46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB
-UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth
-7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G
-A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED
-MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB
-bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x
-XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T
-PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0
-Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70
-WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL
-Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm
-7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S
-nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN
-vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB
-WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI
-fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb
-I+2ksx0WckNLIOFZfsLorSa/ovc=
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R1 O=Disig a.s.
-# Subject: CN=CA Disig Root R1 O=Disig a.s.
-# Label: "CA Disig Root R1"
-# Serial: 14052245610670616104
-# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a
-# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6
-# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy
-MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk
-D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o
-OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A
-fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe
-IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n
-oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK
-/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj
-rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD
-3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE
-7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC
-yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd
-qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI
-hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR
-xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA
-SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo
-HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB
-emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC
-AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb
-7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x
-DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk
-F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF
-a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT
-Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R2 O=Disig a.s.
-# Subject: CN=CA Disig Root R2 O=Disig a.s.
-# Label: "CA Disig Root R2"
-# Serial: 10572350602393338211
-# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
-# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
-# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
-MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
-NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
-PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
-x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
-QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
-yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
-QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
-H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
-QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
-i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
-nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
-rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
-hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
-tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
-GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
-lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
-+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
-TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
-nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
-gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
-G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
-L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
------END CERTIFICATE-----
-
-# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Label: "ACCVRAIZ1"
-# Serial: 6828503384748696800
-# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
-# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
-# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
------BEGIN CERTIFICATE-----
-MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
-AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
-CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
-BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
-VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
-qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
-HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
-G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
-lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
-IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
-0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
-k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
-4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
-m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
-cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
-uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
-KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
-ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
-AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
-VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
-VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
-CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
-cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
-QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
-7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
-cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
-QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
-czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
-aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
-aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
-DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
-BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
-D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
-JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
-AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
-vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
-tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
-7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
-I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
-h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
-d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
-pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Global Root CA"
-# Serial: 3262
-# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
-# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
-# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
------BEGIN CERTIFICATE-----
-MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
-EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
-NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
-B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
-10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
-0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
-MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
-zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
-46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
-yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
-laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
-oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
-BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
-qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
-4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
-1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
-LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
-H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
-RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
-nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
-15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
-6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
-nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
-wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
-aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
-KwbQBM0=
------END CERTIFICATE-----
-
-# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Label: "TeliaSonera Root CA v1"
-# Serial: 199041966741090107964904287217786801558
-# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
-# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
-# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
------BEGIN CERTIFICATE-----
-MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
-NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
-b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
-VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
-MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
-VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
-7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
-Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
-/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
-81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
-dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
-Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
-sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
-slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
-arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
-VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
-9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
-dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
-TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
-Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
-Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
-OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
-vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
-t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
-HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
-SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
------END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Label: "E-Tugra Certification Authority"
-# Serial: 7667447206703254355
-# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
-# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
-# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
-BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
-aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
-BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
-Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
-MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
-BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
-em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
-ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
-B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
-D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
-Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
-q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
-k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
-fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
-dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
-zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
-rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
-Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
-XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
-Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
-HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
-GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
-77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
-+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
-FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
-yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
-AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
-NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 2"
-# Serial: 1
-# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
-# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
-# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
-AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
-FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
-1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
-jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
-wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
-WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
-NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
-uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
-IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
-g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
-9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
-BSeOE6Fuwg==
------END CERTIFICATE-----
-
-# Issuer: CN=Atos TrustedRoot 2011 O=Atos
-# Subject: CN=Atos TrustedRoot 2011 O=Atos
-# Label: "Atos TrustedRoot 2011"
-# Serial: 6643877497813316402
-# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
-# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
-# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
-AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
-EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
-FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
-REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
-Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
-VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
-SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
-cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
-eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
-A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
-DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
-vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
-DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
-maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
-lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
-KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 1 G3"
-# Serial: 687049649626669250736271037606554624078720034195
-# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
-# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
-# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
-MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
-wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
-rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
-68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
-4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
-UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
-abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
-3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
-KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
-hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
-Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
-zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
-ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
-MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
-cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
-qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
-YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
-b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
-8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
-NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
-ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
-q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
-nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2 G3"
-# Serial: 390156079458959257446133169266079962026824725800
-# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
-# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
-# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
-MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
-qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
-n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
-c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
-O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
-o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
-IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
-IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
-8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
-vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
-7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
-cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
-ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
-AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
-roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
-lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
-+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
-csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
-dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
-KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
-HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
-WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3 G3"
-# Serial: 268090761170461462463995952157327242137089239581
-# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
-# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
-# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
-MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
-/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
-FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
-U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
-ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
-FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
-A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
-eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
-sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
-VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
-ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
-KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
-FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
-oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
-u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
-0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
-3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
-8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
-DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
-PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
-ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G2"
-# Serial: 15385348160840213938643033620894905419
-# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
-# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
-# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
------BEGIN CERTIFICATE-----
-MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
-n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
-biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
-EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
-bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
-YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
-AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
-BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
-QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
-0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
-lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
-B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
-ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
-IhNzbM8m9Yop5w==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G3"
-# Serial: 15459312981008553731928384953135426796
-# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
-# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
-# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
------BEGIN CERTIFICATE-----
-MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
-Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
-RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
-AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
-JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
-6pZjamVFkpUBtA==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G2"
-# Serial: 4293743540046975378534879503202253541
-# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
-# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
-# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
------BEGIN CERTIFICATE-----
-MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
-MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
-2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
-1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
-q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
-tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
-vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
-BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
-5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
-1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
-NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
-Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
-8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
-pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
-MrY=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G3"
-# Serial: 7089244469030293291760083333884364146
-# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
-# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
-# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
------BEGIN CERTIFICATE-----
-MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
-Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
-EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
-IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
-K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
-fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
-BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
-AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
-sycX
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Trusted Root G4"
-# Serial: 7451500558977370777930084869016614236
-# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
-# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
-# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
------BEGIN CERTIFICATE-----
-MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
-RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
-ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
-xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
-ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
-DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
-jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
-CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
-EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
-fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
-uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
-chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
-9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
-ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
-SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
-+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
-fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
-sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
-cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
-0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
-4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
-r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
-/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
-gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
------END CERTIFICATE-----
-
-# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Label: "WoSign"
-# Serial: 125491772294754854453622855443212256657
-# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d
-# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb
-# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08
------BEGIN CERTIFICATE-----
-MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV
-BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw
-MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX
-b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN
-rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U
-fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc
-f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2
-ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M
-x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR
-aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch
-zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar
-uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K
-mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA
-Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv
-HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H
-EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1
-LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ
-MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e
-JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN
-g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp
-dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab
-R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ
-PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce
-xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+
-J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl
-OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT
-ee5Ehr7XHuQe+w==
------END CERTIFICATE-----
-
-# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited
-# Subject: CN=CA 沃通根证书 O=WoSign CA Limited
-# Label: "WoSign China"
-# Serial: 106921963437422998931660691310149453965
-# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93
-# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6
-# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54
------BEGIN CERTIFICATE-----
-MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV
-BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw
-MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl
-ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r
-D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1
-9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf
-v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk
-UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L
-NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb
-+gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V
-qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K
-yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G
-AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK
-J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC
-AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4
-WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6
-yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj
-/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6
-jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2
-ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX
-X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n
-FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D
-u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l
-O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le
-ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1
-2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Label: "COMODO RSA Certification Authority"
-# Serial: 101909084537582093308941363524873193117
-# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
-# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
-# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
------BEGIN CERTIFICATE-----
-MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
-hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
-BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
-Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
-6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
-pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
-9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
-/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
-Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
-+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
-qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
-SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
-u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
-Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
-crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
-FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
-/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
-wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
-4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
-2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
-FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
-CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
-boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
-jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
-S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
-QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
-0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
-NVOFBkpdn627G190
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Label: "USERTrust RSA Certification Authority"
-# Serial: 2645093764781058787591871645665788717
-# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
-# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
-# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
------BEGIN CERTIFICATE-----
-MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
-iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
-cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
-BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
-MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
-BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
-aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
-dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
-3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
-tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
-Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
-VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
-79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
-c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
-c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
-UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
-BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
-A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
-Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
-VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
-ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
-8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
-iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
-Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
-XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
-qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
-VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
-L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
-jjxDah2nGN59PRbxYvnKkKj9
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Label: "USERTrust ECC Certification Authority"
-# Serial: 123013823720199481456569720443997572134
-# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
-# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
-# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
------BEGIN CERTIFICATE-----
-MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
-MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
-eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
-JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
-Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
-VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
-I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
-o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
-A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
-zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
-RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Label: "GlobalSign ECC Root CA - R4"
-# Serial: 14367148294922964480859022125800977897474
-# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
-# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
-# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
------BEGIN CERTIFICATE-----
-MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
-FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
-uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
-ewv4n4Q=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Label: "GlobalSign ECC Root CA - R5"
-# Serial: 32785792099990507226680698011560947931244
-# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
-# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
-# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
------BEGIN CERTIFICATE-----
-MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
-hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
-KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
-515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
-xwy8p2Fp8fc74SrL+SvzZpA3
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G3"
-# Serial: 10003001
-# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
-# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
-# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
------BEGIN CERTIFICATE-----
-MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
-DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
-cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
-IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
-xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
-KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
-9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
-5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
-6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
-Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
-bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
-BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
-XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
-INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
-U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
-LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
-Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
-gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
-/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
-fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
-4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
-1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
-QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
-94B7IWcnMFk=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden EV Root CA"
-# Serial: 10000013
-# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
-# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
-# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
------BEGIN CERTIFICATE-----
-MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
-MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
-TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
-b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
-M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
-UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
-Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
-rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
-pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
-j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
-KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
-/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
-cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
-1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
-px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
-/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
-MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
-eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
-2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
-v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
-wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
-CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
-vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
-Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
-Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
-eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
-FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
-7uzXLg==
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Label: "IdenTrust Commercial Root CA 1"
-# Serial: 13298821034946342390520003877796839426
-# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
-# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
-# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
-VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
-JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
-3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
-+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
-S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
-bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
-T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
-vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
-Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
-dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
-c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
-l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
-iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
-ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
-6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
-LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
-nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
-+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
-W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
-AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
-l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
-4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
-mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
-7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Label: "IdenTrust Public Sector Root CA 1"
-# Serial: 13298821034946342390521976156843933698
-# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
-# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
-# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
------BEGIN CERTIFICATE-----
-MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
-VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
-MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
-ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
-RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
-bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
-/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
-3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
-EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
-9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
-GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
-2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
-WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
-W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
-AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
-DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
-TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
-lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
-mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
-WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
-+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
-tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
-GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
-8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G2"
-# Serial: 1246989352
-# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
-# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
-# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
------BEGIN CERTIFICATE-----
-MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
-cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
-IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
-dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
-NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
-dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
-dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
-aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
-AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
-RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
-cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
-wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
-U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
-jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
-BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
-BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
-jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
-Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
-1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
-nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
-VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - EC1"
-# Serial: 51543124481930649114116133369
-# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
-# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
-# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
------BEGIN CERTIFICATE-----
-MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
-A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
-d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
-dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
-RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
-MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
-VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
-L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
-Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
-ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
-A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
-ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
-Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
-R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
-hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
------END CERTIFICATE-----
-
-# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Label: "CFCA EV ROOT"
-# Serial: 407555286
-# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
-# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
-# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
------BEGIN CERTIFICATE-----
-MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
-TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
-MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
-aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
-T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
-sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
-TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
-/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
-7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
-EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
-hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
-a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
-aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
-TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
-PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
-cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
-tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
-BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
-ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
-ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
-jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
-ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
-xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
-Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
-5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
-/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
-AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
-5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
-# Serial: 156233699172481
-# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
-# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
-# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
------BEGIN CERTIFICATE-----
-MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
-BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
-aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
-QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
-MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
-VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
-dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
-bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
-/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
-Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
-4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
-5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
-hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
-AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
-SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
-VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
-URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
-peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
-Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
-+qtB4Uu2NQvAmxU=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
-# Serial: 138134509972618
-# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
-# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
-# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
------BEGIN CERTIFICATE-----
-MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
-EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
-IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
-LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
-aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
-NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
-BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
-ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
-ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
-eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
-+bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
-z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
-u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
-lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
-AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
-FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
-QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
-o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
-gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
-9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
-tAuYSyher4hYyw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Root CA"
-# Serial: 1
-# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
-# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
-# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
------BEGIN CERTIFICATE-----
-MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
-BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
-MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
-FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
-Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
-fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
-LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
-WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
-TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
-5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
-CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
-wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
-wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
-m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
-F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
-WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
-2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
-AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
-0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
-F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
-g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
-qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
-h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
-ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
-btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
-Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
-8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
-gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
------END CERTIFICATE-----
-# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Secure Server CA"
-# Serial: 927650371
-# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
-# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
-# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Label: "ValiCert Class 2 VA"
-# Serial: 1
-# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
-# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
-# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Express (Class C) Root"
-# Serial: 104
-# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
-# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
-# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
------BEGIN CERTIFICATE-----
-MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
-EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
-DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
-DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
-c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
-TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
-OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
-2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
-RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
-AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
-ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
-YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
-b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
-ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
-IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
-b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
-ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
-YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
-a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
-SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
-aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
-YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
-Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
-ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
-pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
-Fp1hBWeAyNDYpQcCNJgEjTME1A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Business (Class B) Root"
-# Serial: 105
-# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
-# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
-# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
------BEGIN CERTIFICATE-----
-MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
-EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
-OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
-A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
-Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
-dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
-gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
-iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
-Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
-BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
-SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
-b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
-bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
-Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
-aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
-IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
-c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
-biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
-ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
-UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
-YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
-dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
-bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
-sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
-n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
-NitjrFgBazMpUIaD8QFI
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Label: "RSA Root Certificate 1"
-# Serial: 1
-# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
-# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
-# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Label: "ValiCert Class 1 VA"
-# Serial: 1
-# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
-# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
-# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure eBusiness CA 1"
-# Serial: 4
-# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
-# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
-# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure Global eBusiness CA"
-# Serial: 1
-# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
-# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
-# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Premium Server CA"
-# Serial: 1
-# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
-# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
-# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
------BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Server CA"
-# Serial: 1
-# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
-# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
-# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
------BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 149843929435818692848040365716851702463
-# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
-# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
-# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
-lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
-AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 80507572722862485515306429940691309246
-# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
-# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
-# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
-2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
-2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
-# Serial: 167285380242319648451154478808036881606
-# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
-# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
-# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
------END CERTIFICATE-----
-
-# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Label: "GTE CyberTrust Global Root"
-# Serial: 421
-# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
-# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
-# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
------BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
------END CERTIFICATE-----
diff --git a/python/ext-libs/requests/certs.py b/python/ext-libs/requests/certs.py
deleted file mode 100644
index 07e6475..0000000
--- a/python/ext-libs/requests/certs.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-certs.py
-~~~~~~~~
-
-This module returns the preferred default CA certificate bundle.
-
-If you are packaging Requests, e.g., for a Linux distribution or a managed
-environment, you can change the definition of where() to return a separately
-packaged CA bundle.
-"""
-import os.path
-
-try:
-    from certifi import where
-except ImportError:
-    def where():
-        """Return the preferred certificate bundle."""
-        # vendored bundle inside Requests
-        return os.path.join(os.path.dirname(__file__), 'cacert.pem')
-
-if __name__ == '__main__':
-    print(where())
diff --git a/python/ext-libs/requests/compat.py b/python/ext-libs/requests/compat.py
deleted file mode 100644
index 70edff7..0000000
--- a/python/ext-libs/requests/compat.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-pythoncompat
-"""
-
-from .packages import chardet
-
-import sys
-
-# -------
-# Pythons
-# -------
-
-# Syntax sugar.
-_ver = sys.version_info
-
-#: Python 2.x?
-is_py2 = (_ver[0] == 2)
-
-#: Python 3.x?
-is_py3 = (_ver[0] == 3)
-
-try:
-    import simplejson as json
-except (ImportError, SyntaxError):
-    # simplejson does not support Python 3.2, it throws a SyntaxError
-    # because of u'...' Unicode literals.
-    import json
-
-# ---------
-# Specifics
-# ---------
-
-if is_py2:
-    from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
-    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
-    from urllib2 import parse_http_list
-    import cookielib
-    from Cookie import Morsel
-    from StringIO import StringIO
-    from .packages.urllib3.packages.ordered_dict import OrderedDict
-
-    builtin_str = str
-    bytes = str
-    str = unicode
-    basestring = basestring
-    numeric_types = (int, long, float)
-
-elif is_py3:
-    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
-    from urllib.request import parse_http_list, getproxies, proxy_bypass
-    from http import cookiejar as cookielib
-    from http.cookies import Morsel
-    from io import StringIO
-    from collections import OrderedDict
-
-    builtin_str = str
-    str = str
-    bytes = bytes
-    basestring = (str, bytes)
-    numeric_types = (int, float)
diff --git a/python/ext-libs/requests/cookies.py b/python/ext-libs/requests/cookies.py
deleted file mode 100644
index eee5168..0000000
--- a/python/ext-libs/requests/cookies.py
+++ /dev/null
@@ -1,493 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-Compatibility code to be able to use `cookielib.CookieJar` with requests.
-
-requests.utils imports from here, so be careful with imports.
-"""
-
-import copy
-import time
-import calendar
-import collections
-from .compat import cookielib, urlparse, urlunparse, Morsel
-
-try:
-    import threading
-    # grr, pyflakes: this fixes "redefinition of unused 'threading'"
-    threading
-except ImportError:
-    import dummy_threading as threading
-
-
-class MockRequest(object):
-    """Wraps a `requests.Request` to mimic a `urllib2.Request`.
-
-    The code in `cookielib.CookieJar` expects this interface in order to correctly
-    manage cookie policies, i.e., determine whether a cookie can be set, given the
-    domains of the request and the cookie.
-
-    The original request object is read-only. The client is responsible for collecting
-    the new headers via `get_new_headers()` and interpreting them appropriately. You
-    probably want `get_cookie_header`, defined below.
-    """
-
-    def __init__(self, request):
-        self._r = request
-        self._new_headers = {}
-        self.type = urlparse(self._r.url).scheme
-
-    def get_type(self):
-        return self.type
-
-    def get_host(self):
-        return urlparse(self._r.url).netloc
-
-    def get_origin_req_host(self):
-        return self.get_host()
-
-    def get_full_url(self):
-        # Only return the response's URL if the user hadn't set the Host
-        # header
-        if not self._r.headers.get('Host'):
-            return self._r.url
-        # If they did set it, retrieve it and reconstruct the expected domain
-        host = self._r.headers['Host']
-        parsed = urlparse(self._r.url)
-        # Reconstruct the URL as we expect it
-        return urlunparse([
-            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
-            parsed.fragment
-        ])
-
-    def is_unverifiable(self):
-        return True
-
-    def has_header(self, name):
-        return name in self._r.headers or name in self._new_headers
-
-    def get_header(self, name, default=None):
-        return self._r.headers.get(name, self._new_headers.get(name, default))
-
-    def add_header(self, key, val):
-        """cookielib has no legitimate use for this method; add it back if you find one."""
-        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
-
-    def add_unredirected_header(self, name, value):
-        self._new_headers[name] = value
-
-    def get_new_headers(self):
-        return self._new_headers
-
-    @property
-    def unverifiable(self):
-        return self.is_unverifiable()
-
-    @property
-    def origin_req_host(self):
-        return self.get_origin_req_host()
-
-    @property
-    def host(self):
-        return self.get_host()
-
-
-class MockResponse(object):
-    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
-
-    ...what? Basically, expose the parsed HTTP headers from the server response
-    the way `cookielib` expects to see them.
-    """
-
-    def __init__(self, headers):
-        """Make a MockResponse for `cookielib` to read.
-
-        :param headers: a httplib.HTTPMessage or analogous carrying the headers
-        """
-        self._headers = headers
-
-    def info(self):
-        return self._headers
-
-    def getheaders(self, name):
-        self._headers.getheaders(name)
-
-
-def extract_cookies_to_jar(jar, request, response):
-    """Extract the cookies from the response into a CookieJar.
-
-    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
-    :param request: our own requests.Request object
-    :param response: urllib3.HTTPResponse object
-    """
-    if not (hasattr(response, '_original_response') and
-            response._original_response):
-        return
-    # the _original_response field is the wrapped httplib.HTTPResponse object,
-    req = MockRequest(request)
-    # pull out the HTTPMessage with the headers and put it in the mock:
-    res = MockResponse(response._original_response.msg)
-    jar.extract_cookies(res, req)
-
-
-def get_cookie_header(jar, request):
-    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
-    r = MockRequest(request)
-    jar.add_cookie_header(r)
-    return r.get_new_headers().get('Cookie')
-
-
-def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
-    """Unsets a cookie by name, by default over all domains and paths.
-
-    Wraps CookieJar.clear(), is O(n).
-    """
-    clearables = []
-    for cookie in cookiejar:
-        if cookie.name != name:
-            continue
-        if domain is not None and domain != cookie.domain:
-            continue
-        if path is not None and path != cookie.path:
-            continue
-        clearables.append((cookie.domain, cookie.path, cookie.name))
-
-    for domain, path, name in clearables:
-        cookiejar.clear(domain, path, name)
-
-
-class CookieConflictError(RuntimeError):
-    """There are two cookies that meet the criteria specified in the cookie jar.
-    Use .get and .set and include domain and path args in order to be more specific."""
-
-
-class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
-    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
-    interface.
-
-    This is the CookieJar we create by default for requests and sessions that
-    don't specify one, since some clients may expect response.cookies and
-    session.cookies to support dict operations.
-
-    Requests does not use the dict interface internally; it's just for
-    compatibility with external client code. All requests code should work
-    out of the box with externally provided instances of ``CookieJar``, e.g.
-    ``LWPCookieJar`` and ``FileCookieJar``.
-
-    Unlike a regular CookieJar, this class is pickleable.
-
-    .. warning:: dictionary operations that are normally O(1) may be O(n).
-    """
-    def get(self, name, default=None, domain=None, path=None):
-        """Dict-like get() that also supports optional domain and path args in
-        order to resolve naming collisions from using one cookie jar over
-        multiple domains.
-
-        .. warning:: operation is O(n), not O(1)."""
-        try:
-            return self._find_no_duplicates(name, domain, path)
-        except KeyError:
-            return default
-
-    def set(self, name, value, **kwargs):
-        """Dict-like set() that also supports optional domain and path args in
-        order to resolve naming collisions from using one cookie jar over
-        multiple domains."""
-        # support client code that unsets cookies by assignment of a None value:
-        if value is None:
-            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
-            return
-
-        if isinstance(value, Morsel):
-            c = morsel_to_cookie(value)
-        else:
-            c = create_cookie(name, value, **kwargs)
-        self.set_cookie(c)
-        return c
-
-    def iterkeys(self):
-        """Dict-like iterkeys() that returns an iterator of names of cookies
-        from the jar. See itervalues() and iteritems()."""
-        for cookie in iter(self):
-            yield cookie.name
-
-    def keys(self):
-        """Dict-like keys() that returns a list of names of cookies from the
-        jar. See values() and items()."""
-        return list(self.iterkeys())
-
-    def itervalues(self):
-        """Dict-like itervalues() that returns an iterator of values of cookies
-        from the jar. See iterkeys() and iteritems()."""
-        for cookie in iter(self):
-            yield cookie.value
-
-    def values(self):
-        """Dict-like values() that returns a list of values of cookies from the
-        jar. See keys() and items()."""
-        return list(self.itervalues())
-
-    def iteritems(self):
-        """Dict-like iteritems() that returns an iterator of name-value tuples
-        from the jar. See iterkeys() and itervalues()."""
-        for cookie in iter(self):
-            yield cookie.name, cookie.value
-
-    def items(self):
-        """Dict-like items() that returns a list of name-value tuples from the
-        jar. See keys() and values(). Allows client-code to call
-        ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
-        pairs."""
-        return list(self.iteritems())
-
-    def list_domains(self):
-        """Utility method to list all the domains in the jar."""
-        domains = []
-        for cookie in iter(self):
-            if cookie.domain not in domains:
-                domains.append(cookie.domain)
-        return domains
-
-    def list_paths(self):
-        """Utility method to list all the paths in the jar."""
-        paths = []
-        for cookie in iter(self):
-            if cookie.path not in paths:
-                paths.append(cookie.path)
-        return paths
-
-    def multiple_domains(self):
-        """Returns True if there are multiple domains in the jar.
-        Returns False otherwise."""
-        domains = []
-        for cookie in iter(self):
-            if cookie.domain is not None and cookie.domain in domains:
-                return True
-            domains.append(cookie.domain)
-        return False  # there is only one domain in jar
-
-    def get_dict(self, domain=None, path=None):
-        """Takes as an argument an optional domain and path and returns a plain
-        old Python dict of name-value pairs of cookies that meet the
-        requirements."""
-        dictionary = {}
-        for cookie in iter(self):
-            if (domain is None or cookie.domain == domain) and (path is None
-                                                or cookie.path == path):
-                dictionary[cookie.name] = cookie.value
-        return dictionary
-
-    def __contains__(self, name):
-        try:
-            return super(RequestsCookieJar, self).__contains__(name)
-        except CookieConflictError:
-            return True
-
-    def __getitem__(self, name):
-        """Dict-like __getitem__() for compatibility with client code. Throws
-        exception if there are more than one cookie with name. In that case,
-        use the more explicit get() method instead.
-
-        .. warning:: operation is O(n), not O(1)."""
-
-        return self._find_no_duplicates(name)
-
-    def __setitem__(self, name, value):
-        """Dict-like __setitem__ for compatibility with client code. Throws
-        exception if there is already a cookie of that name in the jar. In that
-        case, use the more explicit set() method instead."""
-
-        self.set(name, value)
-
-    def __delitem__(self, name):
-        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
-        ``remove_cookie_by_name()``."""
-        remove_cookie_by_name(self, name)
-
-    def set_cookie(self, cookie, *args, **kwargs):
-        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
-            cookie.value = cookie.value.replace('\\"', '')
-        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
-
-    def update(self, other):
-        """Updates this jar with cookies from another CookieJar or dict-like"""
-        if isinstance(other, cookielib.CookieJar):
-            for cookie in other:
-                self.set_cookie(copy.copy(cookie))
-        else:
-            super(RequestsCookieJar, self).update(other)
-
-    def _find(self, name, domain=None, path=None):
-        """Requests uses this method internally to get cookie values. Takes as
-        args name and optional domain and path. Returns a cookie.value. If
-        there are conflicting cookies, _find arbitrarily chooses one. See
-        _find_no_duplicates if you want an exception thrown if there are
-        conflicting cookies."""
-        for cookie in iter(self):
-            if cookie.name == name:
-                if domain is None or cookie.domain == domain:
-                    if path is None or cookie.path == path:
-                        return cookie.value
-
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
-    def _find_no_duplicates(self, name, domain=None, path=None):
-        """Both ``__get_item__`` and ``get`` call this function: it's never
-        used elsewhere in Requests. Takes as args name and optional domain and
-        path. Returns a cookie.value. Throws KeyError if cookie is not found
-        and CookieConflictError if there are multiple cookies that match name
-        and optionally domain and path."""
-        toReturn = None
-        for cookie in iter(self):
-            if cookie.name == name:
-                if domain is None or cookie.domain == domain:
-                    if path is None or cookie.path == path:
-                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
-                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
-                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
-
-        if toReturn:
-            return toReturn
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
-    def __getstate__(self):
-        """Unlike a normal CookieJar, this class is pickleable."""
-        state = self.__dict__.copy()
-        # remove the unpickleable RLock object
-        state.pop('_cookies_lock')
-        return state
-
-    def __setstate__(self, state):
-        """Unlike a normal CookieJar, this class is pickleable."""
-        self.__dict__.update(state)
-        if '_cookies_lock' not in self.__dict__:
-            self._cookies_lock = threading.RLock()
-
-    def copy(self):
-        """Return a copy of this RequestsCookieJar."""
-        new_cj = RequestsCookieJar()
-        new_cj.update(self)
-        return new_cj
-
-
-def _copy_cookie_jar(jar):
-    if jar is None:
-        return None
-
-    if hasattr(jar, 'copy'):
-        # We're dealing with an instance of RequestsCookieJar
-        return jar.copy()
-    # We're dealing with a generic CookieJar instance
-    new_jar = copy.copy(jar)
-    new_jar.clear()
-    for cookie in jar:
-        new_jar.set_cookie(copy.copy(cookie))
-    return new_jar
-
-
-def create_cookie(name, value, **kwargs):
-    """Make a cookie from underspecified parameters.
-
-    By default, the pair of `name` and `value` will be set for the domain ''
-    and sent on every request (this is sometimes called a "supercookie").
-    """
-    result = dict(
-        version=0,
-        name=name,
-        value=value,
-        port=None,
-        domain='',
-        path='/',
-        secure=False,
-        expires=None,
-        discard=True,
-        comment=None,
-        comment_url=None,
-        rest={'HttpOnly': None},
-        rfc2109=False,)
-
-    badargs = set(kwargs) - set(result)
-    if badargs:
-        err = 'create_cookie() got unexpected keyword arguments: %s'
-        raise TypeError(err % list(badargs))
-
-    result.update(kwargs)
-    result['port_specified'] = bool(result['port'])
-    result['domain_specified'] = bool(result['domain'])
-    result['domain_initial_dot'] = result['domain'].startswith('.')
-    result['path_specified'] = bool(result['path'])
-
-    return cookielib.Cookie(**result)
-
-
-def morsel_to_cookie(morsel):
-    """Convert a Morsel object into a Cookie containing the one k/v pair."""
-
-    expires = None
-    if morsel['max-age']:
-        try:
-            expires = int(time.time() + int(morsel['max-age']))
-        except ValueError:
-            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
-    elif morsel['expires']:
-        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
-        expires = calendar.timegm(
-            time.strptime(morsel['expires'], time_template)
-        )
-    return create_cookie(
-        comment=morsel['comment'],
-        comment_url=bool(morsel['comment']),
-        discard=False,
-        domain=morsel['domain'],
-        expires=expires,
-        name=morsel.key,
-        path=morsel['path'],
-        port=None,
-        rest={'HttpOnly': morsel['httponly']},
-        rfc2109=False,
-        secure=bool(morsel['secure']),
-        value=morsel.value,
-        version=morsel['version'] or 0,
-    )
-
-
-def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
-    """Returns a CookieJar from a key/value dictionary.
-
-    :param cookie_dict: Dict of key/values to insert into CookieJar.
-    :param cookiejar: (optional) A cookiejar to add the cookies to.
-    :param overwrite: (optional) If False, will not replace cookies
-        already in the jar with new ones.
-    """
-    if cookiejar is None:
-        cookiejar = RequestsCookieJar()
-
-    if cookie_dict is not None:
-        names_from_jar = [cookie.name for cookie in cookiejar]
-        for name in cookie_dict:
-            if overwrite or (name not in names_from_jar):
-                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
-
-    return cookiejar
-
-
-def merge_cookies(cookiejar, cookies):
-    """Add cookies to cookiejar and returns a merged CookieJar.
-
-    :param cookiejar: CookieJar object to add the cookies to.
-    :param cookies: Dictionary or CookieJar object to be added.
-    """
-    if not isinstance(cookiejar, cookielib.CookieJar):
-        raise ValueError('You can only merge into CookieJar')
-
-    if isinstance(cookies, dict):
-        cookiejar = cookiejar_from_dict(
-            cookies, cookiejar=cookiejar, overwrite=False)
-    elif isinstance(cookies, cookielib.CookieJar):
-        try:
-            cookiejar.update(cookies)
-        except AttributeError:
-            for cookie_in_jar in cookies:
-                cookiejar.set_cookie(cookie_in_jar)
-
-    return cookiejar
diff --git a/python/ext-libs/requests/exceptions.py b/python/ext-libs/requests/exceptions.py
deleted file mode 100644
index ba0b910..0000000
--- a/python/ext-libs/requests/exceptions.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.exceptions
-~~~~~~~~~~~~~~~~~~~
-
-This module contains the set of Requests' exceptions.
-
-"""
-from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
-
-
-class RequestException(IOError):
-    """There was an ambiguous exception that occurred while handling your
-    request."""
-
-    def __init__(self, *args, **kwargs):
-        """
-        Initialize RequestException with `request` and `response` objects.
-        """
-        response = kwargs.pop('response', None)
-        self.response = response
-        self.request = kwargs.pop('request', None)
-        if (response is not None and not self.request and
-                hasattr(response, 'request')):
-            self.request = self.response.request
-        super(RequestException, self).__init__(*args, **kwargs)
-
-
-class HTTPError(RequestException):
-    """An HTTP error occurred."""
-
-
-class ConnectionError(RequestException):
-    """A Connection error occurred."""
-
-
-class ProxyError(ConnectionError):
-    """A proxy error occurred."""
-
-
-class SSLError(ConnectionError):
-    """An SSL error occurred."""
-
-
-class Timeout(RequestException):
-    """The request timed out.
-
-    Catching this error will catch both
-    :exc:`~requests.exceptions.ConnectTimeout` and
-    :exc:`~requests.exceptions.ReadTimeout` errors.
-    """
-
-
-class ConnectTimeout(ConnectionError, Timeout):
-    """The request timed out while trying to connect to the remote server.
-
-    Requests that produced this error are safe to retry.
-    """
-
-
-class ReadTimeout(Timeout):
-    """The server did not send any data in the allotted amount of time."""
-
-
-class URLRequired(RequestException):
-    """A valid URL is required to make a request."""
-
-
-class TooManyRedirects(RequestException):
-    """Too many redirects."""
-
-
-class MissingSchema(RequestException, ValueError):
-    """The URL schema (e.g. http or https) is missing."""
-
-
-class InvalidSchema(RequestException, ValueError):
-    """See defaults.py for valid schemas."""
-
-
-class InvalidURL(RequestException, ValueError):
-    """ The URL provided was somehow invalid. """
-
-
-class ChunkedEncodingError(RequestException):
-    """The server declared chunked encoding but sent an invalid chunk."""
-
-
-class ContentDecodingError(RequestException, BaseHTTPError):
-    """Failed to decode response content"""
-
-
-class StreamConsumedError(RequestException, TypeError):
-    """The content for this response was already consumed"""
-
-
-class RetryError(RequestException):
-    """Custom retries logic failed"""
-
-
-# Warnings
-
-
-class RequestsWarning(Warning):
-    """Base warning for Requests."""
-    pass
-
-
-class FileModeWarning(RequestsWarning, DeprecationWarning):
-    """
-    A file was opened in text mode, but Requests determined its binary length.
-    """
-    pass
diff --git a/python/ext-libs/requests/hooks.py b/python/ext-libs/requests/hooks.py
deleted file mode 100644
index 9da9436..0000000
--- a/python/ext-libs/requests/hooks.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.hooks
-~~~~~~~~~~~~~~
-
-This module provides the capabilities for the Requests hooks system.
-
-Available hooks:
-
-``response``:
-    The response generated from a Request.
-
-"""
-HOOKS = ['response']
-
-def default_hooks():
-    return dict((event, []) for event in HOOKS)
-
-# TODO: response is the only one
-
-
-def dispatch_hook(key, hooks, hook_data, **kwargs):
-    """Dispatches a hook dictionary on a given piece of data."""
-    hooks = hooks or dict()
-    hooks = hooks.get(key)
-    if hooks:
-        if hasattr(hooks, '__call__'):
-            hooks = [hooks]
-        for hook in hooks:
-            _hook_data = hook(hook_data, **kwargs)
-            if _hook_data is not None:
-                hook_data = _hook_data
-    return hook_data
diff --git a/python/ext-libs/requests/models.py b/python/ext-libs/requests/models.py
deleted file mode 100644
index fe4bec1..0000000
--- a/python/ext-libs/requests/models.py
+++ /dev/null
@@ -1,855 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.models
-~~~~~~~~~~~~~~~
-
-This module contains the primary objects that power Requests.
-"""
-
-import collections
-import datetime
-
-from io import BytesIO, UnsupportedOperation
-from .hooks import default_hooks
-from .structures import CaseInsensitiveDict
-
-from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
-from .packages.urllib3.fields import RequestField
-from .packages.urllib3.filepost import encode_multipart_formdata
-from .packages.urllib3.util import parse_url
-from .packages.urllib3.exceptions import (
-    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
-from .exceptions import (
-    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
-    ContentDecodingError, ConnectionError, StreamConsumedError)
-from .utils import (
-    guess_filename, get_auth_from_url, requote_uri,
-    stream_decode_response_unicode, to_key_val_list, parse_header_links,
-    iter_slices, guess_json_utf, super_len, to_native_string)
-from .compat import (
-    cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
-    is_py2, chardet, builtin_str, basestring)
-from .compat import json as complexjson
-from .status_codes import codes
-
-#: The set of HTTP status codes that indicate an automatically
-#: processable redirect.
-REDIRECT_STATI = (
-    codes.moved,              # 301
-    codes.found,              # 302
-    codes.other,              # 303
-    codes.temporary_redirect, # 307
-    codes.permanent_redirect, # 308
-)
-
-DEFAULT_REDIRECT_LIMIT = 30
-CONTENT_CHUNK_SIZE = 10 * 1024
-ITER_CHUNK_SIZE = 512
-
-
-class RequestEncodingMixin(object):
-    @property
-    def path_url(self):
-        """Build the path URL to use."""
-
-        url = []
-
-        p = urlsplit(self.url)
-
-        path = p.path
-        if not path:
-            path = '/'
-
-        url.append(path)
-
-        query = p.query
-        if query:
-            url.append('?')
-            url.append(query)
-
-        return ''.join(url)
-
-    @staticmethod
-    def _encode_params(data):
-        """Encode parameters in a piece of data.
-
-        Will successfully encode parameters when passed as a dict or a list of
-        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
-        if parameters are supplied as a dict.
-        """
-
-        if isinstance(data, (str, bytes)):
-            return data
-        elif hasattr(data, 'read'):
-            return data
-        elif hasattr(data, '__iter__'):
-            result = []
-            for k, vs in to_key_val_list(data):
-                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
-                    vs = [vs]
-                for v in vs:
-                    if v is not None:
-                        result.append(
-                            (k.encode('utf-8') if isinstance(k, str) else k,
-                             v.encode('utf-8') if isinstance(v, str) else v))
-            return urlencode(result, doseq=True)
-        else:
-            return data
-
-    @staticmethod
-    def _encode_files(files, data):
-        """Build the body for a multipart/form-data request.
-
-        Will successfully encode files when passed as a dict or a list of
-        tuples. Order is retained if data is a list of tuples but arbitrary
-        if parameters are supplied as a dict.
-        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
-        or 4-tuples (filename, fileobj, contentype, custom_headers).
-
-        """
-        if (not files):
-            raise ValueError("Files must be provided.")
-        elif isinstance(data, basestring):
-            raise ValueError("Data must not be a string.")
-
-        new_fields = []
-        fields = to_key_val_list(data or {})
-        files = to_key_val_list(files or {})
-
-        for field, val in fields:
-            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
-                val = [val]
-            for v in val:
-                if v is not None:
-                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
-                    if not isinstance(v, bytes):
-                        v = str(v)
-
-                    new_fields.append(
-                        (field.decode('utf-8') if isinstance(field, bytes) else field,
-                         v.encode('utf-8') if isinstance(v, str) else v))
-
-        for (k, v) in files:
-            # support for explicit filename
-            ft = None
-            fh = None
-            if isinstance(v, (tuple, list)):
-                if len(v) == 2:
-                    fn, fp = v
-                elif len(v) == 3:
-                    fn, fp, ft = v
-                else:
-                    fn, fp, ft, fh = v
-            else:
-                fn = guess_filename(v) or k
-                fp = v
-
-            if isinstance(fp, (str, bytes, bytearray)):
-                fdata = fp
-            else:
-                fdata = fp.read()
-
-            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
-            rf.make_multipart(content_type=ft)
-            new_fields.append(rf)
-
-        body, content_type = encode_multipart_formdata(new_fields)
-
-        return body, content_type
-
-
-class RequestHooksMixin(object):
-    def register_hook(self, event, hook):
-        """Properly register a hook."""
-
-        if event not in self.hooks:
-            raise ValueError('Unsupported event specified, with event name "%s"' % (event))
-
-        if isinstance(hook, collections.Callable):
-            self.hooks[event].append(hook)
-        elif hasattr(hook, '__iter__'):
-            self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
-
-    def deregister_hook(self, event, hook):
-        """Deregister a previously registered hook.
-        Returns True if the hook existed, False if not.
-        """
-
-        try:
-            self.hooks[event].remove(hook)
-            return True
-        except ValueError:
-            return False
-
-
-class Request(RequestHooksMixin):
-    """A user-created :class:`Request <Request>` object.
-
-    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
-
-    :param method: HTTP method to use.
-    :param url: URL to send.
-    :param headers: dictionary of headers to send.
-    :param files: dictionary of {filename: fileobject} files to multipart upload.
-    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
-    :param json: json for the body to attach to the request (if files or data is not specified).
-    :param params: dictionary of URL parameters to append to the URL.
-    :param auth: Auth handler or (user, pass) tuple.
-    :param cookies: dictionary or CookieJar of cookies to attach to this request.
-    :param hooks: dictionary of callback hooks, for internal usage.
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.Request('GET', 'http://httpbin.org/get')
-      >>> req.prepare()
-      <PreparedRequest [GET]>
-
-    """
-    def __init__(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
-
-        # Default empty dicts for dict params.
-        data = [] if data is None else data
-        files = [] if files is None else files
-        headers = {} if headers is None else headers
-        params = {} if params is None else params
-        hooks = {} if hooks is None else hooks
-
-        self.hooks = default_hooks()
-        for (k, v) in list(hooks.items()):
-            self.register_hook(event=k, hook=v)
-
-        self.method = method
-        self.url = url
-        self.headers = headers
-        self.files = files
-        self.data = data
-        self.json = json
-        self.params = params
-        self.auth = auth
-        self.cookies = cookies
-
-    def __repr__(self):
-        return '<Request [%s]>' % (self.method)
-
-    def prepare(self):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
-        p = PreparedRequest()
-        p.prepare(
-            method=self.method,
-            url=self.url,
-            headers=self.headers,
-            files=self.files,
-            data=self.data,
-            json=self.json,
-            params=self.params,
-            auth=self.auth,
-            cookies=self.cookies,
-            hooks=self.hooks,
-        )
-        return p
-
-
-class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
-    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
-    containing the exact bytes that will be sent to the server.
-
-    Generated from either a :class:`Request <Request>` object or manually.
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.Request('GET', 'http://httpbin.org/get')
-      >>> r = req.prepare()
-      <PreparedRequest [GET]>
-
-      >>> s = requests.Session()
-      >>> s.send(r)
-      <Response [200]>
-
-    """
-
-    def __init__(self):
-        #: HTTP verb to send to the server.
-        self.method = None
-        #: HTTP URL to send the request to.
-        self.url = None
-        #: dictionary of HTTP headers.
-        self.headers = None
-        # The `CookieJar` used to create the Cookie header will be stored here
-        # after prepare_cookies is called
-        self._cookies = None
-        #: request body to send to the server.
-        self.body = None
-        #: dictionary of callback hooks, for internal usage.
-        self.hooks = default_hooks()
-
-    def prepare(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
-        """Prepares the entire request with the given parameters."""
-
-        self.prepare_method(method)
-        self.prepare_url(url, params)
-        self.prepare_headers(headers)
-        self.prepare_cookies(cookies)
-        self.prepare_body(data, files, json)
-        self.prepare_auth(auth, url)
-
-        # Note that prepare_auth must be last to enable authentication schemes
-        # such as OAuth to work on a fully prepared request.
-
-        # This MUST go after prepare_auth. Authenticators could add a hook
-        self.prepare_hooks(hooks)
-
-    def __repr__(self):
-        return '<PreparedRequest [%s]>' % (self.method)
-
-    def copy(self):
-        p = PreparedRequest()
-        p.method = self.method
-        p.url = self.url
-        p.headers = self.headers.copy() if self.headers is not None else None
-        p._cookies = _copy_cookie_jar(self._cookies)
-        p.body = self.body
-        p.hooks = self.hooks
-        return p
-
-    def prepare_method(self, method):
-        """Prepares the given HTTP method."""
-        self.method = method
-        if self.method is not None:
-            self.method = to_native_string(self.method.upper())
-
-    def prepare_url(self, url, params):
-        """Prepares the given HTTP URL."""
-        #: Accept objects that have string representations.
-        #: We're unable to blindly call unicode/str functions
-        #: as this will include the bytestring indicator (b'')
-        #: on python 3.x.
-        #: https://github.com/kennethreitz/requests/pull/2238
-        if isinstance(url, bytes):
-            url = url.decode('utf8')
-        else:
-            url = unicode(url) if is_py2 else str(url)
-
-        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
-        # `data` etc to work around exceptions from `url_parse`, which
-        # handles RFC 3986 only.
-        if ':' in url and not url.lower().startswith('http'):
-            self.url = url
-            return
-
-        # Support for unicode domain names and paths.
-        try:
-            scheme, auth, host, port, path, query, fragment = parse_url(url)
-        except LocationParseError as e:
-            raise InvalidURL(*e.args)
-
-        if not scheme:
-            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
-            error = error.format(to_native_string(url, 'utf8'))
-
-            raise MissingSchema(error)
-
-        if not host:
-            raise InvalidURL("Invalid URL %r: No host supplied" % url)
-
-        # Only want to apply IDNA to the hostname
-        try:
-            host = host.encode('idna').decode('utf-8')
-        except UnicodeError:
-            raise InvalidURL('URL has an invalid label.')
-
-        # Carefully reconstruct the network location
-        netloc = auth or ''
-        if netloc:
-            netloc += '@'
-        netloc += host
-        if port:
-            netloc += ':' + str(port)
-
-        # Bare domains aren't valid URLs.
-        if not path:
-            path = '/'
-
-        if is_py2:
-            if isinstance(scheme, str):
-                scheme = scheme.encode('utf-8')
-            if isinstance(netloc, str):
-                netloc = netloc.encode('utf-8')
-            if isinstance(path, str):
-                path = path.encode('utf-8')
-            if isinstance(query, str):
-                query = query.encode('utf-8')
-            if isinstance(fragment, str):
-                fragment = fragment.encode('utf-8')
-
-        if isinstance(params, (str, bytes)):
-            params = to_native_string(params)
-
-        enc_params = self._encode_params(params)
-        if enc_params:
-            if query:
-                query = '%s&%s' % (query, enc_params)
-            else:
-                query = enc_params
-
-        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
-        self.url = url
-
-    def prepare_headers(self, headers):
-        """Prepares the given HTTP headers."""
-
-        if headers:
-            self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
-        else:
-            self.headers = CaseInsensitiveDict()
-
-    def prepare_body(self, data, files, json=None):
-        """Prepares the given HTTP body data."""
-
-        # Check if file, fo, generator, iterator.
-        # If not, run through normal process.
-
-        # Nottin' on you.
-        body = None
-        content_type = None
-        length = None
-
-        if not data and json is not None:
-            content_type = 'application/json'
-            body = complexjson.dumps(json)
-
-        is_stream = all([
-            hasattr(data, '__iter__'),
-            not isinstance(data, (basestring, list, tuple, dict))
-        ])
-
-        try:
-            length = super_len(data)
-        except (TypeError, AttributeError, UnsupportedOperation):
-            length = None
-
-        if is_stream:
-            body = data
-
-            if files:
-                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
-
-            if length:
-                self.headers['Content-Length'] = builtin_str(length)
-            else:
-                self.headers['Transfer-Encoding'] = 'chunked'
-        else:
-            # Multi-part file uploads.
-            if files:
-                (body, content_type) = self._encode_files(files, data)
-            else:
-                if data:
-                    body = self._encode_params(data)
-                    if isinstance(data, basestring) or hasattr(data, 'read'):
-                        content_type = None
-                    else:
-                        content_type = 'application/x-www-form-urlencoded'
-
-            self.prepare_content_length(body)
-
-            # Add content-type if it wasn't explicitly provided.
-            if content_type and ('content-type' not in self.headers):
-                self.headers['Content-Type'] = content_type
-
-        self.body = body
-
-    def prepare_content_length(self, body):
-        if hasattr(body, 'seek') and hasattr(body, 'tell'):
-            curr_pos = body.tell()
-            body.seek(0, 2)
-            end_pos = body.tell()
-            self.headers['Content-Length'] = builtin_str(max(0, end_pos - curr_pos))
-            body.seek(curr_pos, 0)
-        elif body is not None:
-            l = super_len(body)
-            if l:
-                self.headers['Content-Length'] = builtin_str(l)
-        elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
-            self.headers['Content-Length'] = '0'
-
-    def prepare_auth(self, auth, url=''):
-        """Prepares the given HTTP auth data."""
-
-        # If no Auth is explicitly provided, extract it from the URL first.
-        if auth is None:
-            url_auth = get_auth_from_url(self.url)
-            auth = url_auth if any(url_auth) else None
-
-        if auth:
-            if isinstance(auth, tuple) and len(auth) == 2:
-                # special-case basic HTTP auth
-                auth = HTTPBasicAuth(*auth)
-
-            # Allow auth to make its changes.
-            r = auth(self)
-
-            # Update self to reflect the auth changes.
-            self.__dict__.update(r.__dict__)
-
-            # Recompute Content-Length
-            self.prepare_content_length(self.body)
-
-    def prepare_cookies(self, cookies):
-        """Prepares the given HTTP cookie data.
-
-        This function eventually generates a ``Cookie`` header from the
-        given cookies using cookielib. Due to cookielib's design, the header
-        will not be regenerated if it already exists, meaning this function
-        can only be called once for the life of the
-        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
-        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
-        header is removed beforehand."""
-
-        if isinstance(cookies, cookielib.CookieJar):
-            self._cookies = cookies
-        else:
-            self._cookies = cookiejar_from_dict(cookies)
-
-        cookie_header = get_cookie_header(self._cookies, self)
-        if cookie_header is not None:
-            self.headers['Cookie'] = cookie_header
-
-    def prepare_hooks(self, hooks):
-        """Prepares the given hooks."""
-        # hooks can be passed as None to the prepare method and to this
-        # method. To prevent iterating over None, simply use an empty list
-        # if hooks is False-y
-        hooks = hooks or []
-        for event in hooks:
-            self.register_hook(event, hooks[event])
-
-
-class Response(object):
-    """The :class:`Response <Response>` object, which contains a
-    server's response to an HTTP request.
-    """
-
-    __attrs__ = [
-        '_content', 'status_code', 'headers', 'url', 'history',
-        'encoding', 'reason', 'cookies', 'elapsed', 'request'
-    ]
-
-    def __init__(self):
-        super(Response, self).__init__()
-
-        self._content = False
-        self._content_consumed = False
-
-        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
-        self.status_code = None
-
-        #: Case-insensitive Dictionary of Response Headers.
-        #: For example, ``headers['content-encoding']`` will return the
-        #: value of a ``'Content-Encoding'`` response header.
-        self.headers = CaseInsensitiveDict()
-
-        #: File-like object representation of response (for advanced usage).
-        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
-        # This requirement does not apply for use internally to Requests.
-        self.raw = None
-
-        #: Final URL location of Response.
-        self.url = None
-
-        #: Encoding to decode with when accessing r.text.
-        self.encoding = None
-
-        #: A list of :class:`Response <Response>` objects from
-        #: the history of the Request. Any redirect responses will end
-        #: up here. The list is sorted from the oldest to the most recent request.
-        self.history = []
-
-        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
-        self.reason = None
-
-        #: A CookieJar of Cookies the server sent back.
-        self.cookies = cookiejar_from_dict({})
-
-        #: The amount of time elapsed between sending the request
-        #: and the arrival of the response (as a timedelta).
-        #: This property specifically measures the time taken between sending
-        #: the first byte of the request and finishing parsing the headers. It
-        #: is therefore unaffected by consuming the response content or the
-        #: value of the ``stream`` keyword argument.
-        self.elapsed = datetime.timedelta(0)
-
-        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
-        #: is a response.
-        self.request = None
-
-    def __getstate__(self):
-        # Consume everything; accessing the content attribute makes
-        # sure the content has been fully read.
-        if not self._content_consumed:
-            self.content
-
-        return dict(
-            (attr, getattr(self, attr, None))
-            for attr in self.__attrs__
-        )
-
-    def __setstate__(self, state):
-        for name, value in state.items():
-            setattr(self, name, value)
-
-        # pickled objects do not have .raw
-        setattr(self, '_content_consumed', True)
-        setattr(self, 'raw', None)
-
-    def __repr__(self):
-        return '<Response [%s]>' % (self.status_code)
-
-    def __bool__(self):
-        """Returns true if :attr:`status_code` is 'OK'."""
-        return self.ok
-
-    def __nonzero__(self):
-        """Returns true if :attr:`status_code` is 'OK'."""
-        return self.ok
-
-    def __iter__(self):
-        """Allows you to use a response as an iterator."""
-        return self.iter_content(128)
-
-    @property
-    def ok(self):
-        try:
-            self.raise_for_status()
-        except HTTPError:
-            return False
-        return True
-
-    @property
-    def is_redirect(self):
-        """True if this Response is a well-formed HTTP redirect that could have
-        been processed automatically (by :meth:`Session.resolve_redirects`).
-        """
-        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
-
-    @property
-    def is_permanent_redirect(self):
-        """True if this Response one of the permanent versions of redirect"""
-        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
-
-    @property
-    def apparent_encoding(self):
-        """The apparent encoding, provided by the chardet library"""
-        return chardet.detect(self.content)['encoding']
-
-    def iter_content(self, chunk_size=1, decode_unicode=False):
-        """Iterates over the response data.  When stream=True is set on the
-        request, this avoids reading the content at once into memory for
-        large responses.  The chunk size is the number of bytes it should
-        read into memory.  This is not necessarily the length of each item
-        returned as decoding can take place.
-
-        If decode_unicode is True, content will be decoded using the best
-        available encoding based on the response.
-        """
-
-        def generate():
-            # Special case for urllib3.
-            if hasattr(self.raw, 'stream'):
-                try:
-                    for chunk in self.raw.stream(chunk_size, decode_content=True):
-                        yield chunk
-                except ProtocolError as e:
-                    raise ChunkedEncodingError(e)
-                except DecodeError as e:
-                    raise ContentDecodingError(e)
-                except ReadTimeoutError as e:
-                    raise ConnectionError(e)
-            else:
-                # Standard file-like object.
-                while True:
-                    chunk = self.raw.read(chunk_size)
-                    if not chunk:
-                        break
-                    yield chunk
-
-            self._content_consumed = True
-
-        if self._content_consumed and isinstance(self._content, bool):
-            raise StreamConsumedError()
-        # simulate reading small chunks of the content
-        reused_chunks = iter_slices(self._content, chunk_size)
-
-        stream_chunks = generate()
-
-        chunks = reused_chunks if self._content_consumed else stream_chunks
-
-        if decode_unicode:
-            chunks = stream_decode_response_unicode(chunks, self)
-
-        return chunks
-
-    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
-        """Iterates over the response data, one line at a time.  When
-        stream=True is set on the request, this avoids reading the
-        content at once into memory for large responses.
-
-        .. note:: This method is not reentrant safe.
-        """
-
-        pending = None
-
-        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
-
-            if pending is not None:
-                chunk = pending + chunk
-
-            if delimiter:
-                lines = chunk.split(delimiter)
-            else:
-                lines = chunk.splitlines()
-
-            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
-                pending = lines.pop()
-            else:
-                pending = None
-
-            for line in lines:
-                yield line
-
-        if pending is not None:
-            yield pending
-
-    @property
-    def content(self):
-        """Content of the response, in bytes."""
-
-        if self._content is False:
-            # Read the contents.
-            try:
-                if self._content_consumed:
-                    raise RuntimeError(
-                        'The content for this response was already consumed')
-
-                if self.status_code == 0:
-                    self._content = None
-                else:
-                    self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
-
-            except AttributeError:
-                self._content = None
-
-        self._content_consumed = True
-        # don't need to release the connection; that's been handled by urllib3
-        # since we exhausted the data.
-        return self._content
-
-    @property
-    def text(self):
-        """Content of the response, in unicode.
-
-        If Response.encoding is None, encoding will be guessed using
-        ``chardet``.
-
-        The encoding of the response content is determined based solely on HTTP
-        headers, following RFC 2616 to the letter. If you can take advantage of
-        non-HTTP knowledge to make a better guess at the encoding, you should
-        set ``r.encoding`` appropriately before accessing this property.
-        """
-
-        # Try charset from content-type
-        content = None
-        encoding = self.encoding
-
-        if not self.content:
-            return str('')
-
-        # Fallback to auto-detected encoding.
-        if self.encoding is None:
-            encoding = self.apparent_encoding
-
-        # Decode unicode from given encoding.
-        try:
-            content = str(self.content, encoding, errors='replace')
-        except (LookupError, TypeError):
-            # A LookupError is raised if the encoding was not found which could
-            # indicate a misspelling or similar mistake.
-            #
-            # A TypeError can be raised if encoding is None
-            #
-            # So we try blindly encoding.
-            content = str(self.content, errors='replace')
-
-        return content
-
-    def json(self, **kwargs):
-        """Returns the json-encoded content of a response, if any.
-
-        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
-        """
-
-        if not self.encoding and len(self.content) > 3:
-            # No encoding set. JSON RFC 4627 section 3 states we should expect
-            # UTF-8, -16 or -32. Detect which one to use; If the detection or
-            # decoding fails, fall back to `self.text` (using chardet to make
-            # a best guess).
-            encoding = guess_json_utf(self.content)
-            if encoding is not None:
-                try:
-                    return complexjson.loads(
-                        self.content.decode(encoding), **kwargs
-                    )
-                except UnicodeDecodeError:
-                    # Wrong UTF codec detected; usually because it's not UTF-8
-                    # but some other 8-bit codec.  This is an RFC violation,
-                    # and the server didn't bother to tell us what codec *was*
-                    # used.
-                    pass
-        return complexjson.loads(self.text, **kwargs)
-
-    @property
-    def links(self):
-        """Returns the parsed header links of the response, if any."""
-
-        header = self.headers.get('link')
-
-        # l = MultiDict()
-        l = {}
-
-        if header:
-            links = parse_header_links(header)
-
-            for link in links:
-                key = link.get('rel') or link.get('url')
-                l[key] = link
-
-        return l
-
-    def raise_for_status(self):
-        """Raises stored :class:`HTTPError`, if one occurred."""
-
-        http_error_msg = ''
-
-        if 400 <= self.status_code < 500:
-            http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
-
-        elif 500 <= self.status_code < 600:
-            http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
-
-        if http_error_msg:
-            raise HTTPError(http_error_msg, response=self)
-
-    def close(self):
-        """Releases the connection back to the pool. Once this method has been
-        called the underlying ``raw`` object must not be accessed again.
-
-        *Note: Should not normally need to be called explicitly.*
-        """
-        if not self._content_consumed:
-            return self.raw.close()
-
-        return self.raw.release_conn()
diff --git a/python/ext-libs/requests/packages/__init__.py b/python/ext-libs/requests/packages/__init__.py
deleted file mode 100644
index 971c2ad..0000000
--- a/python/ext-libs/requests/packages/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-'''
-Debian and other distributions "unbundle" requests' vendored dependencies, and
-rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
-The problem with this is that not only requests itself imports those
-dependencies, but third-party code outside of the distros' control too.
-
-In reaction to these problems, the distro maintainers replaced
-``requests.packages`` with a magical "stub module" that imports the correct
-modules. The implementations were varying in quality and all had severe
-problems. For example, a symlink (or hardlink) that links the correct modules
-into place introduces problems regarding object identity, since you now have
-two modules in `sys.modules` with the same API, but different identities::
-
-    requests.packages.urllib3 is not urllib3
-
-With version ``2.5.2``, requests started to maintain its own stub, so that
-distro-specific breakage would be reduced to a minimum, even though the whole
-issue is not requests' fault in the first place. See
-https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
-request.
-'''
-
-from __future__ import absolute_import
-import sys
-
-try:
-    from . import urllib3
-except ImportError:
-    import urllib3
-    sys.modules['%s.urllib3' % __name__] = urllib3
-
-try:
-    from . import chardet
-except ImportError:
-    import chardet
-    sys.modules['%s.chardet' % __name__] = chardet
diff --git a/python/ext-libs/requests/packages/chardet/__init__.py b/python/ext-libs/requests/packages/chardet/__init__.py
deleted file mode 100644
index 82c2a48..0000000
--- a/python/ext-libs/requests/packages/chardet/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-__version__ = "2.3.0"
-from sys import version_info
-
-
-def detect(aBuf):
-    if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
-            (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
-        raise ValueError('Expected a bytes object, not a unicode object')
-
-    from . import universaldetector
-    u = universaldetector.UniversalDetector()
-    u.reset()
-    u.feed(aBuf)
-    u.close()
-    return u.result
diff --git a/python/ext-libs/requests/packages/chardet/big5freq.py b/python/ext-libs/requests/packages/chardet/big5freq.py
deleted file mode 100644
index 65bffc0..0000000
--- a/python/ext-libs/requests/packages/chardet/big5freq.py
+++ /dev/null
@@ -1,925 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Big5 frequency table
-# by Taiwan's Mandarin Promotion Council
-# <http://www.edu.tw:81/mandr/>
-#
-# 128  --> 0.42261
-# 256  --> 0.57851
-# 512  --> 0.74851
-# 1024 --> 0.89384
-# 2048 --> 0.97583
-#
-# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
-# Random Distribution Ration = 512/(5401-512)=0.105
-#
-# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
-
-BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
-
-#Char to FreqOrder table
-BIG5_TABLE_SIZE = 5376
-
-Big5CharToFreqOrder = (
-   1,1801,1506, 255,1431, 198,   9,  82,   6,5008, 177, 202,3681,1256,2821, 110, #   16
-3814,  33,3274, 261,  76,  44,2114,  16,2946,2187,1176, 659,3971,  26,3451,2653, #   32
-1198,3972,3350,4202, 410,2215, 302, 590, 361,1964,   8, 204,  58,4510,5009,1932, #   48
-  63,5010,5011, 317,1614,  75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, #   64
-3682,   3,  10,3973,1471,  29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, #   80
-4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947,  34,3556,3204,  64, 604, #   96
-5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337,  72, 406,5017,  80, #  112
- 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449,  69,2987, 591, #  128
- 179,2096, 471, 115,2035,1844,  60,  50,2988, 134, 806,1869, 734,2036,3454, 180, #  144
- 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, #  160
-2502,  90,2716,1338, 663,  11, 906,1099,2553,  20,2441, 182, 532,1716,5019, 732, #  176
-1376,4204,1311,1420,3206,  25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, #  192
-3276, 475,1447,3683,5020, 117,  21, 656, 810,1297,2300,2334,3557,5021, 126,4205, #  208
- 706, 456, 150, 613,4513,  71,1118,2037,4206, 145,3092,  85, 835, 486,2115,1246, #  224
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, #  240
-3558,3135,5023,1956,1153,4207,  83, 296,1199,3093, 192, 624,  93,5024, 822,1898, #  256
-2823,3136, 795,2065, 991,1554,1542,1592,  27,  43,2867, 859, 139,1456, 860,4514, #  272
- 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, #  288
-3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, #  304
-1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, #  320
-5026,5027,2176,3207,3685,2682, 593, 845,1062,3277,  88,1723,2038,3978,1951, 212, #  336
- 266, 152, 149, 468,1899,4208,4516,  77, 187,5028,3038,  37,   5,2990,5029,3979, #  352
-5030,5031,  39,2524,4517,2908,3208,2079,  55, 148,  74,4518, 545, 483,1474,1029, #  368
-1665, 217,1870,1531,3138,1104,2655,4209,  24, 172,3562, 900,3980,3563,3564,4519, #  384
-  32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683,   4,3039,3351,1427,1789, #  400
- 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, #  416
-3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439,  38,5037,1063,5038, 794, #  432
-3982,1435,2301,  46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804,  35, 707, #  448
- 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, #  464
-2129,1363,3689,1423, 697, 100,3094,  48,  70,1231, 495,3139,2196,5043,1294,5044, #  480
-2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, #  496
- 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, #  512
- 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, #  528
-3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, #  544
-1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, #  560
-1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, #  576
-1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381,   7, #  592
-2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, #  608
- 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, #  624
-4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, #  640
-1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, #  656
-5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, #  672
-2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, #  688
- 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, #  704
-  98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, #  720
- 523,2789,2790,2658,5061, 141,2235,1333,  68, 176, 441, 876, 907,4220, 603,2602, #  736
- 710, 171,3464, 404, 549,  18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, #  752
-5063,2991, 368,5064, 146, 366,  99, 871,3693,1543, 748, 807,1586,1185,  22,2263, #  768
- 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, #  784
-1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068,  59,5069, #  800
- 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, #  816
- 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, #  832
-5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, #  848
-1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, #  864
- 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, #  880
-3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, #  896
-4224,  57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, #  912
-3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, #  928
- 279,3145,  51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, #  944
- 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, #  960
-1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, #  976
-4227,2475,1436, 953,4228,2055,4545, 671,2400,  79,4229,2446,3285, 608, 567,2689, #  992
-3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
-3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
-2402,5097,5098,5099,4232,3045,   0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
-5101, 233,4233,3697,1819,4550,4551,5102,  96,1777,1315,2083,5103, 257,5104,1810, # 1056
-3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
-5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
-1484,5110,1712, 127,  67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
-2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
-1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
-  78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
-1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
-4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
-3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
- 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
- 165, 243,4559,3703,2528, 123, 683,4239, 764,4560,  36,3998,1793, 589,2916, 816, # 1232
- 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
-2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
-5122, 611,1156, 854,2386,1316,2875,   2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
-1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
-2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
-1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
-1994,5135,4564,5136,5137,2198,  13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
-5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
-5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
-5149, 128,2133,  92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
-3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
-4567,2252,  94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
-4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
-2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
-5163,2337,2068,  23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
-3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
- 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
-5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863,  41, # 1520
-5170,5171,4575,5172,1657,2338,  19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
-1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
-2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
-3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
-4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
-5182,2692, 733,  40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
-3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
-4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
-1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
-1871,2762,3004,5187, 435,5188, 343,1108, 596,  17,1751,4579,2239,3477,3709,5189, # 1680
-4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
-1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
- 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
-1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
-1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
-3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
- 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
-5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
-2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
-1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
-1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551,  30,2268,4266, # 1856
-5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
- 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
-4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
- 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
-2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
- 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
-1041,3005, 293,1168,  87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
-1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
- 730,1515, 184,2840,  66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
-4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
-4021,5231,5232,1186,  15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
-1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
-3596,1342,1681,1718, 766,3297, 286,  89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
-5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
-5240,3298, 310, 313,3482,2304, 770,4278,  54,3054, 189,4611,3105,3848,4025,5241, # 2096
-1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
-2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
-1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
-3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
-2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
-3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
-2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
-4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
-4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
-3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
-  97,  81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
-3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
- 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
-3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
-4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
-3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
-1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
-5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
- 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
-5286, 587,  14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
-1702,1226, 102,1547,  62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
- 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
-4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294,  86,1494,1730, # 2464
-4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
- 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
-2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
-2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885,  28,2695, # 2528
-3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
-1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
-4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
-2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
-1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
-1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
-2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
-3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
-1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
-5313,3493,5314,5315,5316,3310,2698,1433,3311, 131,  95,1504,4049, 723,4303,3166, # 2688
-1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
-4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654,  53,5320,3014,5321, # 2720
-1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
- 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
-1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
-4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
-4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
-2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
-1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
-4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
- 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
-5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
-2322,3316,5346,5347,4308,5348,4309,  84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
-3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
-4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
- 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
-5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
-5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
-1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
-4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
-4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
-2699,1516,3614,1121,1082,1329,3317,4073,1449,3873,  65,1128,2848,2927,2769,1590, # 3040
-3874,5370,5371,  12,2668,  45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
-3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
-1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
-4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
-3736,1859,  91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
-3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
-2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
-4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771,  61,4079,3738,1823,4080, # 3184
-5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
-3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
-2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
-3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
-1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
-2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
-3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
-4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063,  56,1396,3113, # 3312
-2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
-2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
-5418,1076,  49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
-1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
-2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
-1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
-3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
-4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629,  31,2851, # 3440
-2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
-3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
-3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
-2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
-4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
-2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
-3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
-4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
-5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
-3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
- 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
-1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412,  42,3119, 464,5455,2642, # 3632
-4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
-1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
-4701,5462,3020, 962, 588,3629, 289,3250,2644,1116,  52,5463,3067,1797,5464,5465, # 3680
-5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
- 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
-5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
-2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
-3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
-2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
-2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
- 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
-1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
-4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
-3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
-3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
- 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
-2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
- 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
-2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
-4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
-1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
-4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
-1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
-3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
- 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
-3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
-5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
-5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
-3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
-3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
-1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
-2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
-5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
-1561,2674,1452,4113,1375,5549,5550,  47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
-1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
-3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
- 919,2352,2975,2353,1270,4727,4115,  73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
-1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
-4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
-5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
-2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
-3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
- 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
-1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
-2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
-2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
-5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
-5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
-5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
-2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
-2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
-1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
-4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
-3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
-3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
-4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
-4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
-2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
-2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
-5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
-4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
-5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
-4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
- 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
- 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
-1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
-3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
-4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
-1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
-5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
-2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
-2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
-3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
-5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
-1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
-3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
-5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
-1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
-5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
-2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
-3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
-2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
-3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
-3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
-3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
-4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
- 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
-2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
-4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
-3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
-5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
-1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
-5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
- 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
-1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
- 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
-4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
-1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
-4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
-1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
- 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
-3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
-4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
-5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
- 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
-3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
- 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
-2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376  #last 512
-#Everything below is of no interest for detection purpose
-2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
-2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
-5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
-5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
-5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
-5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
-5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
-5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
-5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
-5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
-5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
-5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
-5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
-5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
-6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
-6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
-6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
-6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
-6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
-6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
-6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
-6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
-6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
-6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
-6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
-6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
-6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
-6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
-6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
-6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
-6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
-6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
-6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
-6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
-6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
-6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
-6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
-6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
-6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
-6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
-6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
-6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
-6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
-6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
-6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
-6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
-6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
-6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
-6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
-6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
-6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
-6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
-6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
-6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
-6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
-3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
-6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
-6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
-3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
-6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
-6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
-6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
-6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
-6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
-6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
-6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
-4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
-6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
-6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
-3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
-6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
-6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
-6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
-6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
-6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
-6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
-6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
-6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
-6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
-6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
-6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
-7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
-7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
-7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
-7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
-7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
-7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
-7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
-7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
-7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
-7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
-7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
-7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
-7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
-7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
-7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
-7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
-7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
-7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
-7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
-7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
-7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
-7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
-7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
-7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
-7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
-7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
-7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
-7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
-7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
-7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
-7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
-7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
-7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
-7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
-7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
-7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
-7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
-7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
-7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
-7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
-7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
-7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
-7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
-7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
-7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
-7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
-7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
-7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
-3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
-7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
-7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
-7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
-7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
-4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
-7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
-7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
-7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
-7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
-7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
-7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
-7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
-7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
-7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
-7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
-7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
-8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
-8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
-8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
-8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
-8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
-8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
-8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
-8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
-8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
-8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
-8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
-8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
-8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
-8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
-8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
-8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
-8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
-8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
-8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
-8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
-8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
-8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
-8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
-8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
-8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
-8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
-8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
-8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
-8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
-8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
-8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
-8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
-8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
-8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
-8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
-8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
-8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
-8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
-8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
-8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
-8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
-8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
-8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
-8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
-8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
-8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
-8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
-8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
-8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
-8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
-8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
-8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
-8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
-8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
-8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
-8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
-8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
-8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
-8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
-8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
-8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
-4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
-8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
-8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
-8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
-8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
-9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
-9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
-9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
-9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
-9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
-9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
-9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
-9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
-9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
-9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
-9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
-9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
-9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
-9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
-9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
-9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
-9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
-9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
-9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
-9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
-9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
-9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
-9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
-9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
-9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
-9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
-9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
-9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
-9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
-9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
-9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
-9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
-9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
-9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
-9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
-9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
-9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
-9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
-3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
-9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
-9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
-9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
-4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
-9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
-9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
-9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
-9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
-9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
-9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
-9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
-9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
-9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
-9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
-9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
-9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
-9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
-9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
-9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
-9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
-9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
-9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
-9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
-9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
-9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
-9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
-9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
-10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
-10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
-10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
-10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
-10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
-10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
-10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
-10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
-10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
-4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
-10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
-10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
-10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
-10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
-10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
-10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
-10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
-10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
-4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
-10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
-10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
-10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
-10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
-10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
-10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
-10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
-10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
-10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
-10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
-10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
-10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
-10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
-10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
-10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
-10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
-10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
-4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
-10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
-10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
-10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
-10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
-10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
-10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
-10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
-10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
-10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
-10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
-10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
-10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
-10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
-10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
-10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
-10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
-10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
-10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
-10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
-10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
-10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
-10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
-10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
-10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
-10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
-10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
-10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
-10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
-10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
-10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
-11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
-11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
-11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
-4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
-11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
-11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
-11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
-11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
-11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
-11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
-11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
-11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
-11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
-11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
-11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
-11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
-11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
-11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
-11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
-11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
-11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
-11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
-11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
-11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
-11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
-11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
-11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
-11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
-11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
-11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
-11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
-11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
-11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
-11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
-11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
-11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
-4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
-11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
-11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
-11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
-11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
-11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
-11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
-11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
-11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
-11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
-11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
-11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
-11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
-11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
-11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
-11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
-11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
-11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
-11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
-11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
-11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
-11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
-11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
-11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
-11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
-11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
-11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
-11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
-11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
-12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
-12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
-12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
-12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
-12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
-12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
-12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
-12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
-12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
-12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
-12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
-12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
-12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
-12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
-12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
-4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
-4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
-4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
-12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
-12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
-12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
-12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
-12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
-12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
-12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
-12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
-12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
-12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
-12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
-12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
-12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
-12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
-12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
-12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
-12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
-12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
-12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
-12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
-12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
-12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
-12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
-12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
-12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
-12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
-12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
-12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
-12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
-12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
-12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
-12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
-12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
-12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
-12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
-12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
-12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
-12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
-12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
-12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
-12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
-12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
-12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
-12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
-12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
-12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
-12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
-4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
-13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
-13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
-13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
-13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
-13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
-13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
-13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
-4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
-13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
-13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
-13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
-13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
-13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
-13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
-13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
-13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
-13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
-13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
-13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
-13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
-13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
-13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
-13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
-5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
-13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
-13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
-13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
-13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
-13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
-13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
-13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
-13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
-13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
-13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
-13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
-13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
-13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
-13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
-13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
-13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
-13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
-13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
-13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
-13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
-13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
-13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
-13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
-13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
-13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
-13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
-13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
-13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
-13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
-13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
-13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
-13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
-13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
-13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
-13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
-13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
-13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
-13968,13969,13970,13971,13972) #13973
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/big5prober.py b/python/ext-libs/requests/packages/chardet/big5prober.py
deleted file mode 100644
index becce81..0000000
--- a/python/ext-libs/requests/packages/chardet/big5prober.py
+++ /dev/null
@@ -1,42 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import Big5DistributionAnalysis
-from .mbcssm import Big5SMModel
-
-
-class Big5Prober(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(Big5SMModel)
-        self._mDistributionAnalyzer = Big5DistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "Big5"
diff --git a/python/ext-libs/requests/packages/chardet/chardetect.py b/python/ext-libs/requests/packages/chardet/chardetect.py
deleted file mode 100644
index ffe892f..0000000
--- a/python/ext-libs/requests/packages/chardet/chardetect.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-"""
-Script which takes one or more file paths and reports on their detected
-encodings
-
-Example::
-
-    % chardetect somefile someotherfile
-    somefile: windows-1252 with confidence 0.5
-    someotherfile: ascii with confidence 1.0
-
-If no paths are provided, it takes its input from stdin.
-
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import argparse
-import sys
-from io import open
-
-from chardet import __version__
-from chardet.universaldetector import UniversalDetector
-
-
-def description_of(lines, name='stdin'):
-    """
-    Return a string describing the probable encoding of a file or
-    list of strings.
-
-    :param lines: The lines to get the encoding of.
-    :type lines: Iterable of bytes
-    :param name: Name of file or collection of lines
-    :type name: str
-    """
-    u = UniversalDetector()
-    for line in lines:
-        u.feed(line)
-    u.close()
-    result = u.result
-    if result['encoding']:
-        return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
-                                                     result['confidence'])
-    else:
-        return '{0}: no result'.format(name)
-
-
-def main(argv=None):
-    '''
-    Handles command line arguments and gets things started.
-
-    :param argv: List of arguments, as if specified on the command-line.
-                 If None, ``sys.argv[1:]`` is used instead.
-    :type argv: list of str
-    '''
-    # Get command line arguments
-    parser = argparse.ArgumentParser(
-        description="Takes one or more file paths and reports their detected \
-                     encodings",
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-        conflict_handler='resolve')
-    parser.add_argument('input',
-                        help='File whose encoding we would like to determine.',
-                        type=argparse.FileType('rb'), nargs='*',
-                        default=[sys.stdin])
-    parser.add_argument('--version', action='version',
-                        version='%(prog)s {0}'.format(__version__))
-    args = parser.parse_args(argv)
-
-    for f in args.input:
-        if f.isatty():
-            print("You are running chardetect interactively. Press " +
-                  "CTRL-D twice at the start of a blank line to signal the " +
-                  "end of your input. If you want help, run chardetect " +
-                  "--help\n", file=sys.stderr)
-        print(description_of(f, f.name))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/python/ext-libs/requests/packages/chardet/chardistribution.py b/python/ext-libs/requests/packages/chardet/chardistribution.py
deleted file mode 100644
index 4e64a00..0000000
--- a/python/ext-libs/requests/packages/chardet/chardistribution.py
+++ /dev/null
@@ -1,231 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
-                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
-from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
-                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
-from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
-                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
-from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
-                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
-from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
-                      JIS_TYPICAL_DISTRIBUTION_RATIO)
-from .compat import wrap_ord
-
-ENOUGH_DATA_THRESHOLD = 1024
-SURE_YES = 0.99
-SURE_NO = 0.01
-MINIMUM_DATA_THRESHOLD = 3
-
-
-class CharDistributionAnalysis:
-    def __init__(self):
-        # Mapping table to get frequency order from char order (get from
-        # GetOrder())
-        self._mCharToFreqOrder = None
-        self._mTableSize = None  # Size of above table
-        # This is a constant value which varies from language to language,
-        # used in calculating confidence.  See
-        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
-        # for further detail.
-        self._mTypicalDistributionRatio = None
-        self.reset()
-
-    def reset(self):
-        """reset analyser, clear any state"""
-        # If this flag is set to True, detection is done and conclusion has
-        # been made
-        self._mDone = False
-        self._mTotalChars = 0  # Total characters encountered
-        # The number of characters whose frequency order is less than 512
-        self._mFreqChars = 0
-
-    def feed(self, aBuf, aCharLen):
-        """feed a character with known length"""
-        if aCharLen == 2:
-            # we only care about 2-bytes character in our distribution analysis
-            order = self.get_order(aBuf)
-        else:
-            order = -1
-        if order >= 0:
-            self._mTotalChars += 1
-            # order is valid
-            if order < self._mTableSize:
-                if 512 > self._mCharToFreqOrder[order]:
-                    self._mFreqChars += 1
-
-    def get_confidence(self):
-        """return confidence based on existing data"""
-        # if we didn't receive any character in our consideration range,
-        # return negative answer
-        if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
-            return SURE_NO
-
-        if self._mTotalChars != self._mFreqChars:
-            r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
-                 * self._mTypicalDistributionRatio))
-            if r < SURE_YES:
-                return r
-
-        # normalize confidence (we don't want to be 100% sure)
-        return SURE_YES
-
-    def got_enough_data(self):
-        # It is not necessary to receive all data to draw conclusion.
-        # For charset detection, certain amount of data is enough
-        return self._mTotalChars > ENOUGH_DATA_THRESHOLD
-
-    def get_order(self, aBuf):
-        # We do not handle characters based on the original encoding string,
-        # but convert this encoding string to a number, here called order.
-        # This allows multiple encodings of a language to share one frequency
-        # table.
-        return -1
-
-
-class EUCTWDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = EUCTWCharToFreqOrder
-        self._mTableSize = EUCTW_TABLE_SIZE
-        self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for euc-TW encoding, we are interested
-        #   first  byte range: 0xc4 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char = wrap_ord(aBuf[0])
-        if first_char >= 0xC4:
-            return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
-        else:
-            return -1
-
-
-class EUCKRDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = EUCKRCharToFreqOrder
-        self._mTableSize = EUCKR_TABLE_SIZE
-        self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for euc-KR encoding, we are interested
-        #   first  byte range: 0xb0 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char = wrap_ord(aBuf[0])
-        if first_char >= 0xB0:
-            return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
-        else:
-            return -1
-
-
-class GB2312DistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = GB2312CharToFreqOrder
-        self._mTableSize = GB2312_TABLE_SIZE
-        self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for GB2312 encoding, we are interested
-        #  first  byte range: 0xb0 -- 0xfe
-        #  second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
-        if (first_char >= 0xB0) and (second_char >= 0xA1):
-            return 94 * (first_char - 0xB0) + second_char - 0xA1
-        else:
-            return -1
-
-
-class Big5DistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = Big5CharToFreqOrder
-        self._mTableSize = BIG5_TABLE_SIZE
-        self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for big5 encoding, we are interested
-        #   first  byte range: 0xa4 -- 0xfe
-        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
-        if first_char >= 0xA4:
-            if second_char >= 0xA1:
-                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
-            else:
-                return 157 * (first_char - 0xA4) + second_char - 0x40
-        else:
-            return -1
-
-
-class SJISDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = JISCharToFreqOrder
-        self._mTableSize = JIS_TABLE_SIZE
-        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for sjis encoding, we are interested
-        #   first  byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
-        #   second byte range: 0x40 -- 0x7e,  0x81 -- oxfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
-        if (first_char >= 0x81) and (first_char <= 0x9F):
-            order = 188 * (first_char - 0x81)
-        elif (first_char >= 0xE0) and (first_char <= 0xEF):
-            order = 188 * (first_char - 0xE0 + 31)
-        else:
-            return -1
-        order = order + second_char - 0x40
-        if second_char > 0x7F:
-            order = -1
-        return order
-
-
-class EUCJPDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        CharDistributionAnalysis.__init__(self)
-        self._mCharToFreqOrder = JISCharToFreqOrder
-        self._mTableSize = JIS_TABLE_SIZE
-        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, aBuf):
-        # for euc-JP encoding, we are interested
-        #   first  byte range: 0xa0 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        char = wrap_ord(aBuf[0])
-        if char >= 0xA0:
-            return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
-        else:
-            return -1
diff --git a/python/ext-libs/requests/packages/chardet/charsetgroupprober.py b/python/ext-libs/requests/packages/chardet/charsetgroupprober.py
deleted file mode 100644
index 85e7a1c..0000000
--- a/python/ext-libs/requests/packages/chardet/charsetgroupprober.py
+++ /dev/null
@@ -1,106 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-# 
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-# 
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-import sys
-from .charsetprober import CharSetProber
-
-
-class CharSetGroupProber(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mActiveNum = 0
-        self._mProbers = []
-        self._mBestGuessProber = None
-
-    def reset(self):
-        CharSetProber.reset(self)
-        self._mActiveNum = 0
-        for prober in self._mProbers:
-            if prober:
-                prober.reset()
-                prober.active = True
-                self._mActiveNum += 1
-        self._mBestGuessProber = None
-
-    def get_charset_name(self):
-        if not self._mBestGuessProber:
-            self.get_confidence()
-            if not self._mBestGuessProber:
-                return None
-#                self._mBestGuessProber = self._mProbers[0]
-        return self._mBestGuessProber.get_charset_name()
-
-    def feed(self, aBuf):
-        for prober in self._mProbers:
-            if not prober:
-                continue
-            if not prober.active:
-                continue
-            st = prober.feed(aBuf)
-            if not st:
-                continue
-            if st == constants.eFoundIt:
-                self._mBestGuessProber = prober
-                return self.get_state()
-            elif st == constants.eNotMe:
-                prober.active = False
-                self._mActiveNum -= 1
-                if self._mActiveNum <= 0:
-                    self._mState = constants.eNotMe
-                    return self.get_state()
-        return self.get_state()
-
-    def get_confidence(self):
-        st = self.get_state()
-        if st == constants.eFoundIt:
-            return 0.99
-        elif st == constants.eNotMe:
-            return 0.01
-        bestConf = 0.0
-        self._mBestGuessProber = None
-        for prober in self._mProbers:
-            if not prober:
-                continue
-            if not prober.active:
-                if constants._debug:
-                    sys.stderr.write(prober.get_charset_name()
-                                     + ' not active\n')
-                continue
-            cf = prober.get_confidence()
-            if constants._debug:
-                sys.stderr.write('%s confidence = %s\n' %
-                                 (prober.get_charset_name(), cf))
-            if bestConf < cf:
-                bestConf = cf
-                self._mBestGuessProber = prober
-        if not self._mBestGuessProber:
-            return 0.0
-        return bestConf
-#        else:
-#            self._mBestGuessProber = self._mProbers[0]
-#            return self._mBestGuessProber.get_confidence()
diff --git a/python/ext-libs/requests/packages/chardet/charsetprober.py b/python/ext-libs/requests/packages/chardet/charsetprober.py
deleted file mode 100644
index 9758171..0000000
--- a/python/ext-libs/requests/packages/chardet/charsetprober.py
+++ /dev/null
@@ -1,62 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-import re
-
-
-class CharSetProber:
-    def __init__(self):
-        pass
-
-    def reset(self):
-        self._mState = constants.eDetecting
-
-    def get_charset_name(self):
-        return None
-
-    def feed(self, aBuf):
-        pass
-
-    def get_state(self):
-        return self._mState
-
-    def get_confidence(self):
-        return 0.0
-
-    def filter_high_bit_only(self, aBuf):
-        aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
-        return aBuf
-
-    def filter_without_english_letters(self, aBuf):
-        aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
-        return aBuf
-
-    def filter_with_english_letters(self, aBuf):
-        # TODO
-        return aBuf
diff --git a/python/ext-libs/requests/packages/chardet/codingstatemachine.py b/python/ext-libs/requests/packages/chardet/codingstatemachine.py
deleted file mode 100644
index 8dd8c91..0000000
--- a/python/ext-libs/requests/packages/chardet/codingstatemachine.py
+++ /dev/null
@@ -1,61 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .constants import eStart
-from .compat import wrap_ord
-
-
-class CodingStateMachine:
-    def __init__(self, sm):
-        self._mModel = sm
-        self._mCurrentBytePos = 0
-        self._mCurrentCharLen = 0
-        self.reset()
-
-    def reset(self):
-        self._mCurrentState = eStart
-
-    def next_state(self, c):
-        # for each byte we get its class
-        # if it is first byte, we also get byte length
-        # PY3K: aBuf is a byte stream, so c is an int, not a byte
-        byteCls = self._mModel['classTable'][wrap_ord(c)]
-        if self._mCurrentState == eStart:
-            self._mCurrentBytePos = 0
-            self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
-        # from byte's class and stateTable, we get its next state
-        curr_state = (self._mCurrentState * self._mModel['classFactor']
-                      + byteCls)
-        self._mCurrentState = self._mModel['stateTable'][curr_state]
-        self._mCurrentBytePos += 1
-        return self._mCurrentState
-
-    def get_current_charlen(self):
-        return self._mCurrentCharLen
-
-    def get_coding_state_machine(self):
-        return self._mModel['name']
diff --git a/python/ext-libs/requests/packages/chardet/compat.py b/python/ext-libs/requests/packages/chardet/compat.py
deleted file mode 100644
index d9e30ad..0000000
--- a/python/ext-libs/requests/packages/chardet/compat.py
+++ /dev/null
@@ -1,34 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# Contributor(s):
-#   Ian Cordasco - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-
-
-if sys.version_info < (3, 0):
-    base_str = (str, unicode)
-else:
-    base_str = (bytes, str)
-
-
-def wrap_ord(a):
-    if sys.version_info < (3, 0) and isinstance(a, base_str):
-        return ord(a)
-    else:
-        return a
diff --git a/python/ext-libs/requests/packages/chardet/constants.py b/python/ext-libs/requests/packages/chardet/constants.py
deleted file mode 100644
index e4d148b..0000000
--- a/python/ext-libs/requests/packages/chardet/constants.py
+++ /dev/null
@@ -1,39 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-_debug = 0
-
-eDetecting = 0
-eFoundIt = 1
-eNotMe = 2
-
-eStart = 0
-eError = 1
-eItsMe = 2
-
-SHORTCUT_THRESHOLD = 0.95
diff --git a/python/ext-libs/requests/packages/chardet/cp949prober.py b/python/ext-libs/requests/packages/chardet/cp949prober.py
deleted file mode 100644
index ff4272f..0000000
--- a/python/ext-libs/requests/packages/chardet/cp949prober.py
+++ /dev/null
@@ -1,44 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import CP949SMModel
-
-
-class CP949Prober(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(CP949SMModel)
-        # NOTE: CP949 is a superset of EUC-KR, so the distribution should be
-        #       not different.
-        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "CP949"
diff --git a/python/ext-libs/requests/packages/chardet/escprober.py b/python/ext-libs/requests/packages/chardet/escprober.py
deleted file mode 100644
index 80a844f..0000000
--- a/python/ext-libs/requests/packages/chardet/escprober.py
+++ /dev/null
@@ -1,86 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
-                    ISO2022KRSMModel)
-from .charsetprober import CharSetProber
-from .codingstatemachine import CodingStateMachine
-from .compat import wrap_ord
-
-
-class EscCharSetProber(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mCodingSM = [
-            CodingStateMachine(HZSMModel),
-            CodingStateMachine(ISO2022CNSMModel),
-            CodingStateMachine(ISO2022JPSMModel),
-            CodingStateMachine(ISO2022KRSMModel)
-        ]
-        self.reset()
-
-    def reset(self):
-        CharSetProber.reset(self)
-        for codingSM in self._mCodingSM:
-            if not codingSM:
-                continue
-            codingSM.active = True
-            codingSM.reset()
-        self._mActiveSM = len(self._mCodingSM)
-        self._mDetectedCharset = None
-
-    def get_charset_name(self):
-        return self._mDetectedCharset
-
-    def get_confidence(self):
-        if self._mDetectedCharset:
-            return 0.99
-        else:
-            return 0.00
-
-    def feed(self, aBuf):
-        for c in aBuf:
-            # PY3K: aBuf is a byte array, so c is an int, not a byte
-            for codingSM in self._mCodingSM:
-                if not codingSM:
-                    continue
-                if not codingSM.active:
-                    continue
-                codingState = codingSM.next_state(wrap_ord(c))
-                if codingState == constants.eError:
-                    codingSM.active = False
-                    self._mActiveSM -= 1
-                    if self._mActiveSM <= 0:
-                        self._mState = constants.eNotMe
-                        return self.get_state()
-                elif codingState == constants.eItsMe:
-                    self._mState = constants.eFoundIt
-                    self._mDetectedCharset = codingSM.get_coding_state_machine()  # nopep8
-                    return self.get_state()
-
-        return self.get_state()
diff --git a/python/ext-libs/requests/packages/chardet/escsm.py b/python/ext-libs/requests/packages/chardet/escsm.py
deleted file mode 100644
index bd302b4..0000000
--- a/python/ext-libs/requests/packages/chardet/escsm.py
+++ /dev/null
@@ -1,242 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .constants import eStart, eError, eItsMe
-
-HZ_cls = (
-1,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,0,0,0,0,  # 20 - 27
-0,0,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,0,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,4,0,5,2,0,  # 78 - 7f
-1,1,1,1,1,1,1,1,  # 80 - 87
-1,1,1,1,1,1,1,1,  # 88 - 8f
-1,1,1,1,1,1,1,1,  # 90 - 97
-1,1,1,1,1,1,1,1,  # 98 - 9f
-1,1,1,1,1,1,1,1,  # a0 - a7
-1,1,1,1,1,1,1,1,  # a8 - af
-1,1,1,1,1,1,1,1,  # b0 - b7
-1,1,1,1,1,1,1,1,  # b8 - bf
-1,1,1,1,1,1,1,1,  # c0 - c7
-1,1,1,1,1,1,1,1,  # c8 - cf
-1,1,1,1,1,1,1,1,  # d0 - d7
-1,1,1,1,1,1,1,1,  # d8 - df
-1,1,1,1,1,1,1,1,  # e0 - e7
-1,1,1,1,1,1,1,1,  # e8 - ef
-1,1,1,1,1,1,1,1,  # f0 - f7
-1,1,1,1,1,1,1,1,  # f8 - ff
-)
-
-HZ_st = (
-eStart,eError,     3,eStart,eStart,eStart,eError,eError,# 00-07
-eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
-eItsMe,eItsMe,eError,eError,eStart,eStart,     4,eError,# 10-17
-     5,eError,     6,eError,     5,     5,     4,eError,# 18-1f
-     4,eError,     4,     4,     4,eError,     4,eError,# 20-27
-     4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
-)
-
-HZCharLenTable = (0, 0, 0, 0, 0, 0)
-
-HZSMModel = {'classTable': HZ_cls,
-             'classFactor': 6,
-             'stateTable': HZ_st,
-             'charLenTable': HZCharLenTable,
-             'name': "HZ-GB-2312"}
-
-ISO2022CN_cls = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,0,0,0,0,  # 20 - 27
-0,3,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,4,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022CN_st = (
-eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
-eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
-eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
-eItsMe,eItsMe,eItsMe,eError,eError,eError,     4,eError,# 18-1f
-eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
-     5,     6,eError,eError,eError,eError,eError,eError,# 28-2f
-eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
-eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
-)
-
-ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
-
-ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
-                    'classFactor': 9,
-                    'stateTable': ISO2022CN_st,
-                    'charLenTable': ISO2022CNCharLenTable,
-                    'name': "ISO-2022-CN"}
-
-ISO2022JP_cls = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,2,2,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,7,0,0,0,  # 20 - 27
-3,0,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-6,0,4,0,8,0,0,0,  # 40 - 47
-0,9,5,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022JP_st = (
-eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
-eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
-eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
-eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
-eError,     5,eError,eError,eError,     4,eError,eError,# 20-27
-eError,eError,eError,     6,eItsMe,eError,eItsMe,eError,# 28-2f
-eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
-eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
-eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
-)
-
-ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
-
-ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
-                    'classFactor': 10,
-                    'stateTable': ISO2022JP_st,
-                    'charLenTable': ISO2022JPCharLenTable,
-                    'name': "ISO-2022-JP"}
-
-ISO2022KR_cls = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,3,0,0,0,  # 20 - 27
-0,4,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,5,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022KR_st = (
-eStart,     3,eError,eStart,eStart,eStart,eError,eError,# 00-07
-eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
-eItsMe,eItsMe,eError,eError,eError,     4,eError,eError,# 10-17
-eError,eError,eError,eError,     5,eError,eError,eError,# 18-1f
-eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
-)
-
-ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
-
-ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
-                    'classFactor': 6,
-                    'stateTable': ISO2022KR_st,
-                    'charLenTable': ISO2022KRCharLenTable,
-                    'name': "ISO-2022-KR"}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/eucjpprober.py b/python/ext-libs/requests/packages/chardet/eucjpprober.py
deleted file mode 100644
index 8e64fdc..0000000
--- a/python/ext-libs/requests/packages/chardet/eucjpprober.py
+++ /dev/null
@@ -1,90 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-from . import constants
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCJPDistributionAnalysis
-from .jpcntx import EUCJPContextAnalysis
-from .mbcssm import EUCJPSMModel
-
-
-class EUCJPProber(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(EUCJPSMModel)
-        self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
-        self._mContextAnalyzer = EUCJPContextAnalysis()
-        self.reset()
-
-    def reset(self):
-        MultiByteCharSetProber.reset(self)
-        self._mContextAnalyzer.reset()
-
-    def get_charset_name(self):
-        return "EUC-JP"
-
-    def feed(self, aBuf):
-        aLen = len(aBuf)
-        for i in range(0, aLen):
-            # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
-            codingState = self._mCodingSM.next_state(aBuf[i])
-            if codingState == constants.eError:
-                if constants._debug:
-                    sys.stderr.write(self.get_charset_name()
-                                     + ' prober hit error at byte ' + str(i)
-                                     + '\n')
-                self._mState = constants.eNotMe
-                break
-            elif codingState == constants.eItsMe:
-                self._mState = constants.eFoundIt
-                break
-            elif codingState == constants.eStart:
-                charLen = self._mCodingSM.get_current_charlen()
-                if i == 0:
-                    self._mLastChar[1] = aBuf[0]
-                    self._mContextAnalyzer.feed(self._mLastChar, charLen)
-                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
-                else:
-                    self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
-                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
-                                                     charLen)
-
-        self._mLastChar[0] = aBuf[aLen - 1]
-
-        if self.get_state() == constants.eDetecting:
-            if (self._mContextAnalyzer.got_enough_data() and
-               (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
-                self._mState = constants.eFoundIt
-
-        return self.get_state()
-
-    def get_confidence(self):
-        contxtCf = self._mContextAnalyzer.get_confidence()
-        distribCf = self._mDistributionAnalyzer.get_confidence()
-        return max(contxtCf, distribCf)
diff --git a/python/ext-libs/requests/packages/chardet/euckrfreq.py b/python/ext-libs/requests/packages/chardet/euckrfreq.py
deleted file mode 100644
index a179e4c..0000000
--- a/python/ext-libs/requests/packages/chardet/euckrfreq.py
+++ /dev/null
@@ -1,596 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials include literature and computer technology
-
-# 128  --> 0.79
-# 256  --> 0.92
-# 512  --> 0.986
-# 1024 --> 0.99944
-# 2048 --> 0.99999
-#
-# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
-# Random Distribution Ration = 512 / (2350-512) = 0.279.
-# 
-# Typical Distribution Ratio  
-
-EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
-
-EUCKR_TABLE_SIZE = 2352
-
-# Char to FreqOrder table , 
-EUCKRCharToFreqOrder = ( \
-  13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722,  87,
-1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
-1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488,  20,1733,1269,1734,
- 945,1400,1735,  47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
- 116, 987, 813,1401, 683,  75,1204, 145,1740,1741,1742,1743,  16, 847, 667, 622,
- 708,1744,1745,1746, 966, 787, 304, 129,1747,  60, 820, 123, 676,1748,1749,1750,
-1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
- 344,1763,1764,1765,1766,  89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
- 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
-1780, 337, 751,1058,  28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782,  19,
-1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
-1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
-1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
-1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
- 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
-1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
-1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
-1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
-1412,1837,1838,  39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
- 544,1023,1081, 869,  91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
-1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
- 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
- 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
-1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
- 282,  96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
-1421, 268,1877,1422,1878,1879,1880, 308,1881,   2, 537,1882,1883,1215,1884,1885,
- 127, 791,1886,1273,1423,1887,  34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
-   0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
-1894,1123,  48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
-1899, 694,1900, 909, 734,1424, 572, 866,1425, 691,  85, 524,1010, 543, 394, 841,
-1901,1902,1903,1026,1904,1905,1906,1907,1908,1909,  30, 451, 651, 988, 310,1910,
-1911,1426, 810,1216,  93,1912,1913,1277,1217,1914, 858, 759,  45,  58, 181, 610,
- 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
-1919, 359,1920, 687,1921, 822,1922, 293,1923,1924,  40, 662, 118, 692,  29, 939,
- 887, 640, 482, 174,1925,  69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
- 217, 854,1163, 823,1927,1928,1929,1930, 834,1931,  78,1932, 859,1933,1063,1934,
-1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
-1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
-1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
-1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
-1283,1222,1960,1961,1962,1963,  36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
-1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
-  50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
- 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971,   7,
- 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
-1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
- 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
-1995, 560, 223,1287,  98,   8, 189, 650, 978,1288,1996,1437,1997,  17, 345, 250,
- 423, 277, 234, 512, 226,  97, 289,  42, 167,1998, 201,1999,2000, 843, 836, 824,
- 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
-2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008,  71,1440, 745,
- 619, 688,2009, 829,2010,2011, 147,2012,  33, 948,2013,2014,  74, 224,2015,  61,
- 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
-2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591,  52, 724, 246,2031,2032,
-2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
-2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
- 719,1170, 959, 440, 437, 534,  84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
- 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
-2051,2052,2053,  59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
- 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
-1444,2064,2065,  41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
-2069,1292,2070,2071,1445,2072,1446,2073,2074,  55, 588,  66,1447, 271,1092,2075,
-1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
-2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
-2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
-1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
- 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
-2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
-2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
-  22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174,  73,1096, 231, 274,
- 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
-2141,2142,2143,2144,  11, 374, 844,2145, 154,1232,  46,1461,2146, 838, 830, 721,
-1233, 106,2147,  90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
-2150,1462, 761, 565,2151, 686,2152, 649,2153,  72, 173,2154, 460, 415,2155,1463,
-2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
-2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177,  23, 530, 285,
-2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
-2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193,  10,
-2194, 613, 424,2195, 979, 108, 449, 589,  27, 172,  81,1031,  80, 774, 281, 350,
-1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
-2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
-2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
-2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
-2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
-2243, 521, 486, 548,2244,2245,2246,1473,1300,  53, 549, 137, 875,  76, 158,2247,
-1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
-1475,2249,  82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
-2256,  18, 450, 206,2257, 290, 292,1142,2258, 511, 162,  99, 346, 164, 735,2259,
-1476,1477,   4, 554, 343, 798,1099,2260,1100,2261,  43, 171,1303, 139, 215,2262,
-2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
-1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272,  67,2273,
- 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
-2282,2283,2284,2285,2286,  70, 852,1071,2287,2288,2289,2290,  21,  56, 509, 117,
- 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
-2294,1046,1479,2295, 340,2296,  63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
- 808, 494,2299,2300,2301, 903,2302,  37,1072,  14,   5,2303,  79, 675,2304, 312,
-2305,2306,2307,2308,2309,1480,   6,1307,2310,2311,2312,   1, 470,  35,  24, 229,
-2313, 695, 210,  86, 778,  15, 784, 592, 779,  32,  77, 855, 964,2314, 259,2315,
- 501, 380,2316,2317,  83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
-2320,2321,2322,2323,2324,2325,1485,2326,2327, 128,  57,  68, 261,1048, 211, 170,
-1240,  31,2328,  51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
- 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
-1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
-2351,1490,1491,  62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
-1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
-2361,2362, 332,  12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
- 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
-2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
-1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
-2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
-1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
-2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
-1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
- 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
-2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
-2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
- 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
- 915, 489,2449,1514,1184,2450,2451, 515,  64, 427, 495,2452, 583,2453, 483, 485,
-1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
-1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
- 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
-2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
-2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
- 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187,  65,2494,
- 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
- 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
-2499,2500,  49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
-  95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
- 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
-2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
-2533,  25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
- 704, 504, 468, 758, 657,1528, 196,  44, 839,1246, 272, 750,2543, 765, 862,2544,
-2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
-1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
- 249,1075,2556,2557,2558, 466, 743,2559,2560,2561,  92, 514, 426, 420, 526,2562,
-2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
-2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
-2584,1532,  54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
-   3, 458,   9,  38,2588, 107, 110, 890, 209,  26, 737, 498,2589,1534,2590, 431,
- 202,  88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
- 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
-2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601,  94, 175, 197, 406,
-2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
-2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
-1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
-2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
- 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642,  # 512, 256
-#Everything below is of no interest for detection purpose
-2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
-2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
-2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
-2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
-2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
-2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
-2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
-2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
-2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
-2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
-2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
-2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
-2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
-2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
-1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
-2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
-2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
-2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
-2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
-2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
-2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
-2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
-2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
-2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
-3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
-3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
-3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
-3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
-3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
-3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
-3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
-3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
-3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
-3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
-3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
-3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
-3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
-3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
-3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
-3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
-3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
-3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
-3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
-3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
-3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
-3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
-3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
-3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
-3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
-3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
-3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
-3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
-3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
-3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
-3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
-3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
-1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
-1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
-3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
-3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
-3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
-3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
-3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
-3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
-3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
-3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
-3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
-3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
-3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
-3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
-3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
-1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
-3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
-3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
-3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
-3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
-3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
-3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
-3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
-1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
-3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
-3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
-3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
-3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
-1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
-3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
-3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
-3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
-3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
-3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
-3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
-3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
-4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
-4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
-1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
-4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
-4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
-4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
-4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
-4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
-4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
-4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
-4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
-4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
-4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
-4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
-4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
-4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
-4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
-4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
-4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
-4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
-4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
-4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
-4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
-4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
-4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
-4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
-4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
-4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
-4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
-4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
-4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
-4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
-4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
-4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
-4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
-4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
-4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
-4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
-4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
-4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
-4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
-4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
-4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
-4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
-4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
-1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
-4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
-4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
-4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
-4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
-4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
-4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
-4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
-4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
-4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
-4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
-4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
-4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
-4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
-4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
-4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
-4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
-4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
-4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
-4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
-4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
-5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
-5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
-1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
-5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
-5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
-5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
-5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
-5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
-1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
-5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
-5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
-5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
-5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
-5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
-1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
-5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
-5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
-5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
-5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
-5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
-5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
-5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
-5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
-5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
-5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
-5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
-5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
-5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
-5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
-5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
-5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
-5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
-5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
-5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
-5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
-5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
-5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
-5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
-1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
-5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
-5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
-5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
-5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
-5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
-1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
-5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
-5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
-5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
-5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
-5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
-1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
-5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
-1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
-5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
-5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
-5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
-5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
-5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
-5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
-5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
-5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
-5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
-5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
-5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
-5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
-5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
-5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
-5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
-6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
-6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
-6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
-6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
-6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
-6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
-6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
-6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
-6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
-6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
-6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
-6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
-6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
-6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
-6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
-6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
-6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271,  #1024
-6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
-6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
-6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
-6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
-6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
-6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
-6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
-6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
-6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
-6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
-6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
-6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
-6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
-6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
-6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
-6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
-6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
-6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
-1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
-6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
-6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
-6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
-6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
-6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
-1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
-6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
-1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
-6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
-6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
-6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
-1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
-6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
-6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
-6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
-6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
-6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
-6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
-6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
-6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
-6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
-6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
-6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
-6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
-6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
-6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
-6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
-6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
-6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
-7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
-7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
-7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
-7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
-7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
-7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
-7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
-7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
-7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
-7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
-7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
-7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
-7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
-7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
-7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
-7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
-7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
-7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
-7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
-7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
-7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
-7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
-7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
-7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
-7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
-7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
-7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
-7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
-7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
-7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
-7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
-7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
-7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
-7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
-7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
-7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
-7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
-7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
-7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
-7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
-7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
-7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
-7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
-7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
-7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
-7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
-7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
-7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
-7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
-7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
-7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
-7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
-7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
-7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
-7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
-7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
-7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
-7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
-7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
-7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
-7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
-7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
-8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
-8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
-8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
-8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
-8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
-8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
-8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
-8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
-8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
-8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
-8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
-8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
-8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
-8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
-8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
-8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
-8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
-8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
-8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
-8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
-8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
-8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
-8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
-8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
-8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
-8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
-8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
-8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
-8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
-8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
-8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
-8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
-8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
-8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
-8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
-8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
-8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
-8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
-8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
-8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
-8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
-8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
-8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
-8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
-8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
-8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
-8736,8737,8738,8739,8740,8741)
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/euckrprober.py b/python/ext-libs/requests/packages/chardet/euckrprober.py
deleted file mode 100644
index 5982a46..0000000
--- a/python/ext-libs/requests/packages/chardet/euckrprober.py
+++ /dev/null
@@ -1,42 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import EUCKRSMModel
-
-
-class EUCKRProber(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(EUCKRSMModel)
-        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "EUC-KR"
diff --git a/python/ext-libs/requests/packages/chardet/euctwfreq.py b/python/ext-libs/requests/packages/chardet/euctwfreq.py
deleted file mode 100644
index 576e750..0000000
--- a/python/ext-libs/requests/packages/chardet/euctwfreq.py
+++ /dev/null
@@ -1,428 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# EUCTW frequency table
-# Converted from big5 work
-# by Taiwan's Mandarin Promotion Council
-# <http:#www.edu.tw:81/mandr/>
-
-# 128  --> 0.42261
-# 256  --> 0.57851
-# 512  --> 0.74851
-# 1024 --> 0.89384
-# 2048 --> 0.97583
-#
-# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
-# Random Distribution Ration = 512/(5401-512)=0.105
-#
-# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
-
-EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
-
-# Char to FreqOrder table ,
-EUCTW_TABLE_SIZE = 8102
-
-EUCTWCharToFreqOrder = (
-   1,1800,1506, 255,1431, 198,   9,  82,   6,7310, 177, 202,3615,1256,2808, 110, # 2742
-3735,  33,3241, 261,  76,  44,2113,  16,2931,2184,1176, 659,3868,  26,3404,2643, # 2758
-1198,3869,3313,4060, 410,2211, 302, 590, 361,1963,   8, 204,  58,4296,7311,1931, # 2774
-  63,7312,7313, 317,1614,  75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
-3616,   3,  10,3870,1471,  29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
-4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932,  34,3501,3173,  64, 604, # 2822
-7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337,  72, 406,7319,  80, # 2838
- 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449,  69,2969, 591, # 2854
- 179,2095, 471, 115,2034,1843,  60,  50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
- 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
-2495,  90,2707,1338, 663,  11, 906,1099,2545,  20,2436, 182, 532,1716,7321, 732, # 2902
-1376,4062,1311,1420,3175,  25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
-3243, 475,1447,3617,7322, 117,  21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
- 706, 456, 150, 613,4299,  71,1118,2036,4064, 145,3069,  85, 835, 486,2114,1246, # 2950
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
-3503,3110,7325,1955,1153,4065,  83, 296,1199,3070, 192, 624,  93,7326, 822,1897, # 2982
-2810,3111, 795,2064, 991,1554,1542,1592,  27,  43,2853, 859, 139,1456, 860,4300, # 2998
- 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
-3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
-1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
-7328,7329,2173,3176,3619,2673, 593, 845,1062,3244,  88,1723,2037,3875,1950, 212, # 3062
- 266, 152, 149, 468,1898,4066,4302,  77, 187,7330,3018,  37,   5,2972,7331,3876, # 3078
-7332,7333,  39,2517,4303,2894,3177,2078,  55, 148,  74,4304, 545, 483,1474,1029, # 3094
-1665, 217,1869,1531,3113,1104,2645,4067,  24, 172,3507, 900,3877,3508,3509,4305, # 3110
-  32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674,   4,3019,3314,1427,1788, # 3126
- 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
-3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439,  38,7339,1063,7340, 794, # 3158
-3879,1435,2296,  46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804,  35, 707, # 3174
- 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
-2128,1363,3623,1423, 697, 100,3071,  48,  70,1231, 495,3114,2193,7345,1294,7346, # 3206
-2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
- 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
- 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
-3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
-1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
-1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
-1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381,   7, # 3318
-2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
- 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
-4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
-1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
-7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
-2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
- 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
-  98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
- 523,2776,2777,2648,7364, 141,2231,1333,  68, 176, 441, 876, 907,4077, 603,2592, # 3462
- 710, 171,3417, 404, 549,  18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
-7366,2973, 368,7367, 146, 366,  99, 871,3627,1543, 748, 807,1586,1185,  22,2258, # 3494
- 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
-1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371,  59,7372, # 3526
- 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
- 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
-7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
-1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
- 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
-3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
-4081,  57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
-3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
- 279,3120,  51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
- 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
-1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
-4084,2468,1436, 953,4085,2054,4331, 671,2395,  79,4086,2441,3252, 608, 567,2680, # 3718
-3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
-3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
-2397,7400,7401,7402,4089,3025,   0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
-7404, 233,4090,3631,1818,4336,4337,7405,  96,1776,1315,2082,7406, 257,7407,1809, # 3782
-3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
-7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
-1484,7413,1712, 127,  67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
-2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
-1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
-  78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
-1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
-4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
-3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
- 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
- 165, 243,4345,3637,2521, 123, 683,4096, 764,4346,  36,3895,1792, 589,2902, 816, # 3958
- 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
-2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
-7425, 611,1156, 854,2381,1316,2861,   2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
-1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
-2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
-1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
-1993,7438,4350,7439,7440,2195,  13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
-7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
-7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
-7452, 128,2132,  92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
-3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
-4353,2248,  94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
-1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
-7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
-2332,2067,  23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
-7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
-3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
-3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863,  41,7473, # 4246
-7474,4361,7475,1657,2333,  19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
-2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
-7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
- 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
-4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
-2683, 733,  40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
-7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
-3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
-2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
-2752,2986,7490, 435,7491, 343,1108, 596,  17,1751,4365,2235,3430,3643,7492,4366, # 4406
- 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
-2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
-1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
-1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
-2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
-1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
-7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
-7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
-2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
-4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
-1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551,  30,2263,4122, # 4582
-7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
- 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
-4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
- 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
-2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
- 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
-1041,2987, 293,1168,  87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
-1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
- 730,1515, 184,2827,  66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
-3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
-3918,7535,7536,1186,  15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
-1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
-3541,1342,1681,1718, 766,3264, 286,  89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
-7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
-7544,3265, 310, 313,3435,2299, 770,4134,  54,3034, 189,4397,3082,3769,3922,7545, # 4822
-1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
-2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
-1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
-3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
-2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
-3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
-2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
-4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
-4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
-3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
-  97,  81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
-3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
- 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
-3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
-3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
-3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
-1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
-7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
- 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
-7590, 587,  14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
-1702,1226, 102,1547,  62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
- 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
-4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598,  86,1494,1730, # 5190
-3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
- 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
-2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
-2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885,  28,2686, # 5254
-3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
-1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
-4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
-2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
-1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
-1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
-2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
-3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
-1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
-7617,3446,7618,7619,7620,3277,2689,1433,3278, 131,  95,1504,3946, 723,4159,3141, # 5414
-1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
-4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654,  53,7624,2996,7625, # 5446
-1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
- 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
-1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
-3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
-3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
-2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
-1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
-4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
- 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
-7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
-2317,3283,7650,7651,4164,7652,4165,  84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
-3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
-4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
- 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
-7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
-7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
-1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
-4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
-3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
-2690,1516,3559,1121,1082,1329,3284,3970,1449,3794,  65,1128,2835,2913,2759,1590, # 5766
-3795,7674,7675,  12,2658,  45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
-2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
-1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
-4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
-3670,1858,  91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
-3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
-2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
-4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761,  61,3976,3672,1822,3977, # 5910
-7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
-3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
-2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
-3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
-1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
-2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
-3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
-4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043,  56,1396,3090, # 6038
-2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
-2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
-7722,1076,  49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
-1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
-2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
-1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
-3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
-4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629,  31,2838, # 6166
-2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
-3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
-3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
-2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
-4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
-2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
-3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
-4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
-7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
-3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
- 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
-1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412,  42,3096, 464,7759,2632, # 6358
-4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
-1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
-4487,7766,3002, 962, 588,3574, 289,3219,2634,1116,  52,7767,3047,1796,7768,7769, # 6406
-7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
- 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
-7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
-2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
-1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
-1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
-3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
- 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
- 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
- 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
-3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
-2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
- 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
-7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
-1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
-3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
-7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
-1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
-7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
-4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
-1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
-2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
-2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
-4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
- 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
- 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
-3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
-3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
-1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
-2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
-7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
-1561,2664,1452,4010,1375,7855,7856,  47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
-1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
-3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
- 919,2347,2960,2348,1270,4511,4012,  73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
-1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
-4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
-7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
-2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
-3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
- 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
-1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
-2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
-2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
-7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
-7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
-7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
-2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
-2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
-1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
-4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
-3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
-3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
-4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
-4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
-2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
-2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
-7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
-4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
-7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
-2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
-1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
-3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
-4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
-2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
- 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
-2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
-1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
-2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
-2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
-4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
-7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
-1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
-3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
-7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
-1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
-8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
-2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
-8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
-2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
-2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
-8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
-8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
-8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
- 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
-8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
-4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
-3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
-8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
-1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
-8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
- 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
-1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
- 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
-4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
-1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
-4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
-1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
- 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
-3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
-4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
-8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
- 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
-3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
- 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
-2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
-#Everything below is of no interest for detection purpose
-2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
-2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
-8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
-8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
-8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
-8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
-8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
-8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
-8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
-8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
-8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
-8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
-8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
-8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
-8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
-8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
-8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
-8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
-8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
-8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
-8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
-8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
-8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
-8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
-8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
-8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
-8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
-8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
-8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
-8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
-8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
-8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
-8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
-8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
-8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
-8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
-8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
-8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
-8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
-8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/euctwprober.py b/python/ext-libs/requests/packages/chardet/euctwprober.py
deleted file mode 100644
index fe652fe..0000000
--- a/python/ext-libs/requests/packages/chardet/euctwprober.py
+++ /dev/null
@@ -1,41 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCTWDistributionAnalysis
-from .mbcssm import EUCTWSMModel
-
-class EUCTWProber(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(EUCTWSMModel)
-        self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "EUC-TW"
diff --git a/python/ext-libs/requests/packages/chardet/gb2312freq.py b/python/ext-libs/requests/packages/chardet/gb2312freq.py
deleted file mode 100644
index 1238f51..0000000
--- a/python/ext-libs/requests/packages/chardet/gb2312freq.py
+++ /dev/null
@@ -1,472 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# GB2312 most frequently used character table
-#
-# Char to FreqOrder table , from hz6763
-
-# 512  --> 0.79  -- 0.79
-# 1024 --> 0.92  -- 0.13
-# 2048 --> 0.98  -- 0.06
-# 6768 --> 1.00  -- 0.02
-#
-# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
-# Random Distribution Ration = 512 / (3755 - 512) = 0.157
-#
-# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR
-
-GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
-
-GB2312_TABLE_SIZE = 3760
-
-GB2312CharToFreqOrder = (
-1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
-2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
-2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
- 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
-1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
-1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
- 152,1687,1539, 738,1559,  59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
-1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850,  70,3285,2729,3534,3575,
-2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
-3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
- 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
-1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
- 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
-2534,1546,2393,2760, 737,2494,  13, 447, 245,2747,  38,2765,2129,2589,1079, 606,
- 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
-2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
-1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
-3195,4115,5627,2489,2991,  24,2065,2697,1087,2719,  48,1634, 315,  68, 985,2052,
- 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
-1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
- 253,3099,  32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
-2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
-1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563,  26,
-3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
-1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
-2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
-1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
- 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
-3777,3657, 643,2298,1148,1779, 190, 989,3544, 414,  11,2135,2063,2979,1471, 403,
-3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
- 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
-3651, 210,  33,1608,2516, 200,1520, 415, 102,   0,3389,1287, 817,  91,3299,2940,
- 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687,  20,1819, 121,
-1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
-3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
-2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680,  72, 842,1990, 212,1233,
-1154,1586,  75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
- 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
-1910, 534, 529,3309,1721,1660, 274,  39,2827, 661,2670,1578, 925,3248,3815,1094,
-4278,4901,4252,  41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
- 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
-3568, 194,5062,  15, 961,3870,1241,1192,2664,  66,5215,3260,2111,1295,1127,2152,
-3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426,  53,2909,
- 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
-1272,2363, 284,1753,3679,4064,1695,  81, 815,2677,2757,2731,1386, 859, 500,4221,
-2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
-1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
-1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
- 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
-3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
-3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640,  67,2360,
-4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
- 296,3979,1739,1611,3684,  23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
-3116,  17,1074, 467,2692,2201, 387,2922,  45,1326,3055,1645,3659,2817, 958, 243,
-1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
-1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
-4046,3572,2399,1571,3281,  79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
- 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
- 814,4968,3487,1548,2644,1567,1285,   2, 295,2636,  97, 946,3576, 832, 141,4257,
-3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
-1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
- 602,1525,2608,1605,1639,3175, 694,3064,  10, 465,  76,2000,4846,4208, 444,3781,
-1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
-2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844,  89, 937,
- 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
- 432, 445,2811, 206,4136,1472, 730, 349,  73, 397,2802,2547, 998,1637,1167, 789,
- 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
-3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
-4996, 371,1575,2436,1621,2210, 984,4033,1734,2638,  16,4529, 663,2755,3255,1451,
-3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
- 750,2058, 165,  80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
-2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
-2357, 395,3740, 137,2075, 944,4089,2584,1267,3802,  62,1533,2285, 178, 176, 780,
-2440, 201,3707, 590, 478,1560,4354,2117,1075,  30,  74,4643,4004,1635,1441,2745,
- 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
-2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
- 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669,  43,2523,1657,
- 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
- 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
-3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
-2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
-2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024,  40,3240,1536,
-1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
-  18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
-2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
-  90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
- 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
-1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
-1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076,  46,4253,2873,1889,1894,
- 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
- 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
-1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
-2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
-3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
-2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
-2269,2246,1446,  36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
-2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
-3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
-1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906,  51, 369, 170,3541,
-1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
-2101,2730,2490,  82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
-1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
-3750,2289,2795, 813,3123,2610,1136,4368,   5,3391,4541,2174, 420, 429,1728, 754,
-1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
-1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
-3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
- 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
-2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
-1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
-4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
-1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
-1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
-3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
-1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
-  47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
- 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096,  99,
-1397,1769,2300,4428,1643,3455,1978,1757,3718,1440,  35,4879,3742,1296,4228,2280,
- 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
-1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
-1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
- 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
-3708, 135,2131,  87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
-4314,   9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
-3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
-2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
-2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
-1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
-3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
-2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
-1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
-1505,1911,1883,3526, 698,3629,3456,1833,1431, 746,  77,1261,2017,2296,1977,1885,
- 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
-2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
-2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
-3192,2910,2010, 140,2395,2859,  55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
-4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
-3399,  98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
- 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
-3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
-2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
-1086,1974,2034, 630, 257,3338,2788,4903,1017,  86,4790, 966,2789,1995,1696,1131,
- 259,3095,4188,1308, 179,1463,5257, 289,4107,1248,  42,3413,1725,2288, 896,1947,
- 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
-3034,3310, 540,2370,1562,1288,2990, 502,4765,1147,   4,1853,2708, 207, 294,2814,
-4078,2902,2509, 684,  34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
-2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
-1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
-1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
- 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
-1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196,  19, 941,3624,3480,
-3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
- 955,1089,3103,1053,  96,  88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
- 642,4006, 903,2539,1877,2082, 596,  29,4066,1790, 722,2157, 130, 995,1569, 769,
-1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445,  50, 625, 487,2207,
-  57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
-1783, 362,   8,3433,3422, 610,2793,3277,1390,1284,1654,  21,3823, 734, 367, 623,
- 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
-2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
- 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
-2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
-2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
-1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
-1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
-2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
- 819,1541, 142,2284,  44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
-1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
-1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
-2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
-2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434,  92,1466,4920,2616,
-3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
-1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
-4462,  64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
- 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
- 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
-3264,2855,2722,1952,1029,2839,2467,  84,4383,2215, 820,1391,2015,2448,3672, 377,
-1948,2168, 797,2545,3536,2578,2645,  94,2874,1678, 405,1259,3071, 771, 546,1315,
- 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928,  14,2594, 557,
-3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
-1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
-4031,2641,4067,3145,1870,  37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
-1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
-2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
-1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
- 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
-1178,2639,2351,  93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
-3341,1618,4126,2595,2334, 603, 651,  69, 701, 268,2662,3411,2555,1380,1606, 503,
- 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
-2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
- 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
-1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
-1281,  52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169,  27,
-1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
-3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
-2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
-3891,2868,3621,2254,  58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
-3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
-3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
- 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
-2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
- 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
-2724,1927,2333,4440, 567,  22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
-  12, 974,3783,4391, 951,1412,   1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
-1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040,  31,
- 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
- 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
-1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
-3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
-3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118,  63,2076, 314,1881,
-1348,1061, 172, 978,3515,1747, 532, 511,3970,   6, 601, 905,2699,3300,1751, 276,
-1467,3725,2668,  65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
-3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
-2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
-2754,  95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
-1985, 244,2546, 474, 495,1046,2611,1851,2061,  71,2089,1675,2590, 742,3758,2843,
-3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
- 451,   3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
-4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
-1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
-2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078,  49,3770,
-3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
-3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
-1197,1663,4476,3127,  85,4240,2528,  25,1111,1181,3673, 407,3470,4561,2679,2713,
- 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
- 391,2963, 187,  61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
-2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
- 931, 317,2517,3027, 325, 569, 686,2107,3084,  60,1042,1333,2794, 264,3177,4014,
-1628, 258,3712,   7,4464,1176,1043,1778, 683, 114,1975,  78,1492, 383,1886, 510,
- 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
-1282,1289,4609, 697,1453,3044,2666,3611,1856,2412,  54, 719,1330, 568,3778,2459,
-1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
-1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
-1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421,  56,1908,1640,2387,2232,
-1917,1874,2477,4921, 148,  83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
- 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
- 852,1221,1400,1486, 882,2299,4036, 351,  28,1122, 700,6479,6480,6481,6482,6483,  # last 512
-#Everything below is of no interest for detection purpose
-5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
-5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
-5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
-3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
-4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
-5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
-5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
-4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
-4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
-4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
-4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
-3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
-6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
-4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
-6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
-4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
-4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
-4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
-5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
-3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
-4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
-3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
-4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
-4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
-6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
-6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
-5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
-4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
-6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
-4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
-5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
-5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
-5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
-6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
-3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
-6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
-4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
-5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
-6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
-6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
-4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
-5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
-4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
-5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
-5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
-4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
-4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
-5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
-4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
-4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
-5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
-4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
-4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
-4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
-5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
-5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
-4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
-3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
-4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
-6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
-5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
-5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
-4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
-6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
-5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
-6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
-4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
-5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
-5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
-3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
-5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
-6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
-4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
-6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
-4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
-4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
-6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
-3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
-6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
-4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
-3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
-3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
-3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
-4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
-2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
-5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
-4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
-5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
-5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
-5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
-4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
-5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
-4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
-5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
-1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
-3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
-4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
-4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
-6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
-4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
-5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
-3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
-5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
-5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
-5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
-3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
-5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
-5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
-3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
-5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
-5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
-5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
-6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
-4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
-6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
-4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
-3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
-4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
-5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
-5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
-5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
-3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
-3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
-6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
-6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
-5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
-6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
-6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
-6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
-6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
-6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
-5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
-6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
-6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
-3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
-3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
-4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
-4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
-3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
-5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
-5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
-5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
-5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
-5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
-4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
-5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
-6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
-5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
-4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
-4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
-6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
-3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
-4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
-4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
-5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
-6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
-6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
-4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
-6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
-5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
-5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
-5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
-5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
-5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
-4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
-5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
-5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
-5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
-5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
-6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
-4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
-5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
-4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
-4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
-6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
-4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
-6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
-3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
-5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
-6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
-6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
-6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
-5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
-6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
-6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
-3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
-5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
-4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/gb2312prober.py b/python/ext-libs/requests/packages/chardet/gb2312prober.py
deleted file mode 100644
index 0325a2d..0000000
--- a/python/ext-libs/requests/packages/chardet/gb2312prober.py
+++ /dev/null
@@ -1,41 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import GB2312DistributionAnalysis
-from .mbcssm import GB2312SMModel
-
-class GB2312Prober(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(GB2312SMModel)
-        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
-        self.reset()
-
-    def get_charset_name(self):
-        return "GB2312"
diff --git a/python/ext-libs/requests/packages/chardet/hebrewprober.py b/python/ext-libs/requests/packages/chardet/hebrewprober.py
deleted file mode 100644
index ba225c5..0000000
--- a/python/ext-libs/requests/packages/chardet/hebrewprober.py
+++ /dev/null
@@ -1,283 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-#          Shy Shalom
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .constants import eNotMe, eDetecting
-from .compat import wrap_ord
-
-# This prober doesn't actually recognize a language or a charset.
-# It is a helper prober for the use of the Hebrew model probers
-
-### General ideas of the Hebrew charset recognition ###
-#
-# Four main charsets exist in Hebrew:
-# "ISO-8859-8" - Visual Hebrew
-# "windows-1255" - Logical Hebrew
-# "ISO-8859-8-I" - Logical Hebrew
-# "x-mac-hebrew" - ?? Logical Hebrew ??
-#
-# Both "ISO" charsets use a completely identical set of code points, whereas
-# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
-# these code points. windows-1255 defines additional characters in the range
-# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
-# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
-# x-mac-hebrew defines similar additional code points but with a different
-# mapping.
-#
-# As far as an average Hebrew text with no diacritics is concerned, all four
-# charsets are identical with respect to code points. Meaning that for the
-# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
-# (including final letters).
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer is
-# not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself when ordered naturally is read
-# backwards. A buffer of Visual Hebrew generally looks like so:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc' "
-# adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order it is read. It is the responsibility of the renderer to display
-# the text from right to left. A BIDI algorithm is used to place general
-# punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-#    backwards while line order is natural. For charset recognition purposes
-#    the line order is unimportant (In fact, for this implementation, even
-#    word order is unimportant).
-# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-#    specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-#    that contain special punctuation marks or diacritics is displayed with
-#    some unconverted characters showing as question marks. This problem might
-#    be corrected using another model prober for x-mac-hebrew. Due to the fact
-#    that x-mac-hebrew texts are so rare, writing another model prober isn't
-#    worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, inquired and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The final decision about which
-# one is it is made by the HebrewProber by combining final-letter scores
-# with the scores of the two SBCharSetProbers to produce a final answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber is not using any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or visual
-# Hebrew. Disjointed from the model probers, the results of the HebrewProber
-# alone are meaningless. HebrewProber always returns 0.00 as confidence
-# since it never identifies a charset by itself. Instead, the pointer to the
-# HebrewProber is passed to the model probers as a helper "Name Prober".
-# When the Group prober receives a positive identification from any prober,
-# it asks for the name of the charset identified. If the prober queried is a
-# Hebrew model prober, the model prober forwards the call to the
-# HebrewProber to make the final decision. In the HebrewProber, the
-# decision is made according to the final-letters scores maintained and Both
-# model probers scores. The answer is returned in the form of the name of the
-# charset identified, either "windows-1255" or "ISO-8859-8".
-
-# windows-1255 / ISO-8859-8 code points of interest
-FINAL_KAF = 0xea
-NORMAL_KAF = 0xeb
-FINAL_MEM = 0xed
-NORMAL_MEM = 0xee
-FINAL_NUN = 0xef
-NORMAL_NUN = 0xf0
-FINAL_PE = 0xf3
-NORMAL_PE = 0xf4
-FINAL_TSADI = 0xf5
-NORMAL_TSADI = 0xf6
-
-# Minimum Visual vs Logical final letter score difference.
-# If the difference is below this, don't rely solely on the final letter score
-# distance.
-MIN_FINAL_CHAR_DISTANCE = 5
-
-# Minimum Visual vs Logical model score difference.
-# If the difference is below this, don't rely at all on the model score
-# distance.
-MIN_MODEL_DISTANCE = 0.01
-
-VISUAL_HEBREW_NAME = "ISO-8859-8"
-LOGICAL_HEBREW_NAME = "windows-1255"
-
-
-class HebrewProber(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mLogicalProber = None
-        self._mVisualProber = None
-        self.reset()
-
-    def reset(self):
-        self._mFinalCharLogicalScore = 0
-        self._mFinalCharVisualScore = 0
-        # The two last characters seen in the previous buffer,
-        # mPrev and mBeforePrev are initialized to space in order to simulate
-        # a word delimiter at the beginning of the data
-        self._mPrev = ' '
-        self._mBeforePrev = ' '
-        # These probers are owned by the group prober.
-
-    def set_model_probers(self, logicalProber, visualProber):
-        self._mLogicalProber = logicalProber
-        self._mVisualProber = visualProber
-
-    def is_final(self, c):
-        return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
-                               FINAL_TSADI]
-
-    def is_non_final(self, c):
-        # The normal Tsadi is not a good Non-Final letter due to words like
-        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
-        # apostrophe is converted to a space in FilterWithoutEnglishLetters
-        # causing the Non-Final tsadi to appear at an end of a word even
-        # though this is not the case in the original text.
-        # The letters Pe and Kaf rarely display a related behavior of not being
-        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
-        # for example legally end with a Non-Final Pe or Kaf. However, the
-        # benefit of these letters as Non-Final letters outweighs the damage
-        # since these words are quite rare.
-        return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
-
-    def feed(self, aBuf):
-        # Final letter analysis for logical-visual decision.
-        # Look for evidence that the received buffer is either logical Hebrew
-        # or visual Hebrew.
-        # The following cases are checked:
-        # 1) A word longer than 1 letter, ending with a final letter. This is
-        #    an indication that the text is laid out "naturally" since the
-        #    final letter really appears at the end. +1 for logical score.
-        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
-        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
-        #    should not end with the Non-Final form of that letter. Exceptions
-        #    to this rule are mentioned above in isNonFinal(). This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score
-        # 3) A word longer than 1 letter, starting with a final letter. Final
-        #    letters should not appear at the beginning of a word. This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        #
-        # The visual score and logical score are accumulated throughout the
-        # text and are finally checked against each other in GetCharSetName().
-        # No checking for final letters in the middle of words is done since
-        # that case is not an indication for either Logical or Visual text.
-        #
-        # We automatically filter out all 7-bit characters (replace them with
-        # spaces) so the word boundary detection works properly. [MAP]
-
-        if self.get_state() == eNotMe:
-            # Both model probers say it's not them. No reason to continue.
-            return eNotMe
-
-        aBuf = self.filter_high_bit_only(aBuf)
-
-        for cur in aBuf:
-            if cur == ' ':
-                # We stand on a space - a word just ended
-                if self._mBeforePrev != ' ':
-                    # next-to-last char was not a space so self._mPrev is not a
-                    # 1 letter word
-                    if self.is_final(self._mPrev):
-                        # case (1) [-2:not space][-1:final letter][cur:space]
-                        self._mFinalCharLogicalScore += 1
-                    elif self.is_non_final(self._mPrev):
-                        # case (2) [-2:not space][-1:Non-Final letter][
-                        #  cur:space]
-                        self._mFinalCharVisualScore += 1
-            else:
-                # Not standing on a space
-                if ((self._mBeforePrev == ' ') and
-                        (self.is_final(self._mPrev)) and (cur != ' ')):
-                    # case (3) [-2:space][-1:final letter][cur:not space]
-                    self._mFinalCharVisualScore += 1
-            self._mBeforePrev = self._mPrev
-            self._mPrev = cur
-
-        # Forever detecting, till the end or until both model probers return
-        # eNotMe (handled above)
-        return eDetecting
-
-    def get_charset_name(self):
-        # Make the decision: is it Logical or Visual?
-        # If the final letter score distance is dominant enough, rely on it.
-        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
-        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
-            return LOGICAL_HEBREW_NAME
-        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
-            return VISUAL_HEBREW_NAME
-
-        # It's not dominant enough, try to rely on the model scores instead.
-        modelsub = (self._mLogicalProber.get_confidence()
-                    - self._mVisualProber.get_confidence())
-        if modelsub > MIN_MODEL_DISTANCE:
-            return LOGICAL_HEBREW_NAME
-        if modelsub < -MIN_MODEL_DISTANCE:
-            return VISUAL_HEBREW_NAME
-
-        # Still no good, back to final letter distance, maybe it'll save the
-        # day.
-        if finalsub < 0.0:
-            return VISUAL_HEBREW_NAME
-
-        # (finalsub > 0 - Logical) or (don't know what to do) default to
-        # Logical.
-        return LOGICAL_HEBREW_NAME
-
-    def get_state(self):
-        # Remain active as long as any of the model probers are active.
-        if (self._mLogicalProber.get_state() == eNotMe) and \
-           (self._mVisualProber.get_state() == eNotMe):
-            return eNotMe
-        return eDetecting
diff --git a/python/ext-libs/requests/packages/chardet/jisfreq.py b/python/ext-libs/requests/packages/chardet/jisfreq.py
deleted file mode 100644
index 064345b..0000000
--- a/python/ext-libs/requests/packages/chardet/jisfreq.py
+++ /dev/null
@@ -1,569 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials include literature and computer technology
-#
-# Japanese frequency table, applied to both S-JIS and EUC-JP
-# They are sorted in order.
-
-# 128  --> 0.77094
-# 256  --> 0.85710
-# 512  --> 0.92635
-# 1024 --> 0.97130
-# 2048 --> 0.99431
-#
-# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
-# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191
-#
-# Typical Distribution Ratio, 25% of IDR
-
-JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
-
-# Char to FreqOrder table ,
-JIS_TABLE_SIZE = 4368
-
-JISCharToFreqOrder = (
-  40,   1,   6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, #   16
-3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247,  18, 179,5071, 856,1661, #   32
-1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, #   48
-2042,1061,1062,  48,  49,  44,  45, 433, 434,1040,1041, 996, 787,2997,1255,4305, #   64
-2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, #   80
-5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, #   96
-1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, #  112
-5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, #  128
-5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, #  144
-5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, #  160
-5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, #  176
-5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, #  192
-5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, #  208
-1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, #  224
-1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, #  240
-1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, #  256
-2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, #  272
-3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161,  26,3377,   2,3929,  20, #  288
-3691,  47,4100,  50,  17,  16,  35, 268,  27, 243,  42, 155,  24, 154,  29, 184, #  304
-   4,  91,  14,  92,  53, 396,  33, 289,   9,  37,  64, 620,  21,  39, 321,   5, #  320
-  12,  11,  52,  13,   3, 208, 138,   0,   7,  60, 526, 141, 151,1069, 181, 275, #  336
-1591,  83, 132,1475, 126, 331, 829,  15,  69, 160,  59,  22, 157,  55,1079, 312, #  352
- 109,  38,  23,  25,  10,  19,  79,5195,  61, 382,1124,   8,  30,5196,5197,5198, #  368
-5199,5200,5201,5202,5203,5204,5205,5206,  89,  62,  74,  34,2416, 112, 139, 196, #  384
- 271, 149,  84, 607, 131, 765,  46,  88, 153, 683,  76, 874, 101, 258,  57,  80, #  400
-  32, 364, 121,1508, 169,1547,  68, 235, 145,2999,  41, 360,3027,  70,  63,  31, #  416
-  43, 259, 262,1383,  99, 533, 194,  66,  93, 846, 217, 192,  56, 106,  58, 565, #  432
- 280, 272, 311, 256, 146,  82, 308,  71, 100, 128, 214, 655, 110, 261, 104,1140, #  448
-  54,  51,  36,  87,  67,3070, 185,2618,2936,2020,  28,1066,2390,2059,5207,5208, #  464
-5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, #  480
-5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, #  496
-5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, #  512
-4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, #  528
-5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, #  544
-5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, #  560
-5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, #  576
-5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, #  592
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, #  608
-5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, #  624
-5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, #  640
-5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, #  656
-5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, #  672
-3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, #  688
-5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, #  704
-5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, #  720
-5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, #  736
-5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, #  752
-5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, #  768
-5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, #  784
-5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, #  800
-5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, #  816
-5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, #  832
-5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, #  848
-5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, #  864
-5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, #  880
-5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, #  896
-5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, #  912
-5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, #  928
-5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, #  944
-5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, #  960
-5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, #  976
-5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, #  992
-5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
-5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
-5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
-5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
-5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
-5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
-5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
-5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
-5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
-5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
-5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
-5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
-5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
-5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
-5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
-5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
-5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
-5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
-6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
-6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
-6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
-6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
-6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
-6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
-6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
-6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
-4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
- 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
- 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
-1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619,  65,3302,2045, # 1488
-1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
- 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
-3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
-3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
- 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
-3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
-3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
- 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
-2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
- 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
-3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
-1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
- 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
-1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
- 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
-2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
-2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
-2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
-2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
-1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
-1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
-1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
-1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
-2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
-1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
-2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
-1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
-1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
-1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
-1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
-1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
-1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
- 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
- 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
-1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
-2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
-2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
-2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
-3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
-3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
- 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
-3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
-1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876,  78,2287,1482,1277, # 2176
- 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
-2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
-1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
- 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
-3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
-4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
-2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
-1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
-2601,1919,1078,  75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
-1075, 292,3818,1756,2602, 317,  98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
- 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
- 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
-1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
-2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
-2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
-2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
-3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
-1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
-2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
- 359,2291,1676,  73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
- 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
- 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
-1209,  96, 587,2166,1032, 260,1072,2153, 173,  94, 226,3244, 819,2006,4642,4114, # 2544
-2203, 231,1744, 782,  97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
- 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
-1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
-1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
- 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
-1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
-1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
-1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
- 764,2861,1853, 688,2429,1920,1462,  77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
-2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
- 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
-2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
-3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
-2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
-1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
-6147, 441, 762,1771,3447,3607,3608,1904, 840,3037,  86, 939,1385, 572,1370,2445, # 2800
-1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
-2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
-1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
- 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
-  72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
-3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
-3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
-1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
-1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
-1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
-1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
- 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
- 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
-2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
- 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
-3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
-2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
- 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
-1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
-2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
- 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
-1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
- 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
-4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
-2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
-1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
- 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
-1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
-2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
- 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
-6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
-1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
-1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
-2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
-3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
- 914,2550,2587,  81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
-3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
-1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
- 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
-1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
- 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
-3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
- 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
-2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
- 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
-4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
-2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
-1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
-1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
-1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
- 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
-1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
-3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
-1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
-3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
- 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
- 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
- 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
-2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
-1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
- 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
-1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
- 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
-1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
- 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
- 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
- 480,2083,1774,3458, 923,2279,1350, 221,3086,  85,2233,2234,3835,1585,3010,2147, # 3872
-1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
-1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
-2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
-4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
- 227,1351,1645,2453,2193,1421,2887, 812,2121, 634,  95,2435, 201,2312,4665,1646, # 3952
-1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
- 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
-1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
-3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
-1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
-2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
-2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
-1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
-1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
-2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
- 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
-2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
-1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
-1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
-1279,2136,1697,2335, 204, 721,2097,3838,  90,6186,2085,2505, 191,3967, 124,2148, # 4192
-1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
-3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
-2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
-2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
- 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
-3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
-3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
-1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
-2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
-1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
-2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368  #last 512
-#Everything below is of no interest for detection purpose
-2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
-6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
-6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
-6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
-6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
-4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
-4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
-3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
-3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
-4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
-3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
-6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
-4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
-6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
-6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
-6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
-6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
-6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
-6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
-3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
-3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
-6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
-2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
-4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
-4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
-4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
-6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
-3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
-4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
-4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
-6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
-4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
-6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
-3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
-2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
-4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
-2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
-6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
-4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
-6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
-6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
-6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
-4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
-6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
-2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
-6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
-4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
-6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
-4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
-4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
-6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
-6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
-6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
-3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
-1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
-3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
-3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
-4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
-6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
-3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
-6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
-3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
-3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
-2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
-6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
-6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
-3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
-6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
-3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
-6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
-6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
-6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
-4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
-6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
-4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
-3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
-3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
-6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
-6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
-4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
-6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
-6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
-6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
-6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
-6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
-6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
-4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
-4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
-3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
-6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
-4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
-2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
-6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
-6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
-4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
-2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
-4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
-2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
-4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
-4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
-4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
-6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
-3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
-6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
-3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
-6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
-2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
-3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
-7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
-2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
-3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
-3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
-3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
-3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
-7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
-7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
-7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
-7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
-7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
-4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
-3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
-3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
-4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
-3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
-3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
-7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
-4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
-7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
-7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
-7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
-7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
-7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
-4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
-4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
-7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
-3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
-4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
-7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
-7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
-4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
-3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
-3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
-7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
-4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
-4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
-4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
-4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
-4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
-4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
-7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
-7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
-7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
-7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
-7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
-2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
-3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
-7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
-7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
-3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
-4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
-3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
-3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
-2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
-7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
-7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
-4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
-3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
-3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
-7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
-7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
-7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
-4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
-7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
-2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
-3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
-4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
-7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
-4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
-4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
-7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
-7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
-5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
-7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
-7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
-7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
-7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
-7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
-5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
-5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
-7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
-3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
-7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
-7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
-3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
-7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
-7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
-1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
-3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
-4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
-2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
-3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
-2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
-5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
-4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
-4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
-5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
-7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
-7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
-7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
-7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
-3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
-7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
-3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
-7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
-4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
-7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
-7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
-7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
-7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
-7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
-7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
-7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
-7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
-7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
-7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
-7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
-7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
-8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
-8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
-8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
-8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
-8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
-8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
-8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
-8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
-8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
-8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
-8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
-8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
-8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
-8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
-8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
-8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
-8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/jpcntx.py b/python/ext-libs/requests/packages/chardet/jpcntx.py
deleted file mode 100644
index 59aeb6a..0000000
--- a/python/ext-libs/requests/packages/chardet/jpcntx.py
+++ /dev/null
@@ -1,227 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .compat import wrap_ord
-
-NUM_OF_CATEGORY = 6
-DONT_KNOW = -1
-ENOUGH_REL_THRESHOLD = 100
-MAX_REL_THRESHOLD = 1000
-MINIMUM_DATA_THRESHOLD = 4
-
-# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
-jp2CharContext = (
-(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
-(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
-(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
-(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
-(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
-(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
-(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
-(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
-(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
-(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
-(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
-(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
-(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
-(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
-(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
-(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
-(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
-(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
-(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
-(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
-(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
-(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
-(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
-(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
-(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
-(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
-(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
-(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
-(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
-(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
-(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
-(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
-(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
-(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
-(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
-(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
-(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
-(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
-(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
-(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
-(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
-(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
-(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
-(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
-(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
-(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
-(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
-(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
-(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
-(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
-(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
-(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
-(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
-(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
-(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
-(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
-(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
-(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
-(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
-(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
-(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
-(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
-(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
-(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
-(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
-(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
-(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
-(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
-(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
-(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
-)
-
-class JapaneseContextAnalysis:
-    def __init__(self):
-        self.reset()
-
-    def reset(self):
-        self._mTotalRel = 0  # total sequence received
-        # category counters, each interger counts sequence in its category
-        self._mRelSample = [0] * NUM_OF_CATEGORY
-        # if last byte in current buffer is not the last byte of a character,
-        # we need to know how many bytes to skip in next buffer
-        self._mNeedToSkipCharNum = 0
-        self._mLastCharOrder = -1  # The order of previous char
-        # If this flag is set to True, detection is done and conclusion has
-        # been made
-        self._mDone = False
-
-    def feed(self, aBuf, aLen):
-        if self._mDone:
-            return
-
-        # The buffer we got is byte oriented, and a character may span in more than one
-        # buffers. In case the last one or two byte in last buffer is not
-        # complete, we record how many byte needed to complete that character
-        # and skip these bytes here.  We can choose to record those bytes as
-        # well and analyse the character once it is complete, but since a
-        # character will not make much difference, by simply skipping
-        # this character will simply our logic and improve performance.
-        i = self._mNeedToSkipCharNum
-        while i < aLen:
-            order, charLen = self.get_order(aBuf[i:i + 2])
-            i += charLen
-            if i > aLen:
-                self._mNeedToSkipCharNum = i - aLen
-                self._mLastCharOrder = -1
-            else:
-                if (order != -1) and (self._mLastCharOrder != -1):
-                    self._mTotalRel += 1
-                    if self._mTotalRel > MAX_REL_THRESHOLD:
-                        self._mDone = True
-                        break
-                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
-                self._mLastCharOrder = order
-
-    def got_enough_data(self):
-        return self._mTotalRel > ENOUGH_REL_THRESHOLD
-
-    def get_confidence(self):
-        # This is just one way to calculate confidence. It works well for me.
-        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
-            return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
-        else:
-            return DONT_KNOW
-
-    def get_order(self, aBuf):
-        return -1, 1
-
-class SJISContextAnalysis(JapaneseContextAnalysis):
-    def __init__(self):
-        self.charset_name = "SHIFT_JIS"
-
-    def get_charset_name(self):
-        return self.charset_name
-
-    def get_order(self, aBuf):
-        if not aBuf:
-            return -1, 1
-        # find out current char's byte length
-        first_char = wrap_ord(aBuf[0])
-        if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
-            charLen = 2
-            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
-                self.charset_name = "CP932"
-        else:
-            charLen = 1
-
-        # return its order if it is hiragana
-        if len(aBuf) > 1:
-            second_char = wrap_ord(aBuf[1])
-            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
-                return second_char - 0x9F, charLen
-
-        return -1, charLen
-
-class EUCJPContextAnalysis(JapaneseContextAnalysis):
-    def get_order(self, aBuf):
-        if not aBuf:
-            return -1, 1
-        # find out current char's byte length
-        first_char = wrap_ord(aBuf[0])
-        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
-            charLen = 2
-        elif first_char == 0x8F:
-            charLen = 3
-        else:
-            charLen = 1
-
-        # return its order if it is hiragana
-        if len(aBuf) > 1:
-            second_char = wrap_ord(aBuf[1])
-            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
-                return second_char - 0xA1, charLen
-
-        return -1, charLen
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langbulgarianmodel.py b/python/ext-libs/requests/packages/chardet/langbulgarianmodel.py
deleted file mode 100644
index e5788fc..0000000
--- a/python/ext-libs/requests/packages/chardet/langbulgarianmodel.py
+++ /dev/null
@@ -1,229 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-# this table is modified base on win1251BulgarianCharToOrderMap, so
-# only number <64 is sure valid
-
-Latin5_BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,  # 80
-210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,  # 90
- 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238,  # a0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # b0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56,  # c0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # d0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16,  # e0
- 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253,  # f0
-)
-
-win1251BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220,  # 80
-221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229,  # 90
- 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240,  # a0
- 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250,  # b0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # c0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56,  # d0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # e0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16,  # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 96.9392%
-# first 1024 sequences:3.0618%
-# rest  sequences:     0.2992%
-# negative sequences:  0.0020%
-BulgarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
-3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
-0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
-0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
-0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
-0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
-0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
-2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
-3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
-1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
-3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
-1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
-2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
-2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
-3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
-1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
-2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
-2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
-1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
-2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
-2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
-2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
-1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
-2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
-1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
-3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
-1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
-3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
-1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
-2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
-1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
-2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
-1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
-2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
-1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
-2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
-1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
-0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
-1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
-1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
-1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
-0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
-0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
-1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
-1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-)
-
-Latin5BulgarianModel = {
-  'charToOrderMap': Latin5_BulgarianCharToOrderMap,
-  'precedenceMatrix': BulgarianLangModel,
-  'mTypicalPositiveRatio': 0.969392,
-  'keepEnglishLetter': False,
-  'charsetName': "ISO-8859-5"
-}
-
-Win1251BulgarianModel = {
-  'charToOrderMap': win1251BulgarianCharToOrderMap,
-  'precedenceMatrix': BulgarianLangModel,
-  'mTypicalPositiveRatio': 0.969392,
-  'keepEnglishLetter': False,
-  'charsetName': "windows-1251"
-}
-
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langcyrillicmodel.py b/python/ext-libs/requests/packages/chardet/langcyrillicmodel.py
deleted file mode 100644
index a86f54b..0000000
--- a/python/ext-libs/requests/packages/chardet/langcyrillicmodel.py
+++ /dev/null
@@ -1,329 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# KOI8-R language model
-# Character Mapping Table:
-KOI8R_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,  # 80
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,  # 90
-223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237,  # a0
-238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,  # b0
- 27,  3, 21, 28, 13,  2, 39, 19, 26,  4, 23, 11,  8, 12,  5,  1,  # c0
- 15, 16,  9,  7,  6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54,  # d0
- 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34,  # e0
- 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70,  # f0
-)
-
-win1251_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-)
-
-latin5_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
-)
-
-macCyrillic_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
-)
-
-IBM855_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
-206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
-  3, 37, 21, 44, 28, 58, 13, 41,  2, 48, 39, 53, 19, 46,218,219,
-220,221,222,223,224, 26, 55,  4, 42,225,226,227,228, 23, 60,229,
-230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
-  8, 49, 12, 38,  5, 31,  1, 34, 15,244,245,246,247, 35, 16,248,
- 43,  9, 45,  7, 32,  6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
-250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
-)
-
-IBM866_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 97.6601%
-# first 1024 sequences: 2.3389%
-# rest  sequences:      0.1237%
-# negative sequences:   0.0009%
-RussianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
-3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
-0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
-0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
-1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
-1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
-2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
-1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
-3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
-1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
-2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
-1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
-1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
-1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
-2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
-1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
-3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
-1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
-2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
-1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
-2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
-1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
-1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
-1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
-3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
-2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
-3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
-1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
-1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
-0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
-1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
-1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
-0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
-1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
-2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
-1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
-1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
-2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
-1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
-2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
-1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
-1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
-0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
-0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
-0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
-0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
-0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
-0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
-2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
-0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
-)
-
-Koi8rModel = {
-  'charToOrderMap': KOI8R_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "KOI8-R"
-}
-
-Win1251CyrillicModel = {
-  'charToOrderMap': win1251_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "windows-1251"
-}
-
-Latin5CyrillicModel = {
-  'charToOrderMap': latin5_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "ISO-8859-5"
-}
-
-MacCyrillicModel = {
-  'charToOrderMap': macCyrillic_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "MacCyrillic"
-};
-
-Ibm866Model = {
-  'charToOrderMap': IBM866_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "IBM866"
-}
-
-Ibm855Model = {
-  'charToOrderMap': IBM855_CharToOrderMap,
-  'precedenceMatrix': RussianLangModel,
-  'mTypicalPositiveRatio': 0.976601,
-  'keepEnglishLetter': False,
-  'charsetName': "IBM855"
-}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langgreekmodel.py b/python/ext-libs/requests/packages/chardet/langgreekmodel.py
deleted file mode 100644
index ddb5837..0000000
--- a/python/ext-libs/requests/packages/chardet/langgreekmodel.py
+++ /dev/null
@@ -1,225 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin7_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
-253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
-253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123,  # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
-)
-
-win1253_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
-253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
-253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123,  # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 98.2851%
-# first 1024 sequences:1.7001%
-# rest  sequences:     0.0359%
-# negative sequences:  0.0148%
-GreekLangModel = (
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
-3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
-2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
-0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
-2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
-2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
-0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
-2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
-0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
-3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
-3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
-2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
-2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
-0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
-0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
-0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
-0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
-0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
-0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
-0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
-0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
-0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
-0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
-0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
-0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
-0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
-0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
-0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
-0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
-0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
-0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
-0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
-0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
-0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
-0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
-0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
-0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
-0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
-0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
-0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
-0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
-0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-)
-
-Latin7GreekModel = {
-  'charToOrderMap': Latin7_CharToOrderMap,
-  'precedenceMatrix': GreekLangModel,
-  'mTypicalPositiveRatio': 0.982851,
-  'keepEnglishLetter': False,
-  'charsetName': "ISO-8859-7"
-}
-
-Win1253GreekModel = {
-  'charToOrderMap': win1253_CharToOrderMap,
-  'precedenceMatrix': GreekLangModel,
-  'mTypicalPositiveRatio': 0.982851,
-  'keepEnglishLetter': False,
-  'charsetName': "windows-1253"
-}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langhebrewmodel.py b/python/ext-libs/requests/packages/chardet/langhebrewmodel.py
deleted file mode 100644
index 75f2bc7..0000000
--- a/python/ext-libs/requests/packages/chardet/langhebrewmodel.py
+++ /dev/null
@@ -1,201 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-#          Simon Montagu
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Shoshannah Forbes - original C code (?)
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Windows-1255 language model
-# Character Mapping Table:
-win1255_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85,  # 40
- 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253,  # 50
-253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49,  # 60
- 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253,  # 70
-124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
-215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
- 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
-106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
- 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
-238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
-  9,  8, 20, 16,  3,  2, 24, 14, 22,  1, 25, 15,  4, 11,  6, 23,
- 12, 19, 13, 26, 18, 27, 21, 17,  7, 10,  5,251,252,128, 96,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 98.4004%
-# first 1024 sequences: 1.5981%
-# rest  sequences:      0.087%
-# negative sequences:   0.0015%
-HebrewLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
-3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
-1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
-1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
-1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
-1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
-1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
-0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
-0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
-1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
-0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
-0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
-0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
-0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
-0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
-0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
-0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
-0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
-0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
-0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
-0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
-0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
-1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
-0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
-0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
-0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
-0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
-0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
-2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
-0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
-0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
-1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
-0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
-2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
-1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
-2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
-2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
-0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
-0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
-)
-
-Win1255HebrewModel = {
-  'charToOrderMap': win1255_CharToOrderMap,
-  'precedenceMatrix': HebrewLangModel,
-  'mTypicalPositiveRatio': 0.984004,
-  'keepEnglishLetter': False,
-  'charsetName': "windows-1255"
-}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langhungarianmodel.py b/python/ext-libs/requests/packages/chardet/langhungarianmodel.py
deleted file mode 100644
index 49d2f0f..0000000
--- a/python/ext-libs/requests/packages/chardet/langhungarianmodel.py
+++ /dev/null
@@ -1,225 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin2_HungarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
- 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
-253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
- 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
-159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
-175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
-191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
- 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
-221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
-232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
- 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
-245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
-)
-
-win1250HungarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
- 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
-253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
- 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
-161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
-177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
-191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
- 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
-221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
-232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
- 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
-245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 94.7368%
-# first 1024 sequences:5.2623%
-# rest  sequences:     0.8894%
-# negative sequences:  0.0009%
-HungarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
-3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
-3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
-0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
-0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
-1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
-1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
-1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
-3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
-2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
-2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
-2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
-2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
-2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
-3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
-2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
-2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
-2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
-1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
-1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
-3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
-1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
-1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
-2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
-2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
-2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
-3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
-2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
-1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
-1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
-2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
-2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
-1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
-1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
-2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
-1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
-1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
-2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
-2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
-2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
-1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
-1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
-1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
-0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
-2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
-2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
-1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
-2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
-1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
-1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
-2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
-2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
-2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
-1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
-2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
-0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
-0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
-)
-
-Latin2HungarianModel = {
-  'charToOrderMap': Latin2_HungarianCharToOrderMap,
-  'precedenceMatrix': HungarianLangModel,
-  'mTypicalPositiveRatio': 0.947368,
-  'keepEnglishLetter': True,
-  'charsetName': "ISO-8859-2"
-}
-
-Win1250HungarianModel = {
-  'charToOrderMap': win1250HungarianCharToOrderMap,
-  'precedenceMatrix': HungarianLangModel,
-  'mTypicalPositiveRatio': 0.947368,
-  'keepEnglishLetter': True,
-  'charsetName': "windows-1250"
-}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/langthaimodel.py b/python/ext-libs/requests/packages/chardet/langthaimodel.py
deleted file mode 100644
index 0508b1b..0000000
--- a/python/ext-libs/requests/packages/chardet/langthaimodel.py
+++ /dev/null
@@ -1,200 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# The following result for thai was collected from a limited sample (1M).
-
-# Character Mapping Table:
-TIS620CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111,  # 40
-188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253,  # 50
-253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82,  # 60
- 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253,  # 70
-209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
-223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
-236,  5, 30,237, 24,238, 75,  8, 26, 52, 34, 51,119, 47, 58, 57,
- 49, 53, 55, 43, 20, 19, 44, 14, 48,  3, 17, 25, 39, 62, 31, 54,
- 45,  9, 16,  2, 61, 15,239, 12, 42, 46, 18, 21, 76,  4, 66, 63,
- 22, 10,  1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
- 11, 28, 41, 29, 33,245, 50, 37,  6,  7, 67, 77, 38, 93,246,247,
- 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 92.6386%
-# first 1024 sequences:7.3177%
-# rest  sequences:     1.0230%
-# negative sequences:  0.0436%
-ThaiLangModel = (
-0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
-0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
-3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
-0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
-3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
-3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
-3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
-3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
-3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
-3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
-2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
-3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
-0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
-0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
-1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
-3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
-3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
-1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
-0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
-3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
-2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
-3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
-0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
-3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
-3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
-2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
-3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
-2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
-3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
-3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
-3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
-3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
-1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
-0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
-0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
-3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
-3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
-1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
-3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
-3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
-0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
-0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
-1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
-1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
-3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
-0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
-3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
-0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
-0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
-0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
-0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
-0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
-0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
-0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
-3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
-0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
-0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
-3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
-2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
-0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
-3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
-1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
-1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
-1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-)
-
-TIS620ThaiModel = {
-  'charToOrderMap': TIS620CharToOrderMap,
-  'precedenceMatrix': ThaiLangModel,
-  'mTypicalPositiveRatio': 0.926386,
-  'keepEnglishLetter': False,
-  'charsetName': "TIS-620"
-}
-
-# flake8: noqa
diff --git a/python/ext-libs/requests/packages/chardet/latin1prober.py b/python/ext-libs/requests/packages/chardet/latin1prober.py
deleted file mode 100644
index eef3573..0000000
--- a/python/ext-libs/requests/packages/chardet/latin1prober.py
+++ /dev/null
@@ -1,139 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .constants import eNotMe
-from .compat import wrap_ord
-
-FREQ_CAT_NUM = 4
-
-UDF = 0  # undefined
-OTH = 1  # other
-ASC = 2  # ascii capital letter
-ASS = 3  # ascii small letter
-ACV = 4  # accent capital vowel
-ACO = 5  # accent capital other
-ASV = 6  # accent small vowel
-ASO = 7  # accent small other
-CLASS_NUM = 8  # total classes
-
-Latin1_CharToClass = (
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 00 - 07
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 08 - 0F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 10 - 17
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 18 - 1F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 20 - 27
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 28 - 2F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 30 - 37
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 38 - 3F
-    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 40 - 47
-    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 48 - 4F
-    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 50 - 57
-    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,   # 58 - 5F
-    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 60 - 67
-    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 68 - 6F
-    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 70 - 77
-    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,   # 78 - 7F
-    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,   # 80 - 87
-    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,   # 88 - 8F
-    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 90 - 97
-    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,   # 98 - 9F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A0 - A7
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A8 - AF
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B0 - B7
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B8 - BF
-    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,   # C0 - C7
-    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,   # C8 - CF
-    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,   # D0 - D7
-    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,   # D8 - DF
-    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,   # E0 - E7
-    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,   # E8 - EF
-    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,   # F0 - F7
-    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,   # F8 - FF
-)
-
-# 0 : illegal
-# 1 : very unlikely
-# 2 : normal
-# 3 : very likely
-Latin1ClassModel = (
-    # UDF OTH ASC ASS ACV ACO ASV ASO
-    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
-    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
-    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
-    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
-    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
-    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
-    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
-    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
-)
-
-
-class Latin1Prober(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self.reset()
-
-    def reset(self):
-        self._mLastCharClass = OTH
-        self._mFreqCounter = [0] * FREQ_CAT_NUM
-        CharSetProber.reset(self)
-
-    def get_charset_name(self):
-        return "windows-1252"
-
-    def feed(self, aBuf):
-        aBuf = self.filter_with_english_letters(aBuf)
-        for c in aBuf:
-            charClass = Latin1_CharToClass[wrap_ord(c)]
-            freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
-                                    + charClass]
-            if freq == 0:
-                self._mState = eNotMe
-                break
-            self._mFreqCounter[freq] += 1
-            self._mLastCharClass = charClass
-
-        return self.get_state()
-
-    def get_confidence(self):
-        if self.get_state() == eNotMe:
-            return 0.01
-
-        total = sum(self._mFreqCounter)
-        if total < 0.01:
-            confidence = 0.0
-        else:
-            confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
-                          / total)
-        if confidence < 0.0:
-            confidence = 0.0
-        # lower the confidence of latin1 so that other more accurate
-        # detector can take priority.
-        confidence = confidence * 0.73
-        return confidence
diff --git a/python/ext-libs/requests/packages/chardet/mbcharsetprober.py b/python/ext-libs/requests/packages/chardet/mbcharsetprober.py
deleted file mode 100644
index bb42f2f..0000000
--- a/python/ext-libs/requests/packages/chardet/mbcharsetprober.py
+++ /dev/null
@@ -1,86 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Proofpoint, Inc.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-from . import constants
-from .charsetprober import CharSetProber
-
-
-class MultiByteCharSetProber(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mDistributionAnalyzer = None
-        self._mCodingSM = None
-        self._mLastChar = [0, 0]
-
-    def reset(self):
-        CharSetProber.reset(self)
-        if self._mCodingSM:
-            self._mCodingSM.reset()
-        if self._mDistributionAnalyzer:
-            self._mDistributionAnalyzer.reset()
-        self._mLastChar = [0, 0]
-
-    def get_charset_name(self):
-        pass
-
-    def feed(self, aBuf):
-        aLen = len(aBuf)
-        for i in range(0, aLen):
-            codingState = self._mCodingSM.next_state(aBuf[i])
-            if codingState == constants.eError:
-                if constants._debug:
-                    sys.stderr.write(self.get_charset_name()
-                                     + ' prober hit error at byte ' + str(i)
-                                     + '\n')
-                self._mState = constants.eNotMe
-                break
-            elif codingState == constants.eItsMe:
-                self._mState = constants.eFoundIt
-                break
-            elif codingState == constants.eStart:
-                charLen = self._mCodingSM.get_current_charlen()
-                if i == 0:
-                    self._mLastChar[1] = aBuf[0]
-                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
-                else:
-                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
-                                                     charLen)
-
-        self._mLastChar[0] = aBuf[aLen - 1]
-
-        if self.get_state() == constants.eDetecting:
-            if (self._mDistributionAnalyzer.got_enough_data() and
-                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
-                self._mState = constants.eFoundIt
-
-        return self.get_state()
-
-    def get_confidence(self):
-        return self._mDistributionAnalyzer.get_confidence()
diff --git a/python/ext-libs/requests/packages/chardet/mbcsgroupprober.py b/python/ext-libs/requests/packages/chardet/mbcsgroupprober.py
deleted file mode 100644
index 03c9dcf..0000000
--- a/python/ext-libs/requests/packages/chardet/mbcsgroupprober.py
+++ /dev/null
@@ -1,54 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Proofpoint, Inc.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetgroupprober import CharSetGroupProber
-from .utf8prober import UTF8Prober
-from .sjisprober import SJISProber
-from .eucjpprober import EUCJPProber
-from .gb2312prober import GB2312Prober
-from .euckrprober import EUCKRProber
-from .cp949prober import CP949Prober
-from .big5prober import Big5Prober
-from .euctwprober import EUCTWProber
-
-
-class MBCSGroupProber(CharSetGroupProber):
-    def __init__(self):
-        CharSetGroupProber.__init__(self)
-        self._mProbers = [
-            UTF8Prober(),
-            SJISProber(),
-            EUCJPProber(),
-            GB2312Prober(),
-            EUCKRProber(),
-            CP949Prober(),
-            Big5Prober(),
-            EUCTWProber()
-        ]
-        self.reset()
diff --git a/python/ext-libs/requests/packages/chardet/mbcssm.py b/python/ext-libs/requests/packages/chardet/mbcssm.py
deleted file mode 100644
index efe678c..0000000
--- a/python/ext-libs/requests/packages/chardet/mbcssm.py
+++ /dev/null
@@ -1,572 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .constants import eStart, eError, eItsMe
-
-# BIG5
-
-BIG5_cls = (
-    1,1,1,1,1,1,1,1,  # 00 - 07    #allow 0x00 as legal value
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,1,  # 78 - 7f
-    4,4,4,4,4,4,4,4,  # 80 - 87
-    4,4,4,4,4,4,4,4,  # 88 - 8f
-    4,4,4,4,4,4,4,4,  # 90 - 97
-    4,4,4,4,4,4,4,4,  # 98 - 9f
-    4,3,3,3,3,3,3,3,  # a0 - a7
-    3,3,3,3,3,3,3,3,  # a8 - af
-    3,3,3,3,3,3,3,3,  # b0 - b7
-    3,3,3,3,3,3,3,3,  # b8 - bf
-    3,3,3,3,3,3,3,3,  # c0 - c7
-    3,3,3,3,3,3,3,3,  # c8 - cf
-    3,3,3,3,3,3,3,3,  # d0 - d7
-    3,3,3,3,3,3,3,3,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,3,3,3,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,3,3,0  # f8 - ff
-)
-
-BIG5_st = (
-    eError,eStart,eStart,     3,eError,eError,eError,eError,#00-07
-    eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
-    eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
-)
-
-Big5CharLenTable = (0, 1, 1, 2, 0)
-
-Big5SMModel = {'classTable': BIG5_cls,
-               'classFactor': 5,
-               'stateTable': BIG5_st,
-               'charLenTable': Big5CharLenTable,
-               'name': 'Big5'}
-
-# CP949
-
-CP949_cls  = (
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0,  # 00 - 0f
-    1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1,  # 10 - 1f
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 20 - 2f
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 30 - 3f
-    1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4,  # 40 - 4f
-    4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 50 - 5f
-    1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5,  # 60 - 6f
-    5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 70 - 7f
-    0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 80 - 8f
-    6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 90 - 9f
-    6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8,  # a0 - af
-    7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,  # b0 - bf
-    7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2,  # c0 - cf
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # d0 - df
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # e0 - ef
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0,  # f0 - ff
-)
-
-CP949_st = (
-#cls=    0      1      2      3      4      5      6      7      8      9  # previous state =
-    eError,eStart,     3,eError,eStart,eStart,     4,     5,eError,     6, # eStart
-    eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
-    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
-    eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
-    eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
-    eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
-    eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
-)
-
-CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
-
-CP949SMModel = {'classTable': CP949_cls,
-                'classFactor': 10,
-                'stateTable': CP949_st,
-                'charLenTable': CP949CharLenTable,
-                'name': 'CP949'}
-
-# EUC-JP
-
-EUCJP_cls = (
-    4,4,4,4,4,4,4,4,  # 00 - 07
-    4,4,4,4,4,4,5,5,  # 08 - 0f
-    4,4,4,4,4,4,4,4,  # 10 - 17
-    4,4,4,5,4,4,4,4,  # 18 - 1f
-    4,4,4,4,4,4,4,4,  # 20 - 27
-    4,4,4,4,4,4,4,4,  # 28 - 2f
-    4,4,4,4,4,4,4,4,  # 30 - 37
-    4,4,4,4,4,4,4,4,  # 38 - 3f
-    4,4,4,4,4,4,4,4,  # 40 - 47
-    4,4,4,4,4,4,4,4,  # 48 - 4f
-    4,4,4,4,4,4,4,4,  # 50 - 57
-    4,4,4,4,4,4,4,4,  # 58 - 5f
-    4,4,4,4,4,4,4,4,  # 60 - 67
-    4,4,4,4,4,4,4,4,  # 68 - 6f
-    4,4,4,4,4,4,4,4,  # 70 - 77
-    4,4,4,4,4,4,4,4,  # 78 - 7f
-    5,5,5,5,5,5,5,5,  # 80 - 87
-    5,5,5,5,5,5,1,3,  # 88 - 8f
-    5,5,5,5,5,5,5,5,  # 90 - 97
-    5,5,5,5,5,5,5,5,  # 98 - 9f
-    5,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,2,2,2,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,2,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,0,5  # f8 - ff
-)
-
-EUCJP_st = (
-          3,     4,     3,     5,eStart,eError,eError,eError,#00-07
-     eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
-     eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
-     eError,eError,eStart,eError,eError,eError,     3,eError,#18-1f
-          3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
-)
-
-EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
-
-EUCJPSMModel = {'classTable': EUCJP_cls,
-                'classFactor': 6,
-                'stateTable': EUCJP_st,
-                'charLenTable': EUCJPCharLenTable,
-                'name': 'EUC-JP'}
-
-# EUC-KR
-
-EUCKR_cls  = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    1,1,1,1,1,1,1,1,  # 40 - 47
-    1,1,1,1,1,1,1,1,  # 48 - 4f
-    1,1,1,1,1,1,1,1,  # 50 - 57
-    1,1,1,1,1,1,1,1,  # 58 - 5f
-    1,1,1,1,1,1,1,1,  # 60 - 67
-    1,1,1,1,1,1,1,1,  # 68 - 6f
-    1,1,1,1,1,1,1,1,  # 70 - 77
-    1,1,1,1,1,1,1,1,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,3,3,3,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,3,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    2,2,2,2,2,2,2,2,  # e0 - e7
-    2,2,2,2,2,2,2,2,  # e8 - ef
-    2,2,2,2,2,2,2,2,  # f0 - f7
-    2,2,2,2,2,2,2,0   # f8 - ff
-)
-
-EUCKR_st = (
-    eError,eStart,     3,eError,eError,eError,eError,eError,#00-07
-    eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
-)
-
-EUCKRCharLenTable = (0, 1, 2, 0)
-
-EUCKRSMModel = {'classTable': EUCKR_cls,
-                'classFactor': 4,
-                'stateTable': EUCKR_st,
-                'charLenTable': EUCKRCharLenTable,
-                'name': 'EUC-KR'}
-
-# EUC-TW
-
-EUCTW_cls = (
-    2,2,2,2,2,2,2,2,  # 00 - 07
-    2,2,2,2,2,2,0,0,  # 08 - 0f
-    2,2,2,2,2,2,2,2,  # 10 - 17
-    2,2,2,0,2,2,2,2,  # 18 - 1f
-    2,2,2,2,2,2,2,2,  # 20 - 27
-    2,2,2,2,2,2,2,2,  # 28 - 2f
-    2,2,2,2,2,2,2,2,  # 30 - 37
-    2,2,2,2,2,2,2,2,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,2,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,6,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,3,4,4,4,4,4,4,  # a0 - a7
-    5,5,1,1,1,1,1,1,  # a8 - af
-    1,1,1,1,1,1,1,1,  # b0 - b7
-    1,1,1,1,1,1,1,1,  # b8 - bf
-    1,1,3,1,3,3,3,3,  # c0 - c7
-    3,3,3,3,3,3,3,3,  # c8 - cf
-    3,3,3,3,3,3,3,3,  # d0 - d7
-    3,3,3,3,3,3,3,3,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,3,3,3,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,3,3,0   # f8 - ff
-)
-
-EUCTW_st = (
-    eError,eError,eStart,     3,     3,     3,     4,eError,#00-07
-    eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
-    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
-    eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
-         5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
-    eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
-)
-
-EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
-
-EUCTWSMModel = {'classTable': EUCTW_cls,
-                'classFactor': 7,
-                'stateTable': EUCTW_st,
-                'charLenTable': EUCTWCharLenTable,
-                'name': 'x-euc-tw'}
-
-# GB2312
-
-GB2312_cls = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    3,3,3,3,3,3,3,3,  # 30 - 37
-    3,3,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,4,  # 78 - 7f
-    5,6,6,6,6,6,6,6,  # 80 - 87
-    6,6,6,6,6,6,6,6,  # 88 - 8f
-    6,6,6,6,6,6,6,6,  # 90 - 97
-    6,6,6,6,6,6,6,6,  # 98 - 9f
-    6,6,6,6,6,6,6,6,  # a0 - a7
-    6,6,6,6,6,6,6,6,  # a8 - af
-    6,6,6,6,6,6,6,6,  # b0 - b7
-    6,6,6,6,6,6,6,6,  # b8 - bf
-    6,6,6,6,6,6,6,6,  # c0 - c7
-    6,6,6,6,6,6,6,6,  # c8 - cf
-    6,6,6,6,6,6,6,6,  # d0 - d7
-    6,6,6,6,6,6,6,6,  # d8 - df
-    6,6,6,6,6,6,6,6,  # e0 - e7
-    6,6,6,6,6,6,6,6,  # e8 - ef
-    6,6,6,6,6,6,6,6,  # f0 - f7
-    6,6,6,6,6,6,6,0   # f8 - ff
-)
-
-GB2312_st = (
-    eError,eStart,eStart,eStart,eStart,eStart,     3,eError,#00-07
-    eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
-    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
-         4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
-    eError,eError,     5,eError,eError,eError,eItsMe,eError,#20-27
-    eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
-)
-
-# To be accurate, the length of class 6 can be either 2 or 4.
-# But it is not necessary to discriminate between the two since
-# it is used for frequency analysis only, and we are validing
-# each code range there as well. So it is safe to set it to be
-# 2 here.
-GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
-
-GB2312SMModel = {'classTable': GB2312_cls,
-                  'classFactor': 7,
-                  'stateTable': GB2312_st,
-                  'charLenTable': GB2312CharLenTable,
-                  'name': 'GB2312'}
-
-# Shift_JIS
-
-SJIS_cls = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,1,  # 78 - 7f
-    3,3,3,3,3,2,2,3,  # 80 - 87
-    3,3,3,3,3,3,3,3,  # 88 - 8f
-    3,3,3,3,3,3,3,3,  # 90 - 97
-    3,3,3,3,3,3,3,3,  # 98 - 9f
-    #0xa0 is illegal in sjis encoding, but some pages does
-    #contain such byte. We need to be more error forgiven.
-    2,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,2,2,2,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,2,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,4,4,4,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,0,0,0)  # f8 - ff
-
-
-SJIS_st = (
-    eError,eStart,eStart,     3,eError,eError,eError,eError,#00-07
-    eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
-    eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
-)
-
-SJISCharLenTable = (0, 1, 1, 2, 0, 0)
-
-SJISSMModel = {'classTable': SJIS_cls,
-               'classFactor': 6,
-               'stateTable': SJIS_st,
-               'charLenTable': SJISCharLenTable,
-               'name': 'Shift_JIS'}
-
-# UCS2-BE
-
-UCS2BE_cls = (
-    0,0,0,0,0,0,0,0,  # 00 - 07
-    0,0,1,0,0,2,0,0,  # 08 - 0f
-    0,0,0,0,0,0,0,0,  # 10 - 17
-    0,0,0,3,0,0,0,0,  # 18 - 1f
-    0,0,0,0,0,0,0,0,  # 20 - 27
-    0,3,3,3,3,3,0,0,  # 28 - 2f
-    0,0,0,0,0,0,0,0,  # 30 - 37
-    0,0,0,0,0,0,0,0,  # 38 - 3f
-    0,0,0,0,0,0,0,0,  # 40 - 47
-    0,0,0,0,0,0,0,0,  # 48 - 4f
-    0,0,0,0,0,0,0,0,  # 50 - 57
-    0,0,0,0,0,0,0,0,  # 58 - 5f
-    0,0,0,0,0,0,0,0,  # 60 - 67
-    0,0,0,0,0,0,0,0,  # 68 - 6f
-    0,0,0,0,0,0,0,0,  # 70 - 77
-    0,0,0,0,0,0,0,0,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,0,0,0,0,0,0,0,  # a0 - a7
-    0,0,0,0,0,0,0,0,  # a8 - af
-    0,0,0,0,0,0,0,0,  # b0 - b7
-    0,0,0,0,0,0,0,0,  # b8 - bf
-    0,0,0,0,0,0,0,0,  # c0 - c7
-    0,0,0,0,0,0,0,0,  # c8 - cf
-    0,0,0,0,0,0,0,0,  # d0 - d7
-    0,0,0,0,0,0,0,0,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,4,5   # f8 - ff
-)
-
-UCS2BE_st  = (
-          5,     7,     7,eError,     4,     3,eError,eError,#00-07
-     eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
-     eItsMe,eItsMe,     6,     6,     6,     6,eError,eError,#10-17
-          6,     6,     6,     6,     6,eItsMe,     6,     6,#18-1f
-          6,     6,     6,     6,     5,     7,     7,eError,#20-27
-          5,     8,     6,     6,eError,     6,     6,     6,#28-2f
-          6,     6,     6,     6,eError,eError,eStart,eStart #30-37
-)
-
-UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
-
-UCS2BESMModel = {'classTable': UCS2BE_cls,
-                 'classFactor': 6,
-                 'stateTable': UCS2BE_st,
-                 'charLenTable': UCS2BECharLenTable,
-                 'name': 'UTF-16BE'}
-
-# UCS2-LE
-
-UCS2LE_cls = (
-    0,0,0,0,0,0,0,0,  # 00 - 07
-    0,0,1,0,0,2,0,0,  # 08 - 0f
-    0,0,0,0,0,0,0,0,  # 10 - 17
-    0,0,0,3,0,0,0,0,  # 18 - 1f
-    0,0,0,0,0,0,0,0,  # 20 - 27
-    0,3,3,3,3,3,0,0,  # 28 - 2f
-    0,0,0,0,0,0,0,0,  # 30 - 37
-    0,0,0,0,0,0,0,0,  # 38 - 3f
-    0,0,0,0,0,0,0,0,  # 40 - 47
-    0,0,0,0,0,0,0,0,  # 48 - 4f
-    0,0,0,0,0,0,0,0,  # 50 - 57
-    0,0,0,0,0,0,0,0,  # 58 - 5f
-    0,0,0,0,0,0,0,0,  # 60 - 67
-    0,0,0,0,0,0,0,0,  # 68 - 6f
-    0,0,0,0,0,0,0,0,  # 70 - 77
-    0,0,0,0,0,0,0,0,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,0,0,0,0,0,0,0,  # a0 - a7
-    0,0,0,0,0,0,0,0,  # a8 - af
-    0,0,0,0,0,0,0,0,  # b0 - b7
-    0,0,0,0,0,0,0,0,  # b8 - bf
-    0,0,0,0,0,0,0,0,  # c0 - c7
-    0,0,0,0,0,0,0,0,  # c8 - cf
-    0,0,0,0,0,0,0,0,  # d0 - d7
-    0,0,0,0,0,0,0,0,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,4,5   # f8 - ff
-)
-
-UCS2LE_st = (
-          6,     6,     7,     6,     4,     3,eError,eError,#00-07
-     eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
-     eItsMe,eItsMe,     5,     5,     5,eError,eItsMe,eError,#10-17
-          5,     5,     5,eError,     5,eError,     6,     6,#18-1f
-          7,     6,     8,     8,     5,     5,     5,eError,#20-27
-          5,     5,     5,eError,eError,eError,     5,     5,#28-2f
-          5,     5,     5,eError,     5,eError,eStart,eStart #30-37
-)
-
-UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
-
-UCS2LESMModel = {'classTable': UCS2LE_cls,
-                 'classFactor': 6,
-                 'stateTable': UCS2LE_st,
-                 'charLenTable': UCS2LECharLenTable,
-                 'name': 'UTF-16LE'}
-
-# UTF-8
-
-UTF8_cls = (
-    1,1,1,1,1,1,1,1,  # 00 - 07  #allow 0x00 as a legal value
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    1,1,1,1,1,1,1,1,  # 40 - 47
-    1,1,1,1,1,1,1,1,  # 48 - 4f
-    1,1,1,1,1,1,1,1,  # 50 - 57
-    1,1,1,1,1,1,1,1,  # 58 - 5f
-    1,1,1,1,1,1,1,1,  # 60 - 67
-    1,1,1,1,1,1,1,1,  # 68 - 6f
-    1,1,1,1,1,1,1,1,  # 70 - 77
-    1,1,1,1,1,1,1,1,  # 78 - 7f
-    2,2,2,2,3,3,3,3,  # 80 - 87
-    4,4,4,4,4,4,4,4,  # 88 - 8f
-    4,4,4,4,4,4,4,4,  # 90 - 97
-    4,4,4,4,4,4,4,4,  # 98 - 9f
-    5,5,5,5,5,5,5,5,  # a0 - a7
-    5,5,5,5,5,5,5,5,  # a8 - af
-    5,5,5,5,5,5,5,5,  # b0 - b7
-    5,5,5,5,5,5,5,5,  # b8 - bf
-    0,0,6,6,6,6,6,6,  # c0 - c7
-    6,6,6,6,6,6,6,6,  # c8 - cf
-    6,6,6,6,6,6,6,6,  # d0 - d7
-    6,6,6,6,6,6,6,6,  # d8 - df
-    7,8,8,8,8,8,8,8,  # e0 - e7
-    8,8,8,8,8,9,8,8,  # e8 - ef
-    10,11,11,11,11,11,11,11,  # f0 - f7
-    12,13,13,13,14,15,0,0    # f8 - ff
-)
-
-UTF8_st = (
-    eError,eStart,eError,eError,eError,eError,     12,   10,#00-07
-         9,     11,     8,     7,     6,     5,     4,    3,#08-0f
-    eError,eError,eError,eError,eError,eError,eError,eError,#10-17
-    eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
-    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
-    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
-    eError,eError,     5,     5,     5,     5,eError,eError,#30-37
-    eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
-    eError,eError,eError,     5,     5,     5,eError,eError,#40-47
-    eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
-    eError,eError,     7,     7,     7,     7,eError,eError,#50-57
-    eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
-    eError,eError,eError,eError,     7,     7,eError,eError,#60-67
-    eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
-    eError,eError,     9,     9,     9,     9,eError,eError,#70-77
-    eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
-    eError,eError,eError,eError,eError,     9,eError,eError,#80-87
-    eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
-    eError,eError,    12,    12,    12,    12,eError,eError,#90-97
-    eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
-    eError,eError,eError,eError,eError,    12,eError,eError,#a0-a7
-    eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
-    eError,eError,    12,    12,    12,eError,eError,eError,#b0-b7
-    eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
-    eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
-    eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
-)
-
-UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
-
-UTF8SMModel = {'classTable': UTF8_cls,
-               'classFactor': 16,
-               'stateTable': UTF8_st,
-               'charLenTable': UTF8CharLenTable,
-               'name': 'UTF-8'}
diff --git a/python/ext-libs/requests/packages/chardet/sbcharsetprober.py b/python/ext-libs/requests/packages/chardet/sbcharsetprober.py
deleted file mode 100644
index 37291bd..0000000
--- a/python/ext-libs/requests/packages/chardet/sbcharsetprober.py
+++ /dev/null
@@ -1,120 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-from . import constants
-from .charsetprober import CharSetProber
-from .compat import wrap_ord
-
-SAMPLE_SIZE = 64
-SB_ENOUGH_REL_THRESHOLD = 1024
-POSITIVE_SHORTCUT_THRESHOLD = 0.95
-NEGATIVE_SHORTCUT_THRESHOLD = 0.05
-SYMBOL_CAT_ORDER = 250
-NUMBER_OF_SEQ_CAT = 4
-POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
-#NEGATIVE_CAT = 0
-
-
-class SingleByteCharSetProber(CharSetProber):
-    def __init__(self, model, reversed=False, nameProber=None):
-        CharSetProber.__init__(self)
-        self._mModel = model
-        # TRUE if we need to reverse every pair in the model lookup
-        self._mReversed = reversed
-        # Optional auxiliary prober for name decision
-        self._mNameProber = nameProber
-        self.reset()
-
-    def reset(self):
-        CharSetProber.reset(self)
-        # char order of last character
-        self._mLastOrder = 255
-        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
-        self._mTotalSeqs = 0
-        self._mTotalChar = 0
-        # characters that fall in our sampling range
-        self._mFreqChar = 0
-
-    def get_charset_name(self):
-        if self._mNameProber:
-            return self._mNameProber.get_charset_name()
-        else:
-            return self._mModel['charsetName']
-
-    def feed(self, aBuf):
-        if not self._mModel['keepEnglishLetter']:
-            aBuf = self.filter_without_english_letters(aBuf)
-        aLen = len(aBuf)
-        if not aLen:
-            return self.get_state()
-        for c in aBuf:
-            order = self._mModel['charToOrderMap'][wrap_ord(c)]
-            if order < SYMBOL_CAT_ORDER:
-                self._mTotalChar += 1
-            if order < SAMPLE_SIZE:
-                self._mFreqChar += 1
-                if self._mLastOrder < SAMPLE_SIZE:
-                    self._mTotalSeqs += 1
-                    if not self._mReversed:
-                        i = (self._mLastOrder * SAMPLE_SIZE) + order
-                        model = self._mModel['precedenceMatrix'][i]
-                    else:  # reverse the order of the letters in the lookup
-                        i = (order * SAMPLE_SIZE) + self._mLastOrder
-                        model = self._mModel['precedenceMatrix'][i]
-                    self._mSeqCounters[model] += 1
-            self._mLastOrder = order
-
-        if self.get_state() == constants.eDetecting:
-            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
-                cf = self.get_confidence()
-                if cf > POSITIVE_SHORTCUT_THRESHOLD:
-                    if constants._debug:
-                        sys.stderr.write('%s confidence = %s, we have a'
-                                         'winner\n' %
-                                         (self._mModel['charsetName'], cf))
-                    self._mState = constants.eFoundIt
-                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
-                    if constants._debug:
-                        sys.stderr.write('%s confidence = %s, below negative'
-                                         'shortcut threshhold %s\n' %
-                                         (self._mModel['charsetName'], cf,
-                                          NEGATIVE_SHORTCUT_THRESHOLD))
-                    self._mState = constants.eNotMe
-
-        return self.get_state()
-
-    def get_confidence(self):
-        r = 0.01
-        if self._mTotalSeqs > 0:
-            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
-                 / self._mModel['mTypicalPositiveRatio'])
-            r = r * self._mFreqChar / self._mTotalChar
-            if r >= 1.0:
-                r = 0.99
-        return r
diff --git a/python/ext-libs/requests/packages/chardet/sbcsgroupprober.py b/python/ext-libs/requests/packages/chardet/sbcsgroupprober.py
deleted file mode 100644
index 1b6196c..0000000
--- a/python/ext-libs/requests/packages/chardet/sbcsgroupprober.py
+++ /dev/null
@@ -1,69 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetgroupprober import CharSetGroupProber
-from .sbcharsetprober import SingleByteCharSetProber
-from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
-                                Latin5CyrillicModel, MacCyrillicModel,
-                                Ibm866Model, Ibm855Model)
-from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
-from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
-from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
-from .langthaimodel import TIS620ThaiModel
-from .langhebrewmodel import Win1255HebrewModel
-from .hebrewprober import HebrewProber
-
-
-class SBCSGroupProber(CharSetGroupProber):
-    def __init__(self):
-        CharSetGroupProber.__init__(self)
-        self._mProbers = [
-            SingleByteCharSetProber(Win1251CyrillicModel),
-            SingleByteCharSetProber(Koi8rModel),
-            SingleByteCharSetProber(Latin5CyrillicModel),
-            SingleByteCharSetProber(MacCyrillicModel),
-            SingleByteCharSetProber(Ibm866Model),
-            SingleByteCharSetProber(Ibm855Model),
-            SingleByteCharSetProber(Latin7GreekModel),
-            SingleByteCharSetProber(Win1253GreekModel),
-            SingleByteCharSetProber(Latin5BulgarianModel),
-            SingleByteCharSetProber(Win1251BulgarianModel),
-            SingleByteCharSetProber(Latin2HungarianModel),
-            SingleByteCharSetProber(Win1250HungarianModel),
-            SingleByteCharSetProber(TIS620ThaiModel),
-        ]
-        hebrewProber = HebrewProber()
-        logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
-                                                      False, hebrewProber)
-        visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
-                                                     hebrewProber)
-        hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
-        self._mProbers.extend([hebrewProber, logicalHebrewProber,
-                               visualHebrewProber])
-
-        self.reset()
diff --git a/python/ext-libs/requests/packages/chardet/sjisprober.py b/python/ext-libs/requests/packages/chardet/sjisprober.py
deleted file mode 100644
index cd0e9e7..0000000
--- a/python/ext-libs/requests/packages/chardet/sjisprober.py
+++ /dev/null
@@ -1,91 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import SJISDistributionAnalysis
-from .jpcntx import SJISContextAnalysis
-from .mbcssm import SJISSMModel
-from . import constants
-
-
-class SJISProber(MultiByteCharSetProber):
-    def __init__(self):
-        MultiByteCharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(SJISSMModel)
-        self._mDistributionAnalyzer = SJISDistributionAnalysis()
-        self._mContextAnalyzer = SJISContextAnalysis()
-        self.reset()
-
-    def reset(self):
-        MultiByteCharSetProber.reset(self)
-        self._mContextAnalyzer.reset()
-
-    def get_charset_name(self):
-        return self._mContextAnalyzer.get_charset_name()
-
-    def feed(self, aBuf):
-        aLen = len(aBuf)
-        for i in range(0, aLen):
-            codingState = self._mCodingSM.next_state(aBuf[i])
-            if codingState == constants.eError:
-                if constants._debug:
-                    sys.stderr.write(self.get_charset_name()
-                                     + ' prober hit error at byte ' + str(i)
-                                     + '\n')
-                self._mState = constants.eNotMe
-                break
-            elif codingState == constants.eItsMe:
-                self._mState = constants.eFoundIt
-                break
-            elif codingState == constants.eStart:
-                charLen = self._mCodingSM.get_current_charlen()
-                if i == 0:
-                    self._mLastChar[1] = aBuf[0]
-                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
-                                                charLen)
-                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
-                else:
-                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
-                                                     - charLen], charLen)
-                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
-                                                     charLen)
-
-        self._mLastChar[0] = aBuf[aLen - 1]
-
-        if self.get_state() == constants.eDetecting:
-            if (self._mContextAnalyzer.got_enough_data() and
-               (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
-                self._mState = constants.eFoundIt
-
-        return self.get_state()
-
-    def get_confidence(self):
-        contxtCf = self._mContextAnalyzer.get_confidence()
-        distribCf = self._mDistributionAnalyzer.get_confidence()
-        return max(contxtCf, distribCf)
diff --git a/python/ext-libs/requests/packages/chardet/universaldetector.py b/python/ext-libs/requests/packages/chardet/universaldetector.py
deleted file mode 100644
index 476522b..0000000
--- a/python/ext-libs/requests/packages/chardet/universaldetector.py
+++ /dev/null
@@ -1,170 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-import sys
-import codecs
-from .latin1prober import Latin1Prober  # windows-1252
-from .mbcsgroupprober import MBCSGroupProber  # multi-byte character sets
-from .sbcsgroupprober import SBCSGroupProber  # single-byte character sets
-from .escprober import EscCharSetProber  # ISO-2122, etc.
-import re
-
-MINIMUM_THRESHOLD = 0.20
-ePureAscii = 0
-eEscAscii = 1
-eHighbyte = 2
-
-
-class UniversalDetector:
-    def __init__(self):
-        self._highBitDetector = re.compile(b'[\x80-\xFF]')
-        self._escDetector = re.compile(b'(\033|~{)')
-        self._mEscCharSetProber = None
-        self._mCharSetProbers = []
-        self.reset()
-
-    def reset(self):
-        self.result = {'encoding': None, 'confidence': 0.0}
-        self.done = False
-        self._mStart = True
-        self._mGotData = False
-        self._mInputState = ePureAscii
-        self._mLastChar = b''
-        if self._mEscCharSetProber:
-            self._mEscCharSetProber.reset()
-        for prober in self._mCharSetProbers:
-            prober.reset()
-
-    def feed(self, aBuf):
-        if self.done:
-            return
-
-        aLen = len(aBuf)
-        if not aLen:
-            return
-
-        if not self._mGotData:
-            # If the data starts with BOM, we know it is UTF
-            if aBuf[:3] == codecs.BOM_UTF8:
-                # EF BB BF  UTF-8 with BOM
-                self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
-            elif aBuf[:4] == codecs.BOM_UTF32_LE:
-                # FF FE 00 00  UTF-32, little-endian BOM
-                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
-            elif aBuf[:4] == codecs.BOM_UTF32_BE:
-                # 00 00 FE FF  UTF-32, big-endian BOM
-                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
-            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
-                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
-                self.result = {
-                    'encoding': "X-ISO-10646-UCS-4-3412",
-                    'confidence': 1.0
-                }
-            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
-                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
-                self.result = {
-                    'encoding': "X-ISO-10646-UCS-4-2143",
-                    'confidence': 1.0
-                }
-            elif aBuf[:2] == codecs.BOM_LE:
-                # FF FE  UTF-16, little endian BOM
-                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
-            elif aBuf[:2] == codecs.BOM_BE:
-                # FE FF  UTF-16, big endian BOM
-                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
-
-        self._mGotData = True
-        if self.result['encoding'] and (self.result['confidence'] > 0.0):
-            self.done = True
-            return
-
-        if self._mInputState == ePureAscii:
-            if self._highBitDetector.search(aBuf):
-                self._mInputState = eHighbyte
-            elif ((self._mInputState == ePureAscii) and
-                    self._escDetector.search(self._mLastChar + aBuf)):
-                self._mInputState = eEscAscii
-
-        self._mLastChar = aBuf[-1:]
-
-        if self._mInputState == eEscAscii:
-            if not self._mEscCharSetProber:
-                self._mEscCharSetProber = EscCharSetProber()
-            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
-                self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
-                               'confidence': self._mEscCharSetProber.get_confidence()}
-                self.done = True
-        elif self._mInputState == eHighbyte:
-            if not self._mCharSetProbers:
-                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
-                                         Latin1Prober()]
-            for prober in self._mCharSetProbers:
-                if prober.feed(aBuf) == constants.eFoundIt:
-                    self.result = {'encoding': prober.get_charset_name(),
-                                   'confidence': prober.get_confidence()}
-                    self.done = True
-                    break
-
-    def close(self):
-        if self.done:
-            return
-        if not self._mGotData:
-            if constants._debug:
-                sys.stderr.write('no data received!\n')
-            return
-        self.done = True
-
-        if self._mInputState == ePureAscii:
-            self.result = {'encoding': 'ascii', 'confidence': 1.0}
-            return self.result
-
-        if self._mInputState == eHighbyte:
-            proberConfidence = None
-            maxProberConfidence = 0.0
-            maxProber = None
-            for prober in self._mCharSetProbers:
-                if not prober:
-                    continue
-                proberConfidence = prober.get_confidence()
-                if proberConfidence > maxProberConfidence:
-                    maxProberConfidence = proberConfidence
-                    maxProber = prober
-            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
-                self.result = {'encoding': maxProber.get_charset_name(),
-                               'confidence': maxProber.get_confidence()}
-                return self.result
-
-        if constants._debug:
-            sys.stderr.write('no probers hit minimum threshhold\n')
-            for prober in self._mCharSetProbers[0].mProbers:
-                if not prober:
-                    continue
-                sys.stderr.write('%s confidence = %s\n' %
-                                 (prober.get_charset_name(),
-                                  prober.get_confidence()))
diff --git a/python/ext-libs/requests/packages/chardet/utf8prober.py b/python/ext-libs/requests/packages/chardet/utf8prober.py
deleted file mode 100644
index 1c0bb5d..0000000
--- a/python/ext-libs/requests/packages/chardet/utf8prober.py
+++ /dev/null
@@ -1,76 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from . import constants
-from .charsetprober import CharSetProber
-from .codingstatemachine import CodingStateMachine
-from .mbcssm import UTF8SMModel
-
-ONE_CHAR_PROB = 0.5
-
-
-class UTF8Prober(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mCodingSM = CodingStateMachine(UTF8SMModel)
-        self.reset()
-
-    def reset(self):
-        CharSetProber.reset(self)
-        self._mCodingSM.reset()
-        self._mNumOfMBChar = 0
-
-    def get_charset_name(self):
-        return "utf-8"
-
-    def feed(self, aBuf):
-        for c in aBuf:
-            codingState = self._mCodingSM.next_state(c)
-            if codingState == constants.eError:
-                self._mState = constants.eNotMe
-                break
-            elif codingState == constants.eItsMe:
-                self._mState = constants.eFoundIt
-                break
-            elif codingState == constants.eStart:
-                if self._mCodingSM.get_current_charlen() >= 2:
-                    self._mNumOfMBChar += 1
-
-        if self.get_state() == constants.eDetecting:
-            if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
-                self._mState = constants.eFoundIt
-
-        return self.get_state()
-
-    def get_confidence(self):
-        unlike = 0.99
-        if self._mNumOfMBChar < 6:
-            for i in range(0, self._mNumOfMBChar):
-                unlike = unlike * ONE_CHAR_PROB
-            return 1.0 - unlike
-        else:
-            return unlike
diff --git a/python/ext-libs/requests/packages/urllib3/__init__.py b/python/ext-libs/requests/packages/urllib3/__init__.py
deleted file mode 100644
index 7366899..0000000
--- a/python/ext-libs/requests/packages/urllib3/__init__.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""
-urllib3 - Thread-safe connection pooling and re-using.
-"""
-
-from __future__ import absolute_import
-import warnings
-
-from .connectionpool import (
-    HTTPConnectionPool,
-    HTTPSConnectionPool,
-    connection_from_url
-)
-
-from . import exceptions
-from .filepost import encode_multipart_formdata
-from .poolmanager import PoolManager, ProxyManager, proxy_from_url
-from .response import HTTPResponse
-from .util.request import make_headers
-from .util.url import get_host
-from .util.timeout import Timeout
-from .util.retry import Retry
-
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-try:  # Python 2.7+
-    from logging import NullHandler
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-
-__author__ = 'Andrey Petrov (andrey.petrov at shazow.net)'
-__license__ = 'MIT'
-__version__ = '1.15.1'
-
-__all__ = (
-    'HTTPConnectionPool',
-    'HTTPSConnectionPool',
-    'PoolManager',
-    'ProxyManager',
-    'HTTPResponse',
-    'Retry',
-    'Timeout',
-    'add_stderr_logger',
-    'connection_from_url',
-    'disable_warnings',
-    'encode_multipart_formdata',
-    'get_host',
-    'make_headers',
-    'proxy_from_url',
-)
-
-logging.getLogger(__name__).addHandler(NullHandler())
-
-
-def add_stderr_logger(level=logging.DEBUG):
-    """
-    Helper for quickly adding a StreamHandler to the logger. Useful for
-    debugging.
-
-    Returns the handler after adding it.
-    """
-    # This method needs to be in this __init__.py to get the __name__ correct
-    # even if urllib3 is vendored within another package.
-    logger = logging.getLogger(__name__)
-    handler = logging.StreamHandler()
-    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
-    logger.addHandler(handler)
-    logger.setLevel(level)
-    logger.debug('Added a stderr logging handler to logger: %s', __name__)
-    return handler
-
-# ... Clean up.
-del NullHandler
-
-
-# All warning filters *must* be appended unless you're really certain that they
-# shouldn't be: otherwise, it's very hard for users to use most Python
-# mechanisms to silence them.
-# SecurityWarning's always go off by default.
-warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
-# SubjectAltNameWarning's should go off once per host
-warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
-# InsecurePlatformWarning's don't vary between requests, so we keep it default.
-warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
-                      append=True)
-# SNIMissingWarnings should go off only once.
-warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)
-
-
-def disable_warnings(category=exceptions.HTTPWarning):
-    """
-    Helper for quickly disabling all urllib3 warnings.
-    """
-    warnings.simplefilter('ignore', category)
diff --git a/python/ext-libs/requests/packages/urllib3/_collections.py b/python/ext-libs/requests/packages/urllib3/_collections.py
deleted file mode 100644
index 77cee01..0000000
--- a/python/ext-libs/requests/packages/urllib3/_collections.py
+++ /dev/null
@@ -1,324 +0,0 @@
-from __future__ import absolute_import
-from collections import Mapping, MutableMapping
-try:
-    from threading import RLock
-except ImportError:  # Platform-specific: No threads available
-    class RLock:
-        def __enter__(self):
-            pass
-
-        def __exit__(self, exc_type, exc_value, traceback):
-            pass
-
-
-try:  # Python 2.7+
-    from collections import OrderedDict
-except ImportError:
-    from .packages.ordered_dict import OrderedDict
-from .packages.six import iterkeys, itervalues, PY3
-
-
-__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
-
-
-_Null = object()
-
-
-class RecentlyUsedContainer(MutableMapping):
-    """
-    Provides a thread-safe dict-like container which maintains up to
-    ``maxsize`` keys while throwing away the least-recently-used keys beyond
-    ``maxsize``.
-
-    :param maxsize:
-        Maximum number of recent elements to retain.
-
-    :param dispose_func:
-        Every time an item is evicted from the container,
-        ``dispose_func(value)`` is called.  Callback which will get called
-    """
-
-    ContainerCls = OrderedDict
-
-    def __init__(self, maxsize=10, dispose_func=None):
-        self._maxsize = maxsize
-        self.dispose_func = dispose_func
-
-        self._container = self.ContainerCls()
-        self.lock = RLock()
-
-    def __getitem__(self, key):
-        # Re-insert the item, moving it to the end of the eviction line.
-        with self.lock:
-            item = self._container.pop(key)
-            self._container[key] = item
-            return item
-
-    def __setitem__(self, key, value):
-        evicted_value = _Null
-        with self.lock:
-            # Possibly evict the existing value of 'key'
-            evicted_value = self._container.get(key, _Null)
-            self._container[key] = value
-
-            # If we didn't evict an existing value, we might have to evict the
-            # least recently used item from the beginning of the container.
-            if len(self._container) > self._maxsize:
-                _key, evicted_value = self._container.popitem(last=False)
-
-        if self.dispose_func and evicted_value is not _Null:
-            self.dispose_func(evicted_value)
-
-    def __delitem__(self, key):
-        with self.lock:
-            value = self._container.pop(key)
-
-        if self.dispose_func:
-            self.dispose_func(value)
-
-    def __len__(self):
-        with self.lock:
-            return len(self._container)
-
-    def __iter__(self):
-        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
-
-    def clear(self):
-        with self.lock:
-            # Copy pointers to all values, then wipe the mapping
-            values = list(itervalues(self._container))
-            self._container.clear()
-
-        if self.dispose_func:
-            for value in values:
-                self.dispose_func(value)
-
-    def keys(self):
-        with self.lock:
-            return list(iterkeys(self._container))
-
-
-class HTTPHeaderDict(MutableMapping):
-    """
-    :param headers:
-        An iterable of field-value pairs. Must not contain multiple field names
-        when compared case-insensitively.
-
-    :param kwargs:
-        Additional field-value pairs to pass in to ``dict.update``.
-
-    A ``dict`` like container for storing HTTP Headers.
-
-    Field names are stored and compared case-insensitively in compliance with
-    RFC 7230. Iteration provides the first case-sensitive key seen for each
-    case-insensitive pair.
-
-    Using ``__setitem__`` syntax overwrites fields that compare equal
-    case-insensitively in order to maintain ``dict``'s api. For fields that
-    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
-    in a loop.
-
-    If multiple fields that are equal case-insensitively are passed to the
-    constructor or ``.update``, the behavior is undefined and some will be
-    lost.
-
-    >>> headers = HTTPHeaderDict()
-    >>> headers.add('Set-Cookie', 'foo=bar')
-    >>> headers.add('set-cookie', 'baz=quxx')
-    >>> headers['content-length'] = '7'
-    >>> headers['SET-cookie']
-    'foo=bar, baz=quxx'
-    >>> headers['Content-Length']
-    '7'
-    """
-
-    def __init__(self, headers=None, **kwargs):
-        super(HTTPHeaderDict, self).__init__()
-        self._container = OrderedDict()
-        if headers is not None:
-            if isinstance(headers, HTTPHeaderDict):
-                self._copy_from(headers)
-            else:
-                self.extend(headers)
-        if kwargs:
-            self.extend(kwargs)
-
-    def __setitem__(self, key, val):
-        self._container[key.lower()] = (key, val)
-        return self._container[key.lower()]
-
-    def __getitem__(self, key):
-        val = self._container[key.lower()]
-        return ', '.join(val[1:])
-
-    def __delitem__(self, key):
-        del self._container[key.lower()]
-
-    def __contains__(self, key):
-        return key.lower() in self._container
-
-    def __eq__(self, other):
-        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
-            return False
-        if not isinstance(other, type(self)):
-            other = type(self)(other)
-        return (dict((k.lower(), v) for k, v in self.itermerged()) ==
-                dict((k.lower(), v) for k, v in other.itermerged()))
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    if not PY3:  # Python 2
-        iterkeys = MutableMapping.iterkeys
-        itervalues = MutableMapping.itervalues
-
-    __marker = object()
-
-    def __len__(self):
-        return len(self._container)
-
-    def __iter__(self):
-        # Only provide the originally cased names
-        for vals in self._container.values():
-            yield vals[0]
-
-    def pop(self, key, default=__marker):
-        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-          If key is not found, d is returned if given, otherwise KeyError is raised.
-        '''
-        # Using the MutableMapping function directly fails due to the private marker.
-        # Using ordinary dict.pop would expose the internal structures.
-        # So let's reinvent the wheel.
-        try:
-            value = self[key]
-        except KeyError:
-            if default is self.__marker:
-                raise
-            return default
-        else:
-            del self[key]
-            return value
-
-    def discard(self, key):
-        try:
-            del self[key]
-        except KeyError:
-            pass
-
-    def add(self, key, val):
-        """Adds a (name, value) pair, doesn't overwrite the value if it already
-        exists.
-
-        >>> headers = HTTPHeaderDict(foo='bar')
-        >>> headers.add('Foo', 'baz')
-        >>> headers['foo']
-        'bar, baz'
-        """
-        key_lower = key.lower()
-        new_vals = key, val
-        # Keep the common case aka no item present as fast as possible
-        vals = self._container.setdefault(key_lower, new_vals)
-        if new_vals is not vals:
-            # new_vals was not inserted, as there was a previous one
-            if isinstance(vals, list):
-                # If already several items got inserted, we have a list
-                vals.append(val)
-            else:
-                # vals should be a tuple then, i.e. only one item so far
-                # Need to convert the tuple to list for further extension
-                self._container[key_lower] = [vals[0], vals[1], val]
-
-    def extend(self, *args, **kwargs):
-        """Generic import function for any type of header-like object.
-        Adapted version of MutableMapping.update in order to insert items
-        with self.add instead of self.__setitem__
-        """
-        if len(args) > 1:
-            raise TypeError("extend() takes at most 1 positional "
-                            "arguments ({0} given)".format(len(args)))
-        other = args[0] if len(args) >= 1 else ()
-
-        if isinstance(other, HTTPHeaderDict):
-            for key, val in other.iteritems():
-                self.add(key, val)
-        elif isinstance(other, Mapping):
-            for key in other:
-                self.add(key, other[key])
-        elif hasattr(other, "keys"):
-            for key in other.keys():
-                self.add(key, other[key])
-        else:
-            for key, value in other:
-                self.add(key, value)
-
-        for key, value in kwargs.items():
-            self.add(key, value)
-
-    def getlist(self, key):
-        """Returns a list of all the values for the named field. Returns an
-        empty list if the key doesn't exist."""
-        try:
-            vals = self._container[key.lower()]
-        except KeyError:
-            return []
-        else:
-            if isinstance(vals, tuple):
-                return [vals[1]]
-            else:
-                return vals[1:]
-
-    # Backwards compatibility for httplib
-    getheaders = getlist
-    getallmatchingheaders = getlist
-    iget = getlist
-
-    def __repr__(self):
-        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
-
-    def _copy_from(self, other):
-        for key in other:
-            val = other.getlist(key)
-            if isinstance(val, list):
-                # Don't need to convert tuples
-                val = list(val)
-            self._container[key.lower()] = [key] + val
-
-    def copy(self):
-        clone = type(self)()
-        clone._copy_from(self)
-        return clone
-
-    def iteritems(self):
-        """Iterate over all header lines, including duplicate ones."""
-        for key in self:
-            vals = self._container[key.lower()]
-            for val in vals[1:]:
-                yield vals[0], val
-
-    def itermerged(self):
-        """Iterate over all headers, merging duplicate ones together."""
-        for key in self:
-            val = self._container[key.lower()]
-            yield val[0], ', '.join(val[1:])
-
-    def items(self):
-        return list(self.iteritems())
-
-    @classmethod
-    def from_httplib(cls, message):  # Python 2
-        """Read headers from a Python 2 httplib message object."""
-        # python2.7 does not expose a proper API for exporting multiheaders
-        # efficiently. This function re-reads raw lines from the message
-        # object and extracts the multiheaders properly.
-        headers = []
-
-        for line in message.headers:
-            if line.startswith((' ', '\t')):
-                key, value = headers[-1]
-                headers[-1] = (key, value + '\r\n' + line.rstrip())
-                continue
-
-            key, value = line.split(':', 1)
-            headers.append((key, value.strip()))
-
-        return cls(headers)
diff --git a/python/ext-libs/requests/packages/urllib3/connection.py b/python/ext-libs/requests/packages/urllib3/connection.py
deleted file mode 100644
index 5ce0080..0000000
--- a/python/ext-libs/requests/packages/urllib3/connection.py
+++ /dev/null
@@ -1,330 +0,0 @@
-from __future__ import absolute_import
-import datetime
-import logging
-import os
-import sys
-import socket
-from socket import error as SocketError, timeout as SocketTimeout
-import warnings
-from .packages import six
-
-try:  # Python 3
-    from http.client import HTTPConnection as _HTTPConnection
-    from http.client import HTTPException  # noqa: unused in this module
-except ImportError:
-    from httplib import HTTPConnection as _HTTPConnection
-    from httplib import HTTPException  # noqa: unused in this module
-
-try:  # Compiled with SSL?
-    import ssl
-    BaseSSLError = ssl.SSLError
-except (ImportError, AttributeError):  # Platform-specific: No SSL.
-    ssl = None
-
-    class BaseSSLError(BaseException):
-        pass
-
-
-try:  # Python 3:
-    # Not a no-op, we're adding this to the namespace so it can be imported.
-    ConnectionError = ConnectionError
-except NameError:  # Python 2:
-    class ConnectionError(Exception):
-        pass
-
-
-from .exceptions import (
-    NewConnectionError,
-    ConnectTimeoutError,
-    SubjectAltNameWarning,
-    SystemTimeWarning,
-)
-from .packages.ssl_match_hostname import match_hostname, CertificateError
-
-from .util.ssl_ import (
-    resolve_cert_reqs,
-    resolve_ssl_version,
-    ssl_wrap_socket,
-    assert_fingerprint,
-)
-
-
-from .util import connection
-
-from ._collections import HTTPHeaderDict
-
-log = logging.getLogger(__name__)
-
-port_by_scheme = {
-    'http': 80,
-    'https': 443,
-}
-
-RECENT_DATE = datetime.date(2014, 1, 1)
-
-
-class DummyConnection(object):
-    """Used to detect a failed ConnectionCls import."""
-    pass
-
-
-class HTTPConnection(_HTTPConnection, object):
-    """
-    Based on httplib.HTTPConnection but provides an extra constructor
-    backwards-compatibility layer between older and newer Pythons.
-
-    Additional keyword parameters are used to configure attributes of the connection.
-    Accepted parameters include:
-
-      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
-      - ``source_address``: Set the source address for the current connection.
-
-        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
-
-      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
-        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
-        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
-
-        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
-        you might pass::
-
-            HTTPConnection.default_socket_options + [
-                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
-            ]
-
-        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
-    """
-
-    default_port = port_by_scheme['http']
-
-    #: Disable Nagle's algorithm by default.
-    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
-    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
-
-    #: Whether this connection verifies the host's certificate.
-    is_verified = False
-
-    def __init__(self, *args, **kw):
-        if six.PY3:  # Python 3
-            kw.pop('strict', None)
-
-        # Pre-set source_address in case we have an older Python like 2.6.
-        self.source_address = kw.get('source_address')
-
-        if sys.version_info < (2, 7):  # Python 2.6
-            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
-            # not newer versions. We can still use it when creating a
-            # connection though, so we pop it *after* we have saved it as
-            # self.source_address.
-            kw.pop('source_address', None)
-
-        #: The socket options provided by the user. If no options are
-        #: provided, we use the default options.
-        self.socket_options = kw.pop('socket_options', self.default_socket_options)
-
-        # Superclass also sets self.source_address in Python 2.7+.
-        _HTTPConnection.__init__(self, *args, **kw)
-
-    def _new_conn(self):
-        """ Establish a socket connection and set nodelay settings on it.
-
-        :return: New socket connection.
-        """
-        extra_kw = {}
-        if self.source_address:
-            extra_kw['source_address'] = self.source_address
-
-        if self.socket_options:
-            extra_kw['socket_options'] = self.socket_options
-
-        try:
-            conn = connection.create_connection(
-                (self.host, self.port), self.timeout, **extra_kw)
-
-        except SocketTimeout as e:
-            raise ConnectTimeoutError(
-                self, "Connection to %s timed out. (connect timeout=%s)" %
-                (self.host, self.timeout))
-
-        except SocketError as e:
-            raise NewConnectionError(
-                self, "Failed to establish a new connection: %s" % e)
-
-        return conn
-
-    def _prepare_conn(self, conn):
-        self.sock = conn
-        # the _tunnel_host attribute was added in python 2.6.3 (via
-        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
-        # not have them.
-        if getattr(self, '_tunnel_host', None):
-            # TODO: Fix tunnel so it doesn't depend on self.sock state.
-            self._tunnel()
-            # Mark this connection as not reusable
-            self.auto_open = 0
-
-    def connect(self):
-        conn = self._new_conn()
-        self._prepare_conn(conn)
-
-    def request_chunked(self, method, url, body=None, headers=None):
-        """
-        Alternative to the common request method, which sends the
-        body with chunked encoding and not as one block
-        """
-        headers = HTTPHeaderDict(headers if headers is not None else {})
-        skip_accept_encoding = 'accept-encoding' in headers
-        self.putrequest(method, url, skip_accept_encoding=skip_accept_encoding)
-        for header, value in headers.items():
-            self.putheader(header, value)
-        if 'transfer-encoding' not in headers:
-            self.putheader('Transfer-Encoding', 'chunked')
-        self.endheaders()
-
-        if body is not None:
-            stringish_types = six.string_types + (six.binary_type,)
-            if isinstance(body, stringish_types):
-                body = (body,)
-            for chunk in body:
-                if not chunk:
-                    continue
-                if not isinstance(chunk, six.binary_type):
-                    chunk = chunk.encode('utf8')
-                len_str = hex(len(chunk))[2:]
-                self.send(len_str.encode('utf-8'))
-                self.send(b'\r\n')
-                self.send(chunk)
-                self.send(b'\r\n')
-
-        # After the if clause, to always have a closed body
-        self.send(b'0\r\n\r\n')
-
-
-class HTTPSConnection(HTTPConnection):
-    default_port = port_by_scheme['https']
-
-    def __init__(self, host, port=None, key_file=None, cert_file=None,
-                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
-
-        HTTPConnection.__init__(self, host, port, strict=strict,
-                                timeout=timeout, **kw)
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-
-        # Required property for Google AppEngine 1.9.0 which otherwise causes
-        # HTTPS requests to go out as HTTP. (See Issue #356)
-        self._protocol = 'https'
-
-    def connect(self):
-        conn = self._new_conn()
-        self._prepare_conn(conn)
-        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
-
-
-class VerifiedHTTPSConnection(HTTPSConnection):
-    """
-    Based on httplib.HTTPSConnection but wraps the socket with
-    SSL certification.
-    """
-    cert_reqs = None
-    ca_certs = None
-    ca_cert_dir = None
-    ssl_version = None
-    assert_fingerprint = None
-
-    def set_cert(self, key_file=None, cert_file=None,
-                 cert_reqs=None, ca_certs=None,
-                 assert_hostname=None, assert_fingerprint=None,
-                 ca_cert_dir=None):
-
-        if (ca_certs or ca_cert_dir) and cert_reqs is None:
-            cert_reqs = 'CERT_REQUIRED'
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-        self.cert_reqs = cert_reqs
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
-        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
-
-    def connect(self):
-        # Add certificate verification
-        conn = self._new_conn()
-
-        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
-        resolved_ssl_version = resolve_ssl_version(self.ssl_version)
-
-        hostname = self.host
-        if getattr(self, '_tunnel_host', None):
-            # _tunnel_host was added in Python 2.6.3
-            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
-
-            self.sock = conn
-            # Calls self._set_hostport(), so self.host is
-            # self._tunnel_host below.
-            self._tunnel()
-            # Mark this connection as not reusable
-            self.auto_open = 0
-
-            # Override the host with the one we're requesting data from.
-            hostname = self._tunnel_host
-
-        is_time_off = datetime.date.today() < RECENT_DATE
-        if is_time_off:
-            warnings.warn((
-                'System time is way off (before {0}). This will probably '
-                'lead to SSL verification errors').format(RECENT_DATE),
-                SystemTimeWarning
-            )
-
-        # Wrap socket using verification with the root certs in
-        # trusted_root_certs
-        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
-                                    cert_reqs=resolved_cert_reqs,
-                                    ca_certs=self.ca_certs,
-                                    ca_cert_dir=self.ca_cert_dir,
-                                    server_hostname=hostname,
-                                    ssl_version=resolved_ssl_version)
-
-        if self.assert_fingerprint:
-            assert_fingerprint(self.sock.getpeercert(binary_form=True),
-                               self.assert_fingerprint)
-        elif resolved_cert_reqs != ssl.CERT_NONE \
-                and self.assert_hostname is not False:
-            cert = self.sock.getpeercert()
-            if not cert.get('subjectAltName', ()):
-                warnings.warn((
-                    'Certificate for {0} has no `subjectAltName`, falling back to check for a '
-                    '`commonName` for now. This feature is being removed by major browsers and '
-                    'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
-                    'for details.)'.format(hostname)),
-                    SubjectAltNameWarning
-                )
-            _match_hostname(cert, self.assert_hostname or hostname)
-
-        self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
-                            self.assert_fingerprint is not None)
-
-
-def _match_hostname(cert, asserted_hostname):
-    try:
-        match_hostname(cert, asserted_hostname)
-    except CertificateError as e:
-        log.error(
-            'Certificate did not match expected hostname: %s. '
-            'Certificate: %s', asserted_hostname, cert
-        )
-        # Add cert to exception and reraise so client code can inspect
-        # the cert when catching the exception, if they want to
-        e._peer_cert = cert
-        raise
-
-
-if ssl:
-    # Make a copy for testing.
-    UnverifiedHTTPSConnection = HTTPSConnection
-    HTTPSConnection = VerifiedHTTPSConnection
-else:
-    HTTPSConnection = DummyConnection
diff --git a/python/ext-libs/requests/packages/urllib3/connectionpool.py b/python/ext-libs/requests/packages/urllib3/connectionpool.py
deleted file mode 100644
index 3fcfb12..0000000
--- a/python/ext-libs/requests/packages/urllib3/connectionpool.py
+++ /dev/null
@@ -1,849 +0,0 @@
-from __future__ import absolute_import
-import errno
-import logging
-import sys
-import warnings
-
-from socket import error as SocketError, timeout as SocketTimeout
-import socket
-
-try:  # Python 3
-    from queue import LifoQueue, Empty, Full
-except ImportError:
-    from Queue import LifoQueue, Empty, Full
-    # Queue is imported for side effects on MS Windows
-    import Queue as _unused_module_Queue  # noqa: unused
-
-
-from .exceptions import (
-    ClosedPoolError,
-    ProtocolError,
-    EmptyPoolError,
-    HeaderParsingError,
-    HostChangedError,
-    LocationValueError,
-    MaxRetryError,
-    ProxyError,
-    ReadTimeoutError,
-    SSLError,
-    TimeoutError,
-    InsecureRequestWarning,
-    NewConnectionError,
-)
-from .packages.ssl_match_hostname import CertificateError
-from .packages import six
-from .connection import (
-    port_by_scheme,
-    DummyConnection,
-    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
-    HTTPException, BaseSSLError,
-)
-from .request import RequestMethods
-from .response import HTTPResponse
-
-from .util.connection import is_connection_dropped
-from .util.response import assert_header_parsing
-from .util.retry import Retry
-from .util.timeout import Timeout
-from .util.url import get_host, Url
-
-
-xrange = six.moves.xrange
-
-log = logging.getLogger(__name__)
-
-_Default = object()
-
-
-# Pool objects
-class ConnectionPool(object):
-    """
-    Base class for all connection pools, such as
-    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
-    """
-
-    scheme = None
-    QueueCls = LifoQueue
-
-    def __init__(self, host, port=None):
-        if not host:
-            raise LocationValueError("No host specified.")
-
-        # httplib doesn't like it when we include brackets in ipv6 addresses
-        # Specifically, if we include brackets but also pass the port then
-        # httplib crazily doubles up the square brackets on the Host header.
-        # Instead, we need to make sure we never pass ``None`` as the port.
-        # However, for backward compatibility reasons we can't actually
-        # *assert* that.
-        self.host = host.strip('[]')
-        self.port = port
-
-    def __str__(self):
-        return '%s(host=%r, port=%r)' % (type(self).__name__,
-                                         self.host, self.port)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.close()
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def close():
-        """
-        Close all pooled connections and disable the pool.
-        """
-        pass
-
-
-# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
-_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
-
-
-class HTTPConnectionPool(ConnectionPool, RequestMethods):
-    """
-    Thread-safe connection pool for one host.
-
-    :param host:
-        Host used for this HTTP Connection (e.g. "localhost"), passed into
-        :class:`httplib.HTTPConnection`.
-
-    :param port:
-        Port used for this HTTP Connection (None is equivalent to 80), passed
-        into :class:`httplib.HTTPConnection`.
-
-    :param strict:
-        Causes BadStatusLine to be raised if the status line can't be parsed
-        as a valid HTTP/1.0 or 1.1 status line, passed into
-        :class:`httplib.HTTPConnection`.
-
-        .. note::
-           Only works in Python 2. This parameter is ignored in Python 3.
-
-    :param timeout:
-        Socket timeout in seconds for each individual connection. This can
-        be a float or integer, which sets the timeout for the HTTP request,
-        or an instance of :class:`urllib3.util.Timeout` which gives you more
-        fine-grained control over request timeouts. After the constructor has
-        been parsed, this is always a `urllib3.util.Timeout` object.
-
-    :param maxsize:
-        Number of connections to save that can be reused. More than 1 is useful
-        in multithreaded situations. If ``block`` is set to False, more
-        connections will be created but they will not be saved once they've
-        been used.
-
-    :param block:
-        If set to True, no more than ``maxsize`` connections will be used at
-        a time. When no free connections are available, the call will block
-        until a connection has been released. This is a useful side effect for
-        particular multithreaded situations where one does not want to use more
-        than maxsize connections per host to prevent flooding.
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-
-    :param retries:
-        Retry configuration to use by default with requests in this pool.
-
-    :param _proxy:
-        Parsed proxy URL, should not be used directly, instead, see
-        :class:`urllib3.connectionpool.ProxyManager`"
-
-    :param _proxy_headers:
-        A dictionary with proxy headers, should not be used directly,
-        instead, see :class:`urllib3.connectionpool.ProxyManager`"
-
-    :param \**conn_kw:
-        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
-        :class:`urllib3.connection.HTTPSConnection` instances.
-    """
-
-    scheme = 'http'
-    ConnectionCls = HTTPConnection
-
-    def __init__(self, host, port=None, strict=False,
-                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
-                 headers=None, retries=None,
-                 _proxy=None, _proxy_headers=None,
-                 **conn_kw):
-        ConnectionPool.__init__(self, host, port)
-        RequestMethods.__init__(self, headers)
-
-        self.strict = strict
-
-        if not isinstance(timeout, Timeout):
-            timeout = Timeout.from_float(timeout)
-
-        if retries is None:
-            retries = Retry.DEFAULT
-
-        self.timeout = timeout
-        self.retries = retries
-
-        self.pool = self.QueueCls(maxsize)
-        self.block = block
-
-        self.proxy = _proxy
-        self.proxy_headers = _proxy_headers or {}
-
-        # Fill the queue up so that doing get() on it will block properly
-        for _ in xrange(maxsize):
-            self.pool.put(None)
-
-        # These are mostly for testing and debugging purposes.
-        self.num_connections = 0
-        self.num_requests = 0
-        self.conn_kw = conn_kw
-
-        if self.proxy:
-            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
-            # We cannot know if the user has added default socket options, so we cannot replace the
-            # list.
-            self.conn_kw.setdefault('socket_options', [])
-
-    def _new_conn(self):
-        """
-        Return a fresh :class:`HTTPConnection`.
-        """
-        self.num_connections += 1
-        log.info("Starting new HTTP connection (%d): %s",
-                 self.num_connections, self.host)
-
-        conn = self.ConnectionCls(host=self.host, port=self.port,
-                                  timeout=self.timeout.connect_timeout,
-                                  strict=self.strict, **self.conn_kw)
-        return conn
-
-    def _get_conn(self, timeout=None):
-        """
-        Get a connection. Will return a pooled connection if one is available.
-
-        If no connections are available and :prop:`.block` is ``False``, then a
-        fresh connection is returned.
-
-        :param timeout:
-            Seconds to wait before giving up and raising
-            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
-            :prop:`.block` is ``True``.
-        """
-        conn = None
-        try:
-            conn = self.pool.get(block=self.block, timeout=timeout)
-
-        except AttributeError:  # self.pool is None
-            raise ClosedPoolError(self, "Pool is closed.")
-
-        except Empty:
-            if self.block:
-                raise EmptyPoolError(self,
-                                     "Pool reached maximum size and no more "
-                                     "connections are allowed.")
-            pass  # Oh well, we'll create a new connection then
-
-        # If this is a persistent connection, check if it got disconnected
-        if conn and is_connection_dropped(conn):
-            log.info("Resetting dropped connection: %s", self.host)
-            conn.close()
-            if getattr(conn, 'auto_open', 1) == 0:
-                # This is a proxied connection that has been mutated by
-                # httplib._tunnel() and cannot be reused (since it would
-                # attempt to bypass the proxy)
-                conn = None
-
-        return conn or self._new_conn()
-
-    def _put_conn(self, conn):
-        """
-        Put a connection back into the pool.
-
-        :param conn:
-            Connection object for the current host and port as returned by
-            :meth:`._new_conn` or :meth:`._get_conn`.
-
-        If the pool is already full, the connection is closed and discarded
-        because we exceeded maxsize. If connections are discarded frequently,
-        then maxsize should be increased.
-
-        If the pool is closed, then the connection will be closed and discarded.
-        """
-        try:
-            self.pool.put(conn, block=False)
-            return  # Everything is dandy, done.
-        except AttributeError:
-            # self.pool is None.
-            pass
-        except Full:
-            # This should never happen if self.block == True
-            log.warning(
-                "Connection pool is full, discarding connection: %s",
-                self.host)
-
-        # Connection never got put back into the pool, close it.
-        if conn:
-            conn.close()
-
-    def _validate_conn(self, conn):
-        """
-        Called right before a request is made, after the socket is created.
-        """
-        pass
-
-    def _prepare_proxy(self, conn):
-        # Nothing to do for HTTP connections.
-        pass
-
-    def _get_timeout(self, timeout):
-        """ Helper that always returns a :class:`urllib3.util.Timeout` """
-        if timeout is _Default:
-            return self.timeout.clone()
-
-        if isinstance(timeout, Timeout):
-            return timeout.clone()
-        else:
-            # User passed us an int/float. This is for backwards compatibility,
-            # can be removed later
-            return Timeout.from_float(timeout)
-
-    def _raise_timeout(self, err, url, timeout_value):
-        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
-
-        if isinstance(err, SocketTimeout):
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-        # See the above comment about EAGAIN in Python 3. In Python 2 we have
-        # to specifically catch it and throw the timeout error
-        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-        # Catch possible read timeouts thrown as SSL errors. If not the
-        # case, rethrow the original. We need to do this because of:
-        # http://bugs.python.org/issue10272
-        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
-                      **httplib_request_kw):
-        """
-        Perform a request on a given urllib connection object taken from our
-        pool.
-
-        :param conn:
-            a connection from one of our connection pools
-
-        :param timeout:
-            Socket timeout in seconds for the request. This can be a
-            float or integer, which will set the same timeout value for
-            the socket connect and the socket read, or an instance of
-            :class:`urllib3.util.Timeout`, which gives you more fine-grained
-            control over your timeouts.
-        """
-        self.num_requests += 1
-
-        timeout_obj = self._get_timeout(timeout)
-        timeout_obj.start_connect()
-        conn.timeout = timeout_obj.connect_timeout
-
-        # Trigger any extra validation we need to do.
-        try:
-            self._validate_conn(conn)
-        except (SocketTimeout, BaseSSLError) as e:
-            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
-            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
-            raise
-
-        # conn.request() calls httplib.*.request, not the method in
-        # urllib3.request. It also calls makefile (recv) on the socket.
-        if chunked:
-            conn.request_chunked(method, url, **httplib_request_kw)
-        else:
-            conn.request(method, url, **httplib_request_kw)
-
-        # Reset the timeout for the recv() on the socket
-        read_timeout = timeout_obj.read_timeout
-
-        # App Engine doesn't have a sock attr
-        if getattr(conn, 'sock', None):
-            # In Python 3 socket.py will catch EAGAIN and return None when you
-            # try and read into the file pointer created by http.client, which
-            # instead raises a BadStatusLine exception. Instead of catching
-            # the exception and assuming all BadStatusLine exceptions are read
-            # timeouts, check for a zero timeout before making the request.
-            if read_timeout == 0:
-                raise ReadTimeoutError(
-                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-            if read_timeout is Timeout.DEFAULT_TIMEOUT:
-                conn.sock.settimeout(socket.getdefaulttimeout())
-            else:  # None or a value
-                conn.sock.settimeout(read_timeout)
-
-        # Receive the response from the server
-        try:
-            try:  # Python 2.7, use buffering of HTTP responses
-                httplib_response = conn.getresponse(buffering=True)
-            except TypeError:  # Python 2.6 and older
-                httplib_response = conn.getresponse()
-        except (SocketTimeout, BaseSSLError, SocketError) as e:
-            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
-            raise
-
-        # AppEngine doesn't have a version attr.
-        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
-        log.debug("\"%s %s %s\" %s %s", method, url, http_version,
-                  httplib_response.status, httplib_response.length)
-
-        try:
-            assert_header_parsing(httplib_response.msg)
-        except HeaderParsingError as hpe:  # Platform-specific: Python 3
-            log.warning(
-                'Failed to parse headers (url=%s): %s',
-                self._absolute_url(url), hpe, exc_info=True)
-
-        return httplib_response
-
-    def _absolute_url(self, path):
-        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
-
-    def close(self):
-        """
-        Close all pooled connections and disable the pool.
-        """
-        # Disable access to the pool
-        old_pool, self.pool = self.pool, None
-
-        try:
-            while True:
-                conn = old_pool.get(block=False)
-                if conn:
-                    conn.close()
-
-        except Empty:
-            pass  # Done.
-
-    def is_same_host(self, url):
-        """
-        Check if the given ``url`` is a member of the same host as this
-        connection pool.
-        """
-        if url.startswith('/'):
-            return True
-
-        # TODO: Add optional support for socket.gethostbyname checking.
-        scheme, host, port = get_host(url)
-
-        # Use explicit default port for comparison when none is given
-        if self.port and not port:
-            port = port_by_scheme.get(scheme)
-        elif not self.port and port == port_by_scheme.get(scheme):
-            port = None
-
-        return (scheme, host, port) == (self.scheme, self.host, self.port)
-
-    def urlopen(self, method, url, body=None, headers=None, retries=None,
-                redirect=True, assert_same_host=True, timeout=_Default,
-                pool_timeout=None, release_conn=None, chunked=False,
-                **response_kw):
-        """
-        Get a connection from the pool and perform an HTTP request. This is the
-        lowest level call for making a request, so you'll need to specify all
-        the raw details.
-
-        .. note::
-
-           More commonly, it's appropriate to use a convenience method provided
-           by :class:`.RequestMethods`, such as :meth:`request`.
-
-        .. note::
-
-           `release_conn` will only behave as expected if
-           `preload_content=False` because we want to make
-           `preload_content=False` the default behaviour someday soon without
-           breaking backwards compatibility.
-
-        :param method:
-            HTTP request method (such as GET, POST, PUT, etc.)
-
-        :param body:
-            Data to send in the request body (useful for creating
-            POST requests, see HTTPConnectionPool.post_url for
-            more convenience).
-
-        :param headers:
-            Dictionary of custom headers to send, such as User-Agent,
-            If-None-Match, etc. If None, pool headers are used. If provided,
-            these headers completely replace any pool-specific headers.
-
-        :param retries:
-            Configure the number of retries to allow before raising a
-            :class:`~urllib3.exceptions.MaxRetryError` exception.
-
-            Pass ``None`` to retry until you receive a response. Pass a
-            :class:`~urllib3.util.retry.Retry` object for fine-grained control
-            over different types of retries.
-            Pass an integer number to retry connection errors that many times,
-            but no other types of errors. Pass zero to never retry.
-
-            If ``False``, then retries are disabled and any exception is raised
-            immediately. Also, instead of raising a MaxRetryError on redirects,
-            the redirect response will be returned.
-
-        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
-
-        :param redirect:
-            If True, automatically handle redirects (status codes 301, 302,
-            303, 307, 308). Each redirect counts as a retry. Disabling retries
-            will disable redirect, too.
-
-        :param assert_same_host:
-            If ``True``, will make sure that the host of the pool requests is
-            consistent else will raise HostChangedError. When False, you can
-            use the pool on an HTTP proxy and request foreign hosts.
-
-        :param timeout:
-            If specified, overrides the default timeout for this one
-            request. It may be a float (in seconds) or an instance of
-            :class:`urllib3.util.Timeout`.
-
-        :param pool_timeout:
-            If set and the pool is set to block=True, then this method will
-            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
-            connection is available within the time period.
-
-        :param release_conn:
-            If False, then the urlopen call will not release the connection
-            back into the pool once a response is received (but will release if
-            you read the entire contents of the response such as when
-            `preload_content=True`). This is useful if you're not preloading
-            the response's content immediately. You will need to call
-            ``r.release_conn()`` on the response ``r`` to return the connection
-            back into the pool. If None, it takes the value of
-            ``response_kw.get('preload_content', True)``.
-
-        :param chunked:
-            If True, urllib3 will send the body using chunked transfer
-            encoding. Otherwise, urllib3 will send the body using the standard
-            content-length form. Defaults to False.
-
-        :param \**response_kw:
-            Additional parameters are passed to
-            :meth:`urllib3.response.HTTPResponse.from_httplib`
-        """
-        if headers is None:
-            headers = self.headers
-
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
-
-        if release_conn is None:
-            release_conn = response_kw.get('preload_content', True)
-
-        # Check host
-        if assert_same_host and not self.is_same_host(url):
-            raise HostChangedError(self, url, retries)
-
-        conn = None
-
-        # Merge the proxy headers. Only do this in HTTP. We have to copy the
-        # headers dict so we can safely change it without those changes being
-        # reflected in anyone else's copy.
-        if self.scheme == 'http':
-            headers = headers.copy()
-            headers.update(self.proxy_headers)
-
-        # Must keep the exception bound to a separate variable or else Python 3
-        # complains about UnboundLocalError.
-        err = None
-
-        # Keep track of whether we cleanly exited the except block. This
-        # ensures we do proper cleanup in finally.
-        clean_exit = False
-
-        try:
-            # Request a connection from the queue.
-            timeout_obj = self._get_timeout(timeout)
-            conn = self._get_conn(timeout=pool_timeout)
-
-            conn.timeout = timeout_obj.connect_timeout
-
-            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
-            if is_new_proxy_conn:
-                self._prepare_proxy(conn)
-
-            # Make the request on the httplib connection object.
-            httplib_response = self._make_request(conn, method, url,
-                                                  timeout=timeout_obj,
-                                                  body=body, headers=headers,
-                                                  chunked=chunked)
-
-            # If we're going to release the connection in ``finally:``, then
-            # the response doesn't need to know about the connection. Otherwise
-            # it will also try to release it and we'll have a double-release
-            # mess.
-            response_conn = conn if not release_conn else None
-
-            # Import httplib's response into our own wrapper object
-            response = HTTPResponse.from_httplib(httplib_response,
-                                                 pool=self,
-                                                 connection=response_conn,
-                                                 **response_kw)
-
-            # Everything went great!
-            clean_exit = True
-
-        except Empty:
-            # Timed out by queue.
-            raise EmptyPoolError(self, "No pool connections are available.")
-
-        except (BaseSSLError, CertificateError) as e:
-            # Close the connection. If a connection is reused on which there
-            # was a Certificate error, the next request will certainly raise
-            # another Certificate error.
-            clean_exit = False
-            raise SSLError(e)
-
-        except SSLError:
-            # Treat SSLError separately from BaseSSLError to preserve
-            # traceback.
-            clean_exit = False
-            raise
-
-        except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
-            # Discard the connection for these exceptions. It will be
-            # be replaced during the next _get_conn() call.
-            clean_exit = False
-
-            if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
-                e = ProxyError('Cannot connect to proxy.', e)
-            elif isinstance(e, (SocketError, HTTPException)):
-                e = ProtocolError('Connection aborted.', e)
-
-            retries = retries.increment(method, url, error=e, _pool=self,
-                                        _stacktrace=sys.exc_info()[2])
-            retries.sleep()
-
-            # Keep track of the error for the retry warning.
-            err = e
-
-        finally:
-            if not clean_exit:
-                # We hit some kind of exception, handled or otherwise. We need
-                # to throw the connection away unless explicitly told not to.
-                # Close the connection, set the variable to None, and make sure
-                # we put the None back in the pool to avoid leaking it.
-                conn = conn and conn.close()
-                release_conn = True
-
-            if release_conn:
-                # Put the connection back to be reused. If the connection is
-                # expired then it will be None, which will get replaced with a
-                # fresh connection during _get_conn.
-                self._put_conn(conn)
-
-        if not conn:
-            # Try again
-            log.warning("Retrying (%r) after connection "
-                        "broken by '%r': %s", retries, err, url)
-            return self.urlopen(method, url, body, headers, retries,
-                                redirect, assert_same_host,
-                                timeout=timeout, pool_timeout=pool_timeout,
-                                release_conn=release_conn, **response_kw)
-
-        # Handle redirect?
-        redirect_location = redirect and response.get_redirect_location()
-        if redirect_location:
-            if response.status == 303:
-                method = 'GET'
-
-            try:
-                retries = retries.increment(method, url, response=response, _pool=self)
-            except MaxRetryError:
-                if retries.raise_on_redirect:
-                    # Release the connection for this response, since we're not
-                    # returning it to be released manually.
-                    response.release_conn()
-                    raise
-                return response
-
-            log.info("Redirecting %s -> %s", url, redirect_location)
-            return self.urlopen(
-                method, redirect_location, body, headers,
-                retries=retries, redirect=redirect,
-                assert_same_host=assert_same_host,
-                timeout=timeout, pool_timeout=pool_timeout,
-                release_conn=release_conn, **response_kw)
-
-        # Check if we should retry the HTTP response.
-        if retries.is_forced_retry(method, status_code=response.status):
-            try:
-                retries = retries.increment(method, url, response=response, _pool=self)
-            except MaxRetryError:
-                if retries.raise_on_status:
-                    # Release the connection for this response, since we're not
-                    # returning it to be released manually.
-                    response.release_conn()
-                    raise
-                return response
-            retries.sleep()
-            log.info("Forced retry: %s", url)
-            return self.urlopen(
-                method, url, body, headers,
-                retries=retries, redirect=redirect,
-                assert_same_host=assert_same_host,
-                timeout=timeout, pool_timeout=pool_timeout,
-                release_conn=release_conn, **response_kw)
-
-        return response
-
-
-class HTTPSConnectionPool(HTTPConnectionPool):
-    """
-    Same as :class:`.HTTPConnectionPool`, but HTTPS.
-
-    When Python is compiled with the :mod:`ssl` module, then
-    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
-    instead of :class:`.HTTPSConnection`.
-
-    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
-    ``assert_hostname`` and ``host`` in this order to verify connections.
-    If ``assert_hostname`` is False, no verification is done.
-
-    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
-    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
-    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
-    the connection socket into an SSL socket.
-    """
-
-    scheme = 'https'
-    ConnectionCls = HTTPSConnection
-
-    def __init__(self, host, port=None,
-                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
-                 block=False, headers=None, retries=None,
-                 _proxy=None, _proxy_headers=None,
-                 key_file=None, cert_file=None, cert_reqs=None,
-                 ca_certs=None, ssl_version=None,
-                 assert_hostname=None, assert_fingerprint=None,
-                 ca_cert_dir=None, **conn_kw):
-
-        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
-                                    block, headers, retries, _proxy, _proxy_headers,
-                                    **conn_kw)
-
-        if ca_certs and cert_reqs is None:
-            cert_reqs = 'CERT_REQUIRED'
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-        self.cert_reqs = cert_reqs
-        self.ca_certs = ca_certs
-        self.ca_cert_dir = ca_cert_dir
-        self.ssl_version = ssl_version
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-
-    def _prepare_conn(self, conn):
-        """
-        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
-        and establish the tunnel if proxy is used.
-        """
-
-        if isinstance(conn, VerifiedHTTPSConnection):
-            conn.set_cert(key_file=self.key_file,
-                          cert_file=self.cert_file,
-                          cert_reqs=self.cert_reqs,
-                          ca_certs=self.ca_certs,
-                          ca_cert_dir=self.ca_cert_dir,
-                          assert_hostname=self.assert_hostname,
-                          assert_fingerprint=self.assert_fingerprint)
-            conn.ssl_version = self.ssl_version
-
-        return conn
-
-    def _prepare_proxy(self, conn):
-        """
-        Establish tunnel connection early, because otherwise httplib
-        would improperly set Host: header to proxy's IP:port.
-        """
-        # Python 2.7+
-        try:
-            set_tunnel = conn.set_tunnel
-        except AttributeError:  # Platform-specific: Python 2.6
-            set_tunnel = conn._set_tunnel
-
-        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
-            set_tunnel(self.host, self.port)
-        else:
-            set_tunnel(self.host, self.port, self.proxy_headers)
-
-        conn.connect()
-
-    def _new_conn(self):
-        """
-        Return a fresh :class:`httplib.HTTPSConnection`.
-        """
-        self.num_connections += 1
-        log.info("Starting new HTTPS connection (%d): %s",
-                 self.num_connections, self.host)
-
-        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
-            raise SSLError("Can't connect to HTTPS URL because the SSL "
-                           "module is not available.")
-
-        actual_host = self.host
-        actual_port = self.port
-        if self.proxy is not None:
-            actual_host = self.proxy.host
-            actual_port = self.proxy.port
-
-        conn = self.ConnectionCls(host=actual_host, port=actual_port,
-                                  timeout=self.timeout.connect_timeout,
-                                  strict=self.strict, **self.conn_kw)
-
-        return self._prepare_conn(conn)
-
-    def _validate_conn(self, conn):
-        """
-        Called right before a request is made, after the socket is created.
-        """
-        super(HTTPSConnectionPool, self)._validate_conn(conn)
-
-        # Force connect early to allow us to validate the connection.
-        if not getattr(conn, 'sock', None):  # AppEngine might not have  `.sock`
-            conn.connect()
-
-        if not conn.is_verified:
-            warnings.warn((
-                'Unverified HTTPS request is being made. '
-                'Adding certificate verification is strongly advised. See: '
-                'https://urllib3.readthedocs.org/en/latest/security.html'),
-                InsecureRequestWarning)
-
-
-def connection_from_url(url, **kw):
-    """
-    Given a url, return an :class:`.ConnectionPool` instance of its host.
-
-    This is a shortcut for not having to parse out the scheme, host, and port
-    of the url before creating an :class:`.ConnectionPool` instance.
-
-    :param url:
-        Absolute URL string that must include the scheme. Port is optional.
-
-    :param \**kw:
-        Passes additional parameters to the constructor of the appropriate
-        :class:`.ConnectionPool`. Useful for specifying things like
-        timeout, maxsize, headers, etc.
-
-    Example::
-
-        >>> conn = connection_from_url('http://google.com/')
-        >>> r = conn.request('GET', '/')
-    """
-    scheme, host, port = get_host(url)
-    port = port or port_by_scheme.get(scheme, 80)
-    if scheme == 'https':
-        return HTTPSConnectionPool(host, port=port, **kw)
-    else:
-        return HTTPConnectionPool(host, port=port, **kw)
diff --git a/python/ext-libs/requests/packages/urllib3/contrib/__init__.py b/python/ext-libs/requests/packages/urllib3/contrib/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/python/ext-libs/requests/packages/urllib3/contrib/appengine.py b/python/ext-libs/requests/packages/urllib3/contrib/appengine.py
deleted file mode 100644
index f4289c0..0000000
--- a/python/ext-libs/requests/packages/urllib3/contrib/appengine.py
+++ /dev/null
@@ -1,231 +0,0 @@
-from __future__ import absolute_import
-import logging
-import os
-import warnings
-
-from ..exceptions import (
-    HTTPError,
-    HTTPWarning,
-    MaxRetryError,
-    ProtocolError,
-    TimeoutError,
-    SSLError
-)
-
-from ..packages.six import BytesIO
-from ..request import RequestMethods
-from ..response import HTTPResponse
-from ..util.timeout import Timeout
-from ..util.retry import Retry
-
-try:
-    from google.appengine.api import urlfetch
-except ImportError:
-    urlfetch = None
-
-
-log = logging.getLogger(__name__)
-
-
-class AppEnginePlatformWarning(HTTPWarning):
-    pass
-
-
-class AppEnginePlatformError(HTTPError):
-    pass
-
-
-class AppEngineManager(RequestMethods):
-    """
-    Connection manager for Google App Engine sandbox applications.
-
-    This manager uses the URLFetch service directly instead of using the
-    emulated httplib, and is subject to URLFetch limitations as described in
-    the App Engine documentation here:
-
-        https://cloud.google.com/appengine/docs/python/urlfetch
-
-    Notably it will raise an AppEnginePlatformError if:
-        * URLFetch is not available.
-        * If you attempt to use this on GAEv2 (Managed VMs), as full socket
-          support is available.
-        * If a request size is more than 10 megabytes.
-        * If a response size is more than 32 megabtyes.
-        * If you use an unsupported request method such as OPTIONS.
-
-    Beyond those cases, it will raise normal urllib3 errors.
-    """
-
-    def __init__(self, headers=None, retries=None, validate_certificate=True):
-        if not urlfetch:
-            raise AppEnginePlatformError(
-                "URLFetch is not available in this environment.")
-
-        if is_prod_appengine_mvms():
-            raise AppEnginePlatformError(
-                "Use normal urllib3.PoolManager instead of AppEngineManager"
-                "on Managed VMs, as using URLFetch is not necessary in "
-                "this environment.")
-
-        warnings.warn(
-            "urllib3 is using URLFetch on Google App Engine sandbox instead "
-            "of sockets. To use sockets directly instead of URLFetch see "
-            "https://urllib3.readthedocs.org/en/latest/contrib.html.",
-            AppEnginePlatformWarning)
-
-        RequestMethods.__init__(self, headers)
-        self.validate_certificate = validate_certificate
-
-        self.retries = retries or Retry.DEFAULT
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def urlopen(self, method, url, body=None, headers=None,
-                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
-                **response_kw):
-
-        retries = self._get_retries(retries, redirect)
-
-        try:
-            response = urlfetch.fetch(
-                url,
-                payload=body,
-                method=method,
-                headers=headers or {},
-                allow_truncated=False,
-                follow_redirects=(
-                    redirect and
-                    retries.redirect != 0 and
-                    retries.total),
-                deadline=self._get_absolute_timeout(timeout),
-                validate_certificate=self.validate_certificate,
-            )
-        except urlfetch.DeadlineExceededError as e:
-            raise TimeoutError(self, e)
-
-        except urlfetch.InvalidURLError as e:
-            if 'too large' in str(e):
-                raise AppEnginePlatformError(
-                    "URLFetch request too large, URLFetch only "
-                    "supports requests up to 10mb in size.", e)
-            raise ProtocolError(e)
-
-        except urlfetch.DownloadError as e:
-            if 'Too many redirects' in str(e):
-                raise MaxRetryError(self, url, reason=e)
-            raise ProtocolError(e)
-
-        except urlfetch.ResponseTooLargeError as e:
-            raise AppEnginePlatformError(
-                "URLFetch response too large, URLFetch only supports"
-                "responses up to 32mb in size.", e)
-
-        except urlfetch.SSLCertificateError as e:
-            raise SSLError(e)
-
-        except urlfetch.InvalidMethodError as e:
-            raise AppEnginePlatformError(
-                "URLFetch does not support method: %s" % method, e)
-
-        http_response = self._urlfetch_response_to_http_response(
-            response, **response_kw)
-
-        # Check for redirect response
-        if (http_response.get_redirect_location() and
-                retries.raise_on_redirect and redirect):
-            raise MaxRetryError(self, url, "too many redirects")
-
-        # Check if we should retry the HTTP response.
-        if retries.is_forced_retry(method, status_code=http_response.status):
-            retries = retries.increment(
-                method, url, response=http_response, _pool=self)
-            log.info("Forced retry: %s", url)
-            retries.sleep()
-            return self.urlopen(
-                method, url,
-                body=body, headers=headers,
-                retries=retries, redirect=redirect,
-                timeout=timeout, **response_kw)
-
-        return http_response
-
-    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
-
-        if is_prod_appengine():
-            # Production GAE handles deflate encoding automatically, but does
-            # not remove the encoding header.
-            content_encoding = urlfetch_resp.headers.get('content-encoding')
-
-            if content_encoding == 'deflate':
-                del urlfetch_resp.headers['content-encoding']
-
-        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
-        # We have a full response's content,
-        # so let's make sure we don't report ourselves as chunked data.
-        if transfer_encoding == 'chunked':
-            encodings = transfer_encoding.split(",")
-            encodings.remove('chunked')
-            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)
-
-        return HTTPResponse(
-            # In order for decoding to work, we must present the content as
-            # a file-like object.
-            body=BytesIO(urlfetch_resp.content),
-            headers=urlfetch_resp.headers,
-            status=urlfetch_resp.status_code,
-            **response_kw
-        )
-
-    def _get_absolute_timeout(self, timeout):
-        if timeout is Timeout.DEFAULT_TIMEOUT:
-            return 5  # 5s is the default timeout for URLFetch.
-        if isinstance(timeout, Timeout):
-            if timeout._read is not timeout._connect:
-                warnings.warn(
-                    "URLFetch does not support granular timeout settings, "
-                    "reverting to total timeout.", AppEnginePlatformWarning)
-            return timeout.total
-        return timeout
-
-    def _get_retries(self, retries, redirect):
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(
-                retries, redirect=redirect, default=self.retries)
-
-        if retries.connect or retries.read or retries.redirect:
-            warnings.warn(
-                "URLFetch only supports total retries and does not "
-                "recognize connect, read, or redirect retry parameters.",
-                AppEnginePlatformWarning)
-
-        return retries
-
-
-def is_appengine():
-    return (is_local_appengine() or
-            is_prod_appengine() or
-            is_prod_appengine_mvms())
-
-
-def is_appengine_sandbox():
-    return is_appengine() and not is_prod_appengine_mvms()
-
-
-def is_local_appengine():
-    return ('APPENGINE_RUNTIME' in os.environ and
-            'Development/' in os.environ['SERVER_SOFTWARE'])
-
-
-def is_prod_appengine():
-    return ('APPENGINE_RUNTIME' in os.environ and
-            'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
-            not is_prod_appengine_mvms())
-
-
-def is_prod_appengine_mvms():
-    return os.environ.get('GAE_VM', False) == 'true'
diff --git a/python/ext-libs/requests/packages/urllib3/contrib/ntlmpool.py b/python/ext-libs/requests/packages/urllib3/contrib/ntlmpool.py
deleted file mode 100644
index 11d0b5c..0000000
--- a/python/ext-libs/requests/packages/urllib3/contrib/ntlmpool.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-NTLM authenticating pool, contributed by erikcederstran
-
-Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
-"""
-from __future__ import absolute_import
-
-try:
-    from http.client import HTTPSConnection
-except ImportError:
-    from httplib import HTTPSConnection
-from logging import getLogger
-from ntlm import ntlm
-
-from urllib3 import HTTPSConnectionPool
-
-
-log = getLogger(__name__)
-
-
-class NTLMConnectionPool(HTTPSConnectionPool):
-    """
-    Implements an NTLM authentication version of an urllib3 connection pool
-    """
-
-    scheme = 'https'
-
-    def __init__(self, user, pw, authurl, *args, **kwargs):
-        """
-        authurl is a random URL on the server that is protected by NTLM.
-        user is the Windows user, probably in the DOMAIN\\username format.
-        pw is the password for the user.
-        """
-        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
-        self.authurl = authurl
-        self.rawuser = user
-        user_parts = user.split('\\', 1)
-        self.domain = user_parts[0].upper()
-        self.user = user_parts[1]
-        self.pw = pw
-
-    def _new_conn(self):
-        # Performs the NTLM handshake that secures the connection. The socket
-        # must be kept open while requests are performed.
-        self.num_connections += 1
-        log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
-                  self.num_connections, self.host, self.authurl)
-
-        headers = {}
-        headers['Connection'] = 'Keep-Alive'
-        req_header = 'Authorization'
-        resp_header = 'www-authenticate'
-
-        conn = HTTPSConnection(host=self.host, port=self.port)
-
-        # Send negotiation message
-        headers[req_header] = (
-            'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
-        log.debug('Request headers: %s', headers)
-        conn.request('GET', self.authurl, None, headers)
-        res = conn.getresponse()
-        reshdr = dict(res.getheaders())
-        log.debug('Response status: %s %s', res.status, res.reason)
-        log.debug('Response headers: %s', reshdr)
-        log.debug('Response data: %s [...]', res.read(100))
-
-        # Remove the reference to the socket, so that it can not be closed by
-        # the response object (we want to keep the socket open)
-        res.fp = None
-
-        # Server should respond with a challenge message
-        auth_header_values = reshdr[resp_header].split(', ')
-        auth_header_value = None
-        for s in auth_header_values:
-            if s[:5] == 'NTLM ':
-                auth_header_value = s[5:]
-        if auth_header_value is None:
-            raise Exception('Unexpected %s response header: %s' %
-                            (resp_header, reshdr[resp_header]))
-
-        # Send authentication message
-        ServerChallenge, NegotiateFlags = \
-            ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
-        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
-                                                         self.user,
-                                                         self.domain,
-                                                         self.pw,
-                                                         NegotiateFlags)
-        headers[req_header] = 'NTLM %s' % auth_msg
-        log.debug('Request headers: %s', headers)
-        conn.request('GET', self.authurl, None, headers)
-        res = conn.getresponse()
-        log.debug('Response status: %s %s', res.status, res.reason)
-        log.debug('Response headers: %s', dict(res.getheaders()))
-        log.debug('Response data: %s [...]', res.read()[:100])
-        if res.status != 200:
-            if res.status == 401:
-                raise Exception('Server rejected request: wrong '
-                                'username or password')
-            raise Exception('Wrong server response: %s %s' %
-                            (res.status, res.reason))
-
-        res.fp = None
-        log.debug('Connection established')
-        return conn
-
-    def urlopen(self, method, url, body=None, headers=None, retries=3,
-                redirect=True, assert_same_host=True):
-        if headers is None:
-            headers = {}
-        headers['Connection'] = 'Keep-Alive'
-        return super(NTLMConnectionPool, self).urlopen(method, url, body,
-                                                       headers, retries,
-                                                       redirect,
-                                                       assert_same_host)
diff --git a/python/ext-libs/requests/packages/urllib3/contrib/pyopenssl.py b/python/ext-libs/requests/packages/urllib3/contrib/pyopenssl.py
deleted file mode 100644
index ed3b9cc..0000000
--- a/python/ext-libs/requests/packages/urllib3/contrib/pyopenssl.py
+++ /dev/null
@@ -1,358 +0,0 @@
-'''SSL with SNI_-support for Python 2. Follow these instructions if you would
-like to verify SSL certificates in Python 2. Note, the default libraries do
-*not* do certificate checking; you need to do additional work to validate
-certificates yourself.
-
-This needs the following packages installed:
-
-* pyOpenSSL (tested with 0.13)
-* ndg-httpsclient (tested with 0.3.2)
-* pyasn1 (tested with 0.1.6)
-
-You can install them with the following command:
-
-    pip install pyopenssl ndg-httpsclient pyasn1
-
-To activate certificate checking, call
-:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
-before you begin making HTTP requests. This can be done in a ``sitecustomize``
-module, or at any other time before your application begins using ``urllib3``,
-like this::
-
-    try:
-        import urllib3.contrib.pyopenssl
-        urllib3.contrib.pyopenssl.inject_into_urllib3()
-    except ImportError:
-        pass
-
-Now you can use :mod:`urllib3` as you normally would, and it will support SNI
-when the required modules are installed.
-
-Activating this module also has the positive side effect of disabling SSL/TLS
-compression in Python 2 (see `CRIME attack`_).
-
-If you want to configure the default list of supported cipher suites, you can
-set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
-
-Module Variables
-----------------
-
-:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
-
-.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
-.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
-
-'''
-from __future__ import absolute_import
-
-try:
-    from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
-    from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
-except SyntaxError as e:
-    raise ImportError(e)
-
-import OpenSSL.SSL
-from pyasn1.codec.der import decoder as der_decoder
-from pyasn1.type import univ, constraint
-from socket import timeout, error as SocketError
-
-try:  # Platform-specific: Python 2
-    from socket import _fileobject
-except ImportError:  # Platform-specific: Python 3
-    _fileobject = None
-    from urllib3.packages.backports.makefile import backport_makefile
-
-import ssl
-import select
-import six
-
-from .. import connection
-from .. import util
-
-__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
-
-# SNI only *really* works if we can read the subjectAltName of certificates.
-HAS_SNI = SUBJ_ALT_NAME_SUPPORT
-
-# Map from urllib3 to PyOpenSSL compatible parameter-values.
-_openssl_versions = {
-    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
-    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
-}
-
-if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
-    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
-
-if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
-    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
-
-try:
-    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
-except AttributeError:
-    pass
-
-_openssl_verify = {
-    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
-    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
-    ssl.CERT_REQUIRED:
-        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
-}
-
-DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS.encode('ascii')
-
-# OpenSSL will only write 16K at a time
-SSL_WRITE_BLOCKSIZE = 16384
-
-orig_util_HAS_SNI = util.HAS_SNI
-orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
-
-
-def inject_into_urllib3():
-    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
-
-    connection.ssl_wrap_socket = ssl_wrap_socket
-    util.HAS_SNI = HAS_SNI
-    util.IS_PYOPENSSL = True
-
-
-def extract_from_urllib3():
-    'Undo monkey-patching by :func:`inject_into_urllib3`.'
-
-    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
-    util.HAS_SNI = orig_util_HAS_SNI
-    util.IS_PYOPENSSL = False
-
-
-# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
-class SubjectAltName(BaseSubjectAltName):
-    '''ASN.1 implementation for subjectAltNames support'''
-
-    # There is no limit to how many SAN certificates a certificate may have,
-    #   however this needs to have some limit so we'll set an arbitrarily high
-    #   limit.
-    sizeSpec = univ.SequenceOf.sizeSpec + \
-        constraint.ValueSizeConstraint(1, 1024)
-
-
-# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
-def get_subj_alt_name(peer_cert):
-    # Search through extensions
-    dns_name = []
-    if not SUBJ_ALT_NAME_SUPPORT:
-        return dns_name
-
-    general_names = SubjectAltName()
-    for i in range(peer_cert.get_extension_count()):
-        ext = peer_cert.get_extension(i)
-        ext_name = ext.get_short_name()
-        if ext_name != b'subjectAltName':
-            continue
-
-        # PyOpenSSL returns extension data in ASN.1 encoded form
-        ext_dat = ext.get_data()
-        decoded_dat = der_decoder.decode(ext_dat,
-                                         asn1Spec=general_names)
-
-        for name in decoded_dat:
-            if not isinstance(name, SubjectAltName):
-                continue
-            for entry in range(len(name)):
-                component = name.getComponentByPosition(entry)
-                if component.getName() != 'dNSName':
-                    continue
-                dns_name.append(str(component.getComponent()))
-
-    return dns_name
-
-
-class WrappedSocket(object):
-    '''API-compatibility wrapper for Python OpenSSL's Connection-class.
-
-    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
-    collector of pypy.
-    '''
-
-    def __init__(self, connection, socket, suppress_ragged_eofs=True):
-        self.connection = connection
-        self.socket = socket
-        self.suppress_ragged_eofs = suppress_ragged_eofs
-        self._makefile_refs = 0
-        self._closed = False
-
-    def fileno(self):
-        return self.socket.fileno()
-
-    # Copy-pasted from Python 3.5 source code
-    def _decref_socketios(self):
-        if self._makefile_refs > 0:
-            self._makefile_refs -= 1
-        if self._closed:
-            self.close()
-
-    def recv(self, *args, **kwargs):
-        try:
-            data = self.connection.recv(*args, **kwargs)
-        except OpenSSL.SSL.SysCallError as e:
-            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
-                return b''
-            else:
-                raise SocketError(str(e))
-        except OpenSSL.SSL.ZeroReturnError as e:
-            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
-                return b''
-            else:
-                raise
-        except OpenSSL.SSL.WantReadError:
-            rd, wd, ed = select.select(
-                [self.socket], [], [], self.socket.gettimeout())
-            if not rd:
-                raise timeout('The read operation timed out')
-            else:
-                return self.recv(*args, **kwargs)
-        else:
-            return data
-
-    def recv_into(self, *args, **kwargs):
-        try:
-            return self.connection.recv_into(*args, **kwargs)
-        except OpenSSL.SSL.SysCallError as e:
-            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
-                return 0
-            else:
-                raise SocketError(str(e))
-        except OpenSSL.SSL.ZeroReturnError as e:
-            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
-                return 0
-            else:
-                raise
-        except OpenSSL.SSL.WantReadError:
-            rd, wd, ed = select.select(
-                [self.socket], [], [], self.socket.gettimeout())
-            if not rd:
-                raise timeout('The read operation timed out')
-            else:
-                return self.recv_into(*args, **kwargs)
-
-    def settimeout(self, timeout):
-        return self.socket.settimeout(timeout)
-
-    def _send_until_done(self, data):
-        while True:
-            try:
-                return self.connection.send(data)
-            except OpenSSL.SSL.WantWriteError:
-                _, wlist, _ = select.select([], [self.socket], [],
-                                            self.socket.gettimeout())
-                if not wlist:
-                    raise timeout()
-                continue
-
-    def sendall(self, data):
-        total_sent = 0
-        while total_sent < len(data):
-            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
-            total_sent += sent
-
-    def shutdown(self):
-        # FIXME rethrow compatible exceptions should we ever use this
-        self.connection.shutdown()
-
-    def close(self):
-        if self._makefile_refs < 1:
-            try:
-                self._closed = True
-                return self.connection.close()
-            except OpenSSL.SSL.Error:
-                return
-        else:
-            self._makefile_refs -= 1
-
-    def getpeercert(self, binary_form=False):
-        x509 = self.connection.get_peer_certificate()
-
-        if not x509:
-            return x509
-
-        if binary_form:
-            return OpenSSL.crypto.dump_certificate(
-                OpenSSL.crypto.FILETYPE_ASN1,
-                x509)
-
-        return {
-            'subject': (
-                (('commonName', x509.get_subject().CN),),
-            ),
-            'subjectAltName': [
-                ('DNS', value)
-                for value in get_subj_alt_name(x509)
-            ]
-        }
-
-    def _reuse(self):
-        self._makefile_refs += 1
-
-    def _drop(self):
-        if self._makefile_refs < 1:
-            self.close()
-        else:
-            self._makefile_refs -= 1
-
-
-if _fileobject:  # Platform-specific: Python 2
-    def makefile(self, mode, bufsize=-1):
-        self._makefile_refs += 1
-        return _fileobject(self, mode, bufsize, close=True)
-else:  # Platform-specific: Python 3
-    makefile = backport_makefile
-
-WrappedSocket.makefile = makefile
-
-
-def _verify_callback(cnx, x509, err_no, err_depth, return_code):
-    return err_no == 0
-
-
-def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
-                    ca_certs=None, server_hostname=None,
-                    ssl_version=None, ca_cert_dir=None):
-    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
-    if certfile:
-        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
-        ctx.use_certificate_file(certfile)
-    if keyfile:
-        ctx.use_privatekey_file(keyfile)
-    if cert_reqs != ssl.CERT_NONE:
-        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
-    if ca_certs or ca_cert_dir:
-        try:
-            ctx.load_verify_locations(ca_certs, ca_cert_dir)
-        except OpenSSL.SSL.Error as e:
-            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
-    else:
-        ctx.set_default_verify_paths()
-
-    # Disable TLS compression to mitigate CRIME attack (issue #309)
-    OP_NO_COMPRESSION = 0x20000
-    ctx.set_options(OP_NO_COMPRESSION)
-
-    # Set list of supported ciphersuites.
-    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
-
-    cnx = OpenSSL.SSL.Connection(ctx, sock)
-    if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
-        server_hostname = server_hostname.encode('utf-8')
-    cnx.set_tlsext_host_name(server_hostname)
-    cnx.set_connect_state()
-    while True:
-        try:
-            cnx.do_handshake()
-        except OpenSSL.SSL.WantReadError:
-            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
-            if not rd:
-                raise timeout('select timed out')
-            continue
-        except OpenSSL.SSL.Error as e:
-            raise ssl.SSLError('bad handshake: %r' % e)
-        break
-
-    return WrappedSocket(cnx, sock)
diff --git a/python/ext-libs/requests/packages/urllib3/contrib/socks.py b/python/ext-libs/requests/packages/urllib3/contrib/socks.py
deleted file mode 100644
index 3748fee..0000000
--- a/python/ext-libs/requests/packages/urllib3/contrib/socks.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-SOCKS support for urllib3
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This contrib module contains provisional support for SOCKS proxies from within
-urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
-SOCKS5. To enable its functionality, either install PySocks or install this
-module with the ``socks`` extra.
-
-Known Limitations:
-
-- Currently PySocks does not support contacting remote websites via literal
-  IPv6 addresses. Any such connection attempt will fail.
-- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
-  such connection attempt will fail.
-"""
-from __future__ import absolute_import
-
-try:
-    import socks
-except ImportError:
-    import warnings
-    from ..exceptions import DependencyWarning
-
-    warnings.warn((
-        'SOCKS support in urllib3 requires the installation of optional '
-        'dependencies: specifically, PySocks.  For more information, see '
-        'https://urllib3.readthedocs.org/en/latest/contrib.html#socks-proxies'
-        ),
-        DependencyWarning
-    )
-    raise
-
-from socket import error as SocketError, timeout as SocketTimeout
-
-from ..connection import (
-    HTTPConnection, HTTPSConnection
-)
-from ..connectionpool import (
-    HTTPConnectionPool, HTTPSConnectionPool
-)
-from ..exceptions import ConnectTimeoutError, NewConnectionError
-from ..poolmanager import PoolManager
-from ..util.url import parse_url
-
-try:
-    import ssl
-except ImportError:
-    ssl = None
-
-
-class SOCKSConnection(HTTPConnection):
-    """
-    A plain-text HTTP connection that connects via a SOCKS proxy.
-    """
-    def __init__(self, *args, **kwargs):
-        self._socks_options = kwargs.pop('_socks_options')
-        super(SOCKSConnection, self).__init__(*args, **kwargs)
-
-    def _new_conn(self):
-        """
-        Establish a new connection via the SOCKS proxy.
-        """
-        extra_kw = {}
-        if self.source_address:
-            extra_kw['source_address'] = self.source_address
-
-        if self.socket_options:
-            extra_kw['socket_options'] = self.socket_options
-
-        try:
-            conn = socks.create_connection(
-                (self.host, self.port),
-                proxy_type=self._socks_options['socks_version'],
-                proxy_addr=self._socks_options['proxy_host'],
-                proxy_port=self._socks_options['proxy_port'],
-                proxy_username=self._socks_options['username'],
-                proxy_password=self._socks_options['password'],
-                timeout=self.timeout,
-                **extra_kw
-            )
-
-        except SocketTimeout as e:
-            raise ConnectTimeoutError(
-                self, "Connection to %s timed out. (connect timeout=%s)" %
-                (self.host, self.timeout))
-
-        except socks.ProxyError as e:
-            # This is fragile as hell, but it seems to be the only way to raise
-            # useful errors here.
-            if e.socket_err:
-                error = e.socket_err
-                if isinstance(error, SocketTimeout):
-                    raise ConnectTimeoutError(
-                        self,
-                        "Connection to %s timed out. (connect timeout=%s)" %
-                        (self.host, self.timeout)
-                    )
-                else:
-                    raise NewConnectionError(
-                        self,
-                        "Failed to establish a new connection: %s" % error
-                    )
-            else:
-                raise NewConnectionError(
-                    self,
-                    "Failed to establish a new connection: %s" % e
-                )
-
-        except SocketError as e:  # Defensive: PySocks should catch all these.
-            raise NewConnectionError(
-                self, "Failed to establish a new connection: %s" % e)
-
-        return conn
-
-
-# We don't need to duplicate the Verified/Unverified distinction from
-# urllib3/connection.py here because the HTTPSConnection will already have been
-# correctly set to either the Verified or Unverified form by that module. This
-# means the SOCKSHTTPSConnection will automatically be the correct type.
-class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
-    pass
-
-
-class SOCKSHTTPConnectionPool(HTTPConnectionPool):
-    ConnectionCls = SOCKSConnection
-
-
-class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
-    ConnectionCls = SOCKSHTTPSConnection
-
-
-class SOCKSProxyManager(PoolManager):
-    """
-    A version of the urllib3 ProxyManager that routes connections via the
-    defined SOCKS proxy.
-    """
-    pool_classes_by_scheme = {
-        'http': SOCKSHTTPConnectionPool,
-        'https': SOCKSHTTPSConnectionPool,
-    }
-
-    def __init__(self, proxy_url, username=None, password=None,
-                 num_pools=10, headers=None, **connection_pool_kw):
-        parsed = parse_url(proxy_url)
-
-        if parsed.scheme == 'socks5':
-            socks_version = socks.PROXY_TYPE_SOCKS5
-        elif parsed.scheme == 'socks4':
-            socks_version = socks.PROXY_TYPE_SOCKS4
-        else:
-            raise ValueError(
-                "Unable to determine SOCKS version from %s" % proxy_url
-            )
-
-        self.proxy_url = proxy_url
-
-        socks_options = {
-            'socks_version': socks_version,
-            'proxy_host': parsed.host,
-            'proxy_port': parsed.port,
-            'username': username,
-            'password': password,
-        }
-        connection_pool_kw['_socks_options'] = socks_options
-
-        super(SOCKSProxyManager, self).__init__(
-            num_pools, headers, **connection_pool_kw
-        )
-
-        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
diff --git a/python/ext-libs/requests/packages/urllib3/exceptions.py b/python/ext-libs/requests/packages/urllib3/exceptions.py
deleted file mode 100644
index f2e6591..0000000
--- a/python/ext-libs/requests/packages/urllib3/exceptions.py
+++ /dev/null
@@ -1,209 +0,0 @@
-from __future__ import absolute_import
-# Base Exceptions
-
-
-class HTTPError(Exception):
-    "Base exception used by this module."
-    pass
-
-
-class HTTPWarning(Warning):
-    "Base warning used by this module."
-    pass
-
-
-class PoolError(HTTPError):
-    "Base exception for errors caused within a pool."
-    def __init__(self, pool, message):
-        self.pool = pool
-        HTTPError.__init__(self, "%s: %s" % (pool, message))
-
-    def __reduce__(self):
-        # For pickling purposes.
-        return self.__class__, (None, None)
-
-
-class RequestError(PoolError):
-    "Base exception for PoolErrors that have associated URLs."
-    def __init__(self, pool, url, message):
-        self.url = url
-        PoolError.__init__(self, pool, message)
-
-    def __reduce__(self):
-        # For pickling purposes.
-        return self.__class__, (None, self.url, None)
-
-
-class SSLError(HTTPError):
-    "Raised when SSL certificate fails in an HTTPS connection."
-    pass
-
-
-class ProxyError(HTTPError):
-    "Raised when the connection to a proxy fails."
-    pass
-
-
-class DecodeError(HTTPError):
-    "Raised when automatic decoding based on Content-Type fails."
-    pass
-
-
-class ProtocolError(HTTPError):
-    "Raised when something unexpected happens mid-request/response."
-    pass
-
-
-#: Renamed to ProtocolError but aliased for backwards compatibility.
-ConnectionError = ProtocolError
-
-
-# Leaf Exceptions
-
-class MaxRetryError(RequestError):
-    """Raised when the maximum number of retries is exceeded.
-
-    :param pool: The connection pool
-    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
-    :param string url: The requested Url
-    :param exceptions.Exception reason: The underlying error
-
-    """
-
-    def __init__(self, pool, url, reason=None):
-        self.reason = reason
-
-        message = "Max retries exceeded with url: %s (Caused by %r)" % (
-            url, reason)
-
-        RequestError.__init__(self, pool, url, message)
-
-
-class HostChangedError(RequestError):
-    "Raised when an existing pool gets a request for a foreign host."
-
-    def __init__(self, pool, url, retries=3):
-        message = "Tried to open a foreign host with url: %s" % url
-        RequestError.__init__(self, pool, url, message)
-        self.retries = retries
-
-
-class TimeoutStateError(HTTPError):
-    """ Raised when passing an invalid state to a timeout """
-    pass
-
-
-class TimeoutError(HTTPError):
-    """ Raised when a socket timeout error occurs.
-
-    Catching this error will catch both :exc:`ReadTimeoutErrors
-    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
-    """
-    pass
-
-
-class ReadTimeoutError(TimeoutError, RequestError):
-    "Raised when a socket timeout occurs while receiving data from a server"
-    pass
-
-
-# This timeout error does not have a URL attached and needs to inherit from the
-# base HTTPError
-class ConnectTimeoutError(TimeoutError):
-    "Raised when a socket timeout occurs while connecting to a server"
-    pass
-
-
-class NewConnectionError(ConnectTimeoutError, PoolError):
-    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
-    pass
-
-
-class EmptyPoolError(PoolError):
-    "Raised when a pool runs out of connections and no more are allowed."
-    pass
-
-
-class ClosedPoolError(PoolError):
-    "Raised when a request enters a pool after the pool has been closed."
-    pass
-
-
-class LocationValueError(ValueError, HTTPError):
-    "Raised when there is something wrong with a given URL input."
-    pass
-
-
-class LocationParseError(LocationValueError):
-    "Raised when get_host or similar fails to parse the URL input."
-
-    def __init__(self, location):
-        message = "Failed to parse: %s" % location
-        HTTPError.__init__(self, message)
-
-        self.location = location
-
-
-class ResponseError(HTTPError):
-    "Used as a container for an error reason supplied in a MaxRetryError."
-    GENERIC_ERROR = 'too many error responses'
-    SPECIFIC_ERROR = 'too many {status_code} error responses'
-
-
-class SecurityWarning(HTTPWarning):
-    "Warned when perfoming security reducing actions"
-    pass
-
-
-class SubjectAltNameWarning(SecurityWarning):
-    "Warned when connecting to a host with a certificate missing a SAN."
-    pass
-
-
-class InsecureRequestWarning(SecurityWarning):
-    "Warned when making an unverified HTTPS request."
-    pass
-
-
-class SystemTimeWarning(SecurityWarning):
-    "Warned when system time is suspected to be wrong"
-    pass
-
-
-class InsecurePlatformWarning(SecurityWarning):
-    "Warned when certain SSL configuration is not available on a platform."
-    pass
-
-
-class SNIMissingWarning(HTTPWarning):
-    "Warned when making a HTTPS request without SNI available."
-    pass
-
-
-class DependencyWarning(HTTPWarning):
-    """
-    Warned when an attempt is made to import a module with missing optional
-    dependencies.
-    """
-    pass
-
-
-class ResponseNotChunked(ProtocolError, ValueError):
-    "Response needs to be chunked in order to read it as chunks."
-    pass
-
-
-class ProxySchemeUnknown(AssertionError, ValueError):
-    "ProxyManager does not support the supplied scheme"
-    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
-
-    def __init__(self, scheme):
-        message = "Not supported proxy scheme %s" % scheme
-        super(ProxySchemeUnknown, self).__init__(message)
-
-
-class HeaderParsingError(HTTPError):
-    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
-    def __init__(self, defects, unparsed_data):
-        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
-        super(HeaderParsingError, self).__init__(message)
diff --git a/python/ext-libs/requests/packages/urllib3/fields.py b/python/ext-libs/requests/packages/urllib3/fields.py
deleted file mode 100644
index 8fa2a12..0000000
--- a/python/ext-libs/requests/packages/urllib3/fields.py
+++ /dev/null
@@ -1,178 +0,0 @@
-from __future__ import absolute_import
-import email.utils
-import mimetypes
-
-from .packages import six
-
-
-def guess_content_type(filename, default='application/octet-stream'):
-    """
-    Guess the "Content-Type" of a file.
-
-    :param filename:
-        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
-    :param default:
-        If no "Content-Type" can be guessed, default to `default`.
-    """
-    if filename:
-        return mimetypes.guess_type(filename)[0] or default
-    return default
-
-
-def format_header_param(name, value):
-    """
-    Helper function to format and quote a single header parameter.
-
-    Particularly useful for header parameters which might contain
-    non-ASCII values, like file names. This follows RFC 2231, as
-    suggested by RFC 2388 Section 4.4.
-
-    :param name:
-        The name of the parameter, a string expected to be ASCII only.
-    :param value:
-        The value of the parameter, provided as a unicode string.
-    """
-    if not any(ch in value for ch in '"\\\r\n'):
-        result = '%s="%s"' % (name, value)
-        try:
-            result.encode('ascii')
-        except (UnicodeEncodeError, UnicodeDecodeError):
-            pass
-        else:
-            return result
-    if not six.PY3 and isinstance(value, six.text_type):  # Python 2:
-        value = value.encode('utf-8')
-    value = email.utils.encode_rfc2231(value, 'utf-8')
-    value = '%s*=%s' % (name, value)
-    return value
-
-
-class RequestField(object):
-    """
-    A data container for request body parameters.
-
-    :param name:
-        The name of this request field.
-    :param data:
-        The data/value body.
-    :param filename:
-        An optional filename of the request field.
-    :param headers:
-        An optional dict-like object of headers to initially use for the field.
-    """
-    def __init__(self, name, data, filename=None, headers=None):
-        self._name = name
-        self._filename = filename
-        self.data = data
-        self.headers = {}
-        if headers:
-            self.headers = dict(headers)
-
-    @classmethod
-    def from_tuples(cls, fieldname, value):
-        """
-        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
-
-        Supports constructing :class:`~urllib3.fields.RequestField` from
-        parameter of key/value strings AND key/filetuple. A filetuple is a
-        (filename, data, MIME type) tuple where the MIME type is optional.
-        For example::
-
-            'foo': 'bar',
-            'fakefile': ('foofile.txt', 'contents of foofile'),
-            'realfile': ('barfile.txt', open('realfile').read()),
-            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
-            'nonamefile': 'contents of nonamefile field',
-
-        Field names and filenames must be unicode.
-        """
-        if isinstance(value, tuple):
-            if len(value) == 3:
-                filename, data, content_type = value
-            else:
-                filename, data = value
-                content_type = guess_content_type(filename)
-        else:
-            filename = None
-            content_type = None
-            data = value
-
-        request_param = cls(fieldname, data, filename=filename)
-        request_param.make_multipart(content_type=content_type)
-
-        return request_param
-
-    def _render_part(self, name, value):
-        """
-        Overridable helper function to format a single header parameter.
-
-        :param name:
-            The name of the parameter, a string expected to be ASCII only.
-        :param value:
-            The value of the parameter, provided as a unicode string.
-        """
-        return format_header_param(name, value)
-
-    def _render_parts(self, header_parts):
-        """
-        Helper function to format and quote a single header.
-
-        Useful for single headers that are composed of multiple items. E.g.,
-        'Content-Disposition' fields.
-
-        :param header_parts:
-            A sequence of (k, v) typles or a :class:`dict` of (k, v) to format
-            as `k1="v1"; k2="v2"; ...`.
-        """
-        parts = []
-        iterable = header_parts
-        if isinstance(header_parts, dict):
-            iterable = header_parts.items()
-
-        for name, value in iterable:
-            if value:
-                parts.append(self._render_part(name, value))
-
-        return '; '.join(parts)
-
-    def render_headers(self):
-        """
-        Renders the headers for this request field.
-        """
-        lines = []
-
-        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
-        for sort_key in sort_keys:
-            if self.headers.get(sort_key, False):
-                lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
-
-        for header_name, header_value in self.headers.items():
-            if header_name not in sort_keys:
-                if header_value:
-                    lines.append('%s: %s' % (header_name, header_value))
-
-        lines.append('\r\n')
-        return '\r\n'.join(lines)
-
-    def make_multipart(self, content_disposition=None, content_type=None,
-                       content_location=None):
-        """
-        Makes this request field into a multipart request field.
-
-        This method overrides "Content-Disposition", "Content-Type" and
-        "Content-Location" headers to the request parameter.
-
-        :param content_type:
-            The 'Content-Type' of the request body.
-        :param content_location:
-            The 'Content-Location' of the request body.
-
-        """
-        self.headers['Content-Disposition'] = content_disposition or 'form-data'
-        self.headers['Content-Disposition'] += '; '.join([
-            '', self._render_parts(
-                (('name', self._name), ('filename', self._filename))
-            )
-        ])
-        self.headers['Content-Type'] = content_type
-        self.headers['Content-Location'] = content_location
diff --git a/python/ext-libs/requests/packages/urllib3/filepost.py b/python/ext-libs/requests/packages/urllib3/filepost.py
deleted file mode 100644
index 97a2843..0000000
--- a/python/ext-libs/requests/packages/urllib3/filepost.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from __future__ import absolute_import
-import codecs
-
-from uuid import uuid4
-from io import BytesIO
-
-from .packages import six
-from .packages.six import b
-from .fields import RequestField
-
-writer = codecs.lookup('utf-8')[3]
-
-
-def choose_boundary():
-    """
-    Our embarassingly-simple replacement for mimetools.choose_boundary.
-    """
-    return uuid4().hex
-
-
-def iter_field_objects(fields):
-    """
-    Iterate over fields.
-
-    Supports list of (k, v) tuples and dicts, and lists of
-    :class:`~urllib3.fields.RequestField`.
-
-    """
-    if isinstance(fields, dict):
-        i = six.iteritems(fields)
-    else:
-        i = iter(fields)
-
-    for field in i:
-        if isinstance(field, RequestField):
-            yield field
-        else:
-            yield RequestField.from_tuples(*field)
-
-
-def iter_fields(fields):
-    """
-    .. deprecated:: 1.6
-
-    Iterate over fields.
-
-    The addition of :class:`~urllib3.fields.RequestField` makes this function
-    obsolete. Instead, use :func:`iter_field_objects`, which returns
-    :class:`~urllib3.fields.RequestField` objects.
-
-    Supports list of (k, v) tuples and dicts.
-    """
-    if isinstance(fields, dict):
-        return ((k, v) for k, v in six.iteritems(fields))
-
-    return ((k, v) for k, v in fields)
-
-
-def encode_multipart_formdata(fields, boundary=None):
-    """
-    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
-
-    :param fields:
-        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
-
-    :param boundary:
-        If not specified, then a random boundary will be generated using
-        :func:`mimetools.choose_boundary`.
-    """
-    body = BytesIO()
-    if boundary is None:
-        boundary = choose_boundary()
-
-    for field in iter_field_objects(fields):
-        body.write(b('--%s\r\n' % (boundary)))
-
-        writer(body).write(field.render_headers())
-        data = field.data
-
-        if isinstance(data, int):
-            data = str(data)  # Backwards compatibility
-
-        if isinstance(data, six.text_type):
-            writer(body).write(data)
-        else:
-            body.write(data)
-
-        body.write(b'\r\n')
-
-    body.write(b('--%s--\r\n' % (boundary)))
-
-    content_type = str('multipart/form-data; boundary=%s' % boundary)
-
-    return body.getvalue(), content_type
diff --git a/python/ext-libs/requests/packages/urllib3/packages/__init__.py b/python/ext-libs/requests/packages/urllib3/packages/__init__.py
deleted file mode 100644
index 170e974..0000000
--- a/python/ext-libs/requests/packages/urllib3/packages/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from __future__ import absolute_import
-
-from . import ssl_match_hostname
-
-__all__ = ('ssl_match_hostname', )
diff --git a/python/ext-libs/requests/packages/urllib3/packages/ordered_dict.py b/python/ext-libs/requests/packages/urllib3/packages/ordered_dict.py
deleted file mode 100644
index 4479363..0000000
--- a/python/ext-libs/requests/packages/urllib3/packages/ordered_dict.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-# Copyright 2009 Raymond Hettinger, released under the MIT License.
-# http://code.activestate.com/recipes/576693/
-try:
-    from thread import get_ident as _get_ident
-except ImportError:
-    from dummy_thread import get_ident as _get_ident
-
-try:
-    from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
-    pass
-
-
-class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as for regular dictionaries.
-
-    # The internal self.__map dictionary maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
-
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  Signature is the same as for
-        regular dictionaries, but keyword arguments are not recommended
-        because their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []                     # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link which goes at the end of the linked
-        # list, and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which is
-        # then removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, key = self.__map.pop(key)
-        link_prev[1] = link_next
-        link_next[0] = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        root = self.__root
-        curr = root[1]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[1]
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        root = self.__root
-        curr = root[0]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[0]
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        try:
-            for node in self.__map.itervalues():
-                del node[:]
-            root = self.__root
-            root[:] = [root, root, None]
-            self.__map.clear()
-        except AttributeError:
-            pass
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root[0]
-            link_prev = link[0]
-            link_prev[1] = root
-            root[0] = link_prev
-        else:
-            link = root[1]
-            link_next = link[1]
-            root[1] = link_next
-            link_next[0] = root
-        key = link[2]
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) items in od'
-        for k in self:
-            yield (k, self[k])
-
-    def update(*args, **kwds):
-        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
-
-        If E is a dict instance, does:           for k in E: od[k] = E[k]
-        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
-        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
-        In either case, this is followed by:     for k, v in F.items(): od[k] = v
-
-        '''
-        if len(args) > 2:
-            raise TypeError('update() takes at most 2 positional '
-                            'arguments (%d given)' % (len(args),))
-        elif not args:
-            raise TypeError('update() takes at least 1 argument (0 given)')
-        self = args[0]
-        # Make progressively weaker assumptions about "other"
-        other = ()
-        if len(args) == 2:
-            other = args[1]
-        if isinstance(other, dict):
-            for key in other:
-                self[key] = other[key]
-        elif hasattr(other, 'keys'):
-            for key in other.keys():
-                self[key] = other[key]
-        else:
-            for key, value in other:
-                self[key] = value
-        for key, value in kwds.items():
-            self[key] = value
-
-    __update = update  # let subclasses override update without breaking __init__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-        If key is not found, d is returned if given, otherwise KeyError is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
-
-    def __repr__(self, _repr_running={}):
-        'od.__repr__() <==> repr(od)'
-        call_key = id(self), _get_ident()
-        if call_key in _repr_running:
-            return '...'
-        _repr_running[call_key] = 1
-        try:
-            if not self:
-                return '%s()' % (self.__class__.__name__,)
-            return '%s(%r)' % (self.__class__.__name__, self.items())
-        finally:
-            del _repr_running[call_key]
-
-    def __reduce__(self):
-        'Return state information for pickling'
-        items = [[k, self[k]] for k in self]
-        inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def copy(self):
-        'od.copy() -> a shallow copy of od'
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
-        and values equal to v (which defaults to None).
-
-        '''
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-    def __eq__(self, other):
-        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-        while comparison to a regular mapping is order-insensitive.
-
-        '''
-        if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
-        return dict.__eq__(self, other)
-
-    def __ne__(self, other):
-        return not self == other
-
-    # -- the following methods are only used in Python 2.7 --
-
-    def viewkeys(self):
-        "od.viewkeys() -> a set-like object providing a view on od's keys"
-        return KeysView(self)
-
-    def viewvalues(self):
-        "od.viewvalues() -> an object providing a view on od's values"
-        return ValuesView(self)
-
-    def viewitems(self):
-        "od.viewitems() -> a set-like object providing a view on od's items"
-        return ItemsView(self)
diff --git a/python/ext-libs/requests/packages/urllib3/packages/six.py b/python/ext-libs/requests/packages/urllib3/packages/six.py
deleted file mode 100644
index 27d8011..0000000
--- a/python/ext-libs/requests/packages/urllib3/packages/six.py
+++ /dev/null
@@ -1,385 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-#Copyright (c) 2010-2011 Benjamin Peterson
-
-#Permission is hereby granted, free of charge, to any person obtaining a copy of
-#this software and associated documentation files (the "Software"), to deal in
-#the Software without restriction, including without limitation the rights to
-#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-#the Software, and to permit persons to whom the Software is furnished to do so,
-#subject to the following conditions:
-
-#The above copyright notice and this permission notice shall be included in all
-#copies or substantial portions of the Software.
-
-#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin at python.org>"
-__version__ = "1.2.0"  # Revision 41c74fef2ded
-
-
-# True if we are running on Python 3.
-PY3 = sys.version_info[0] == 3
-
-if PY3:
-    string_types = str,
-    integer_types = int,
-    class_types = type,
-    text_type = str
-    binary_type = bytes
-
-    MAXSIZE = sys.maxsize
-else:
-    string_types = basestring,
-    integer_types = (int, long)
-    class_types = (type, types.ClassType)
-    text_type = unicode
-    binary_type = str
-
-    if sys.platform.startswith("java"):
-        # Jython always uses 32 bits.
-        MAXSIZE = int((1 << 31) - 1)
-    else:
-        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
-        class X(object):
-            def __len__(self):
-                return 1 << 31
-        try:
-            len(X())
-        except OverflowError:
-            # 32-bit
-            MAXSIZE = int((1 << 31) - 1)
-        else:
-            # 64-bit
-            MAXSIZE = int((1 << 63) - 1)
-            del X
-
-
-def _add_doc(func, doc):
-    """Add documentation to a function."""
-    func.__doc__ = doc
-
-
-def _import_module(name):
-    """Import module, returning the module after the last dot."""
-    __import__(name)
-    return sys.modules[name]
-
-
-class _LazyDescr(object):
-
-    def __init__(self, name):
-        self.name = name
-
-    def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)
-        # This is a bit ugly, but it avoids running this again.
-        delattr(tp, self.name)
-        return result
-
-
-class MovedModule(_LazyDescr):
-
-    def __init__(self, name, old, new=None):
-        super(MovedModule, self).__init__(name)
-        if PY3:
-            if new is None:
-                new = name
-            self.mod = new
-        else:
-            self.mod = old
-
-    def _resolve(self):
-        return _import_module(self.mod)
-
-
-class MovedAttribute(_LazyDescr):
-
-    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
-        super(MovedAttribute, self).__init__(name)
-        if PY3:
-            if new_mod is None:
-                new_mod = name
-            self.mod = new_mod
-            if new_attr is None:
-                if old_attr is None:
-                    new_attr = name
-                else:
-                    new_attr = old_attr
-            self.attr = new_attr
-        else:
-            self.mod = old_mod
-            if old_attr is None:
-                old_attr = name
-            self.attr = old_attr
-
-    def _resolve(self):
-        module = _import_module(self.mod)
-        return getattr(module, self.attr)
-
-
-
-class _MovedItems(types.ModuleType):
-    """Lazy loading of moved objects"""
-
-
-_moved_attributes = [
-    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
-    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
-    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
-    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
-    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
-    MovedAttribute("reduce", "__builtin__", "functools"),
-    MovedAttribute("StringIO", "StringIO", "io"),
-    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
-
-    MovedModule("builtins", "__builtin__"),
-    MovedModule("configparser", "ConfigParser"),
-    MovedModule("copyreg", "copy_reg"),
-    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
-    MovedModule("http_cookies", "Cookie", "http.cookies"),
-    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
-    MovedModule("html_parser", "HTMLParser", "html.parser"),
-    MovedModule("http_client", "httplib", "http.client"),
-    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
-    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
-    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
-    MovedModule("cPickle", "cPickle", "pickle"),
-    MovedModule("queue", "Queue"),
-    MovedModule("reprlib", "repr"),
-    MovedModule("socketserver", "SocketServer"),
-    MovedModule("tkinter", "Tkinter"),
-    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
-    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
-    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
-    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
-    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
-    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
-    MovedModule("tkinter_colorchooser", "tkColorChooser",
-                "tkinter.colorchooser"),
-    MovedModule("tkinter_commondialog", "tkCommonDialog",
-                "tkinter.commondialog"),
-    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
-    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
-    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
-                "tkinter.simpledialog"),
-    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
-    MovedModule("winreg", "_winreg"),
-]
-for attr in _moved_attributes:
-    setattr(_MovedItems, attr.name, attr)
-del attr
-
-moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
-
-
-def add_move(move):
-    """Add an item to six.moves."""
-    setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
-    """Remove item from six.moves."""
-    try:
-        delattr(_MovedItems, name)
-    except AttributeError:
-        try:
-            del moves.__dict__[name]
-        except KeyError:
-            raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
-    _meth_func = "__func__"
-    _meth_self = "__self__"
-
-    _func_code = "__code__"
-    _func_defaults = "__defaults__"
-
-    _iterkeys = "keys"
-    _itervalues = "values"
-    _iteritems = "items"
-else:
-    _meth_func = "im_func"
-    _meth_self = "im_self"
-
-    _func_code = "func_code"
-    _func_defaults = "func_defaults"
-
-    _iterkeys = "iterkeys"
-    _itervalues = "itervalues"
-    _iteritems = "iteritems"
-
-
-try:
-    advance_iterator = next
-except NameError:
-    def advance_iterator(it):
-        return it.next()
-next = advance_iterator
-
-
-if PY3:
-    def get_unbound_function(unbound):
-        return unbound
-
-    Iterator = object
-
-    def callable(obj):
-        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-else:
-    def get_unbound_function(unbound):
-        return unbound.im_func
-
-    class Iterator(object):
-
-        def next(self):
-            return type(self).__next__(self)
-
-    callable = callable
-_add_doc(get_unbound_function,
-         """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-
-
-def iterkeys(d):
-    """Return an iterator over the keys of a dictionary."""
-    return iter(getattr(d, _iterkeys)())
-
-def itervalues(d):
-    """Return an iterator over the values of a dictionary."""
-    return iter(getattr(d, _itervalues)())
-
-def iteritems(d):
-    """Return an iterator over the (key, value) pairs of a dictionary."""
-    return iter(getattr(d, _iteritems)())
-
-
-if PY3:
-    def b(s):
-        return s.encode("latin-1")
-    def u(s):
-        return s
-    if sys.version_info[1] <= 1:
-        def int2byte(i):
-            return bytes((i,))
-    else:
-        # This is about 2x faster than the implementation above on 3.2+
-        int2byte = operator.methodcaller("to_bytes", 1, "big")
-    import io
-    StringIO = io.StringIO
-    BytesIO = io.BytesIO
-else:
-    def b(s):
-        return s
-    def u(s):
-        return unicode(s, "unicode_escape")
-    int2byte = chr
-    import StringIO
-    StringIO = BytesIO = StringIO.StringIO
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-if PY3:
-    import builtins
-    exec_ = getattr(builtins, "exec")
-
-
-    def reraise(tp, value, tb=None):
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-
-
-    print_ = getattr(builtins, "print")
-    del builtins
-
-else:
-    def exec_(code, globs=None, locs=None):
-        """Execute code in a namespace."""
-        if globs is None:
-            frame = sys._getframe(1)
-            globs = frame.f_globals
-            if locs is None:
-                locs = frame.f_locals
-            del frame
-        elif locs is None:
-            locs = globs
-        exec("""exec code in globs, locs""")
-
-
-    exec_("""def reraise(tp, value, tb=None):
-    raise tp, value, tb
-""")
-
-
-    def print_(*args, **kwargs):
-        """The new-style print function."""
-        fp = kwargs.pop("file", sys.stdout)
-        if fp is None:
-            return
-        def write(data):
-            if not isinstance(data, basestring):
-                data = str(data)
-            fp.write(data)
-        want_unicode = False
-        sep = kwargs.pop("sep", None)
-        if sep is not None:
-            if isinstance(sep, unicode):
-                want_unicode = True
-            elif not isinstance(sep, str):
-                raise TypeError("sep must be None or a string")
-        end = kwargs.pop("end", None)
-        if end is not None:
-            if isinstance(end, unicode):
-                want_unicode = True
-            elif not isinstance(end, str):
-                raise TypeError("end must be None or a string")
-        if kwargs:
-            raise TypeError("invalid keyword arguments to print()")
-        if not want_unicode:
-            for arg in args:
-                if isinstance(arg, unicode):
-                    want_unicode = True
-                    break
-        if want_unicode:
-            newline = unicode("\n")
-            space = unicode(" ")
-        else:
-            newline = "\n"
-            space = " "
-        if sep is None:
-            sep = space
-        if end is None:
-            end = newline
-        for i, arg in enumerate(args):
-            if i:
-                write(sep)
-            write(arg)
-        write(end)
-
-_add_doc(reraise, """Reraise an exception.""")
-
-
-def with_metaclass(meta, base=object):
-    """Create a base class with a metaclass."""
-    return meta("NewBase", (base,), {})
diff --git a/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
deleted file mode 100644
index dd59a75..0000000
--- a/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-try:
-    # Python 3.2+
-    from ssl import CertificateError, match_hostname
-except ImportError:
-    try:
-        # Backport of the function from a pypi module
-        from backports.ssl_match_hostname import CertificateError, match_hostname
-    except ImportError:
-        # Our vendored copy
-        from ._implementation import CertificateError, match_hostname
-
-# Not needed, but documenting what we provide.
-__all__ = ('CertificateError', 'match_hostname')
diff --git a/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
deleted file mode 100644
index 52f4287..0000000
--- a/python/ext-libs/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
+++ /dev/null
@@ -1,105 +0,0 @@
-"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
-
-# Note: This file is under the PSF license as the code comes from the python
-# stdlib.   http://docs.python.org/3/license.html
-
-import re
-
-__version__ = '3.4.0.2'
-
-class CertificateError(ValueError):
-    pass
-
-
-def _dnsname_match(dn, hostname, max_wildcards=1):
-    """Matching according to RFC 6125, section 6.4.3
-
-    http://tools.ietf.org/html/rfc6125#section-6.4.3
-    """
-    pats = []
-    if not dn:
-        return False
-
-    # Ported from python3-syntax:
-    # leftmost, *remainder = dn.split(r'.')
-    parts = dn.split(r'.')
-    leftmost = parts[0]
-    remainder = parts[1:]
-
-    wildcards = leftmost.count('*')
-    if wildcards > max_wildcards:
-        # Issue #17980: avoid denials of service by refusing more
-        # than one wildcard per fragment.  A survey of established
-        # policy among SSL implementations showed it to be a
-        # reasonable choice.
-        raise CertificateError(
-            "too many wildcards in certificate DNS name: " + repr(dn))
-
-    # speed up common case w/o wildcards
-    if not wildcards:
-        return dn.lower() == hostname.lower()
-
-    # RFC 6125, section 6.4.3, subitem 1.
-    # The client SHOULD NOT attempt to match a presented identifier in which
-    # the wildcard character comprises a label other than the left-most label.
-    if leftmost == '*':
-        # When '*' is a fragment by itself, it matches a non-empty dotless
-        # fragment.
-        pats.append('[^.]+')
-    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
-        # RFC 6125, section 6.4.3, subitem 3.
-        # The client SHOULD NOT attempt to match a presented identifier
-        # where the wildcard character is embedded within an A-label or
-        # U-label of an internationalized domain name.
-        pats.append(re.escape(leftmost))
-    else:
-        # Otherwise, '*' matches any dotless string, e.g. www*
-        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
-    # add the remaining fragments, ignore any wildcards
-    for frag in remainder:
-        pats.append(re.escape(frag))
-
-    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
-    return pat.match(hostname)
-
-
-def match_hostname(cert, hostname):
-    """Verify that *cert* (in decoded format as returned by
-    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
-    rules are followed, but IP addresses are not accepted for *hostname*.
-
-    CertificateError is raised on failure. On success, the function
-    returns nothing.
-    """
-    if not cert:
-        raise ValueError("empty or no certificate")
-    dnsnames = []
-    san = cert.get('subjectAltName', ())
-    for key, value in san:
-        if key == 'DNS':
-            if _dnsname_match(value, hostname):
-                return
-            dnsnames.append(value)
-    if not dnsnames:
-        # The subject is only checked when there is no dNSName entry
-        # in subjectAltName
-        for sub in cert.get('subject', ()):
-            for key, value in sub:
-                # XXX according to RFC 2818, the most specific Common Name
-                # must be used.
-                if key == 'commonName':
-                    if _dnsname_match(value, hostname):
-                        return
-                    dnsnames.append(value)
-    if len(dnsnames) > 1:
-        raise CertificateError("hostname %r "
-            "doesn't match either of %s"
-            % (hostname, ', '.join(map(repr, dnsnames))))
-    elif len(dnsnames) == 1:
-        raise CertificateError("hostname %r "
-            "doesn't match %r"
-            % (hostname, dnsnames[0]))
-    else:
-        raise CertificateError("no appropriate commonName or "
-            "subjectAltName fields were found")
diff --git a/python/ext-libs/requests/packages/urllib3/poolmanager.py b/python/ext-libs/requests/packages/urllib3/poolmanager.py
deleted file mode 100644
index 1023dcb..0000000
--- a/python/ext-libs/requests/packages/urllib3/poolmanager.py
+++ /dev/null
@@ -1,284 +0,0 @@
-from __future__ import absolute_import
-import logging
-
-try:  # Python 3
-    from urllib.parse import urljoin
-except ImportError:
-    from urlparse import urljoin
-
-from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-from .connectionpool import port_by_scheme
-from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
-from .request import RequestMethods
-from .util.url import parse_url
-from .util.retry import Retry
-
-
-__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
-
-
-log = logging.getLogger(__name__)
-
-SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
-                'ssl_version', 'ca_cert_dir')
-
-pool_classes_by_scheme = {
-    'http': HTTPConnectionPool,
-    'https': HTTPSConnectionPool,
-}
-
-
-class PoolManager(RequestMethods):
-    """
-    Allows for arbitrary requests while transparently keeping track of
-    necessary connection pools for you.
-
-    :param num_pools:
-        Number of connection pools to cache before discarding the least
-        recently used pool.
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-
-    :param \**connection_pool_kw:
-        Additional parameters are used to create fresh
-        :class:`urllib3.connectionpool.ConnectionPool` instances.
-
-    Example::
-
-        >>> manager = PoolManager(num_pools=2)
-        >>> r = manager.request('GET', 'http://google.com/')
-        >>> r = manager.request('GET', 'http://google.com/mail')
-        >>> r = manager.request('GET', 'http://yahoo.com/')
-        >>> len(manager.pools)
-        2
-
-    """
-
-    proxy = None
-
-    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
-        RequestMethods.__init__(self, headers)
-        self.connection_pool_kw = connection_pool_kw
-        self.pools = RecentlyUsedContainer(num_pools,
-                                           dispose_func=lambda p: p.close())
-
-        # Locally set the pool classes so other PoolManagers can override them.
-        self.pool_classes_by_scheme = pool_classes_by_scheme
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.clear()
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def _new_pool(self, scheme, host, port):
-        """
-        Create a new :class:`ConnectionPool` based on host, port and scheme.
-
-        This method is used to actually create the connection pools handed out
-        by :meth:`connection_from_url` and companion methods. It is intended
-        to be overridden for customization.
-        """
-        pool_cls = self.pool_classes_by_scheme[scheme]
-        kwargs = self.connection_pool_kw
-        if scheme == 'http':
-            kwargs = self.connection_pool_kw.copy()
-            for kw in SSL_KEYWORDS:
-                kwargs.pop(kw, None)
-
-        return pool_cls(host, port, **kwargs)
-
-    def clear(self):
-        """
-        Empty our store of pools and direct them all to close.
-
-        This will not affect in-flight connections, but they will not be
-        re-used after completion.
-        """
-        self.pools.clear()
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        """
-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
-
-        If ``port`` isn't given, it will be derived from the ``scheme`` using
-        ``urllib3.connectionpool.port_by_scheme``.
-        """
-
-        if not host:
-            raise LocationValueError("No host specified.")
-
-        scheme = scheme or 'http'
-        port = port or port_by_scheme.get(scheme, 80)
-        pool_key = (scheme, host, port)
-
-        with self.pools.lock:
-            # If the scheme, host, or port doesn't match existing open
-            # connections, open a new ConnectionPool.
-            pool = self.pools.get(pool_key)
-            if pool:
-                return pool
-
-            # Make a fresh ConnectionPool of the desired type
-            pool = self._new_pool(scheme, host, port)
-            self.pools[pool_key] = pool
-
-        return pool
-
-    def connection_from_url(self, url):
-        """
-        Similar to :func:`urllib3.connectionpool.connection_from_url` but
-        doesn't pass any additional parameters to the
-        :class:`urllib3.connectionpool.ConnectionPool` constructor.
-
-        Additional parameters are taken from the :class:`.PoolManager`
-        constructor.
-        """
-        u = parse_url(url)
-        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        """
-        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
-        with custom cross-host redirect logic and only sends the request-uri
-        portion of the ``url``.
-
-        The given ``url`` parameter must be absolute, such that an appropriate
-        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
-        """
-        u = parse_url(url)
-        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-        kw['assert_same_host'] = False
-        kw['redirect'] = False
-        if 'headers' not in kw:
-            kw['headers'] = self.headers
-
-        if self.proxy is not None and u.scheme == "http":
-            response = conn.urlopen(method, url, **kw)
-        else:
-            response = conn.urlopen(method, u.request_uri, **kw)
-
-        redirect_location = redirect and response.get_redirect_location()
-        if not redirect_location:
-            return response
-
-        # Support relative URLs for redirecting.
-        redirect_location = urljoin(url, redirect_location)
-
-        # RFC 7231, Section 6.4.4
-        if response.status == 303:
-            method = 'GET'
-
-        retries = kw.get('retries')
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(retries, redirect=redirect)
-
-        try:
-            retries = retries.increment(method, url, response=response, _pool=conn)
-        except MaxRetryError:
-            if retries.raise_on_redirect:
-                raise
-            return response
-
-        kw['retries'] = retries
-        kw['redirect'] = redirect
-
-        log.info("Redirecting %s -> %s", url, redirect_location)
-        return self.urlopen(method, redirect_location, **kw)
-
-
-class ProxyManager(PoolManager):
-    """
-    Behaves just like :class:`PoolManager`, but sends all requests through
-    the defined proxy, using the CONNECT method for HTTPS URLs.
-
-    :param proxy_url:
-        The URL of the proxy to be used.
-
-    :param proxy_headers:
-        A dictionary contaning headers that will be sent to the proxy. In case
-        of HTTP they are being sent with each request, while in the
-        HTTPS/CONNECT case they are sent only once. Could be used for proxy
-        authentication.
-
-    Example:
-        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
-        >>> r1 = proxy.request('GET', 'http://google.com/')
-        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
-        >>> len(proxy.pools)
-        1
-        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
-        >>> r4 = proxy.request('GET', 'https://twitter.com/')
-        >>> len(proxy.pools)
-        3
-
-    """
-
-    def __init__(self, proxy_url, num_pools=10, headers=None,
-                 proxy_headers=None, **connection_pool_kw):
-
-        if isinstance(proxy_url, HTTPConnectionPool):
-            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
-                                        proxy_url.port)
-        proxy = parse_url(proxy_url)
-        if not proxy.port:
-            port = port_by_scheme.get(proxy.scheme, 80)
-            proxy = proxy._replace(port=port)
-
-        if proxy.scheme not in ("http", "https"):
-            raise ProxySchemeUnknown(proxy.scheme)
-
-        self.proxy = proxy
-        self.proxy_headers = proxy_headers or {}
-
-        connection_pool_kw['_proxy'] = self.proxy
-        connection_pool_kw['_proxy_headers'] = self.proxy_headers
-
-        super(ProxyManager, self).__init__(
-            num_pools, headers, **connection_pool_kw)
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        if scheme == "https":
-            return super(ProxyManager, self).connection_from_host(
-                host, port, scheme)
-
-        return super(ProxyManager, self).connection_from_host(
-            self.proxy.host, self.proxy.port, self.proxy.scheme)
-
-    def _set_proxy_headers(self, url, headers=None):
-        """
-        Sets headers needed by proxies: specifically, the Accept and Host
-        headers. Only sets headers not provided by the user.
-        """
-        headers_ = {'Accept': '*/*'}
-
-        netloc = parse_url(url).netloc
-        if netloc:
-            headers_['Host'] = netloc
-
-        if headers:
-            headers_.update(headers)
-        return headers_
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
-        u = parse_url(url)
-
-        if u.scheme == "http":
-            # For proxied HTTPS requests, httplib sets the necessary headers
-            # on the CONNECT to the proxy. For HTTP, we'll definitely
-            # need to set 'Host' at the very least.
-            headers = kw.get('headers', self.headers)
-            kw['headers'] = self._set_proxy_headers(url, headers)
-
-        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
-
-
-def proxy_from_url(url, **kw):
-    return ProxyManager(proxy_url=url, **kw)
diff --git a/python/ext-libs/requests/packages/urllib3/request.py b/python/ext-libs/requests/packages/urllib3/request.py
deleted file mode 100644
index d5aa62d..0000000
--- a/python/ext-libs/requests/packages/urllib3/request.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from __future__ import absolute_import
-try:
-    from urllib.parse import urlencode
-except ImportError:
-    from urllib import urlencode
-
-from .filepost import encode_multipart_formdata
-
-
-__all__ = ['RequestMethods']
-
-
-class RequestMethods(object):
-    """
-    Convenience mixin for classes who implement a :meth:`urlopen` method, such
-    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
-    :class:`~urllib3.poolmanager.PoolManager`.
-
-    Provides behavior for making common types of HTTP request methods and
-    decides which type of request field encoding to use.
-
-    Specifically,
-
-    :meth:`.request_encode_url` is for sending requests whose fields are
-    encoded in the URL (such as GET, HEAD, DELETE).
-
-    :meth:`.request_encode_body` is for sending requests whose fields are
-    encoded in the *body* of the request using multipart or www-form-urlencoded
-    (such as for POST, PUT, PATCH).
-
-    :meth:`.request` is for making any kind of request, it will look up the
-    appropriate encoding format and use one of the above two methods to make
-    the request.
-
-    Initializer parameters:
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-    """
-
-    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
-
-    def __init__(self, headers=None):
-        self.headers = headers or {}
-
-    def urlopen(self, method, url, body=None, headers=None,
-                encode_multipart=True, multipart_boundary=None,
-                **kw):  # Abstract
-        raise NotImplemented("Classes extending RequestMethods must implement "
-                             "their own ``urlopen`` method.")
-
-    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the appropriate encoding of
-        ``fields`` based on the ``method`` used.
-
-        This is a convenience method that requires the least amount of manual
-        effort. It can be used in most situations, while still having the
-        option to drop down to more specific methods when necessary, such as
-        :meth:`request_encode_url`, :meth:`request_encode_body`,
-        or even the lowest level :meth:`urlopen`.
-        """
-        method = method.upper()
-
-        if method in self._encode_url_methods:
-            return self.request_encode_url(method, url, fields=fields,
-                                           headers=headers,
-                                           **urlopen_kw)
-        else:
-            return self.request_encode_body(method, url, fields=fields,
-                                            headers=headers,
-                                            **urlopen_kw)
-
-    def request_encode_url(self, method, url, fields=None, headers=None,
-                           **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': headers}
-        extra_kw.update(urlopen_kw)
-
-        if fields:
-            url += '?' + urlencode(fields)
-
-        return self.urlopen(method, url, **extra_kw)
-
-    def request_encode_body(self, method, url, fields=None, headers=None,
-                            encode_multipart=True, multipart_boundary=None,
-                            **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the body. This is useful for request methods like POST, PUT, PATCH, etc.
-
-        When ``encode_multipart=True`` (default), then
-        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
-        the payload with the appropriate content type. Otherwise
-        :meth:`urllib.urlencode` is used with the
-        'application/x-www-form-urlencoded' content type.
-
-        Multipart encoding must be used when posting files, and it's reasonably
-        safe to use it in other times too. However, it may break request
-        signing, such as with OAuth.
-
-        Supports an optional ``fields`` parameter of key/value strings AND
-        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
-        the MIME type is optional. For example::
-
-            fields = {
-                'foo': 'bar',
-                'fakefile': ('foofile.txt', 'contents of foofile'),
-                'realfile': ('barfile.txt', open('realfile').read()),
-                'typedfile': ('bazfile.bin', open('bazfile').read(),
-                              'image/jpeg'),
-                'nonamefile': 'contents of nonamefile field',
-            }
-
-        When uploading a file, providing a filename (the first parameter of the
-        tuple) is optional but recommended to best mimick behavior of browsers.
-
-        Note that if ``headers`` are supplied, the 'Content-Type' header will
-        be overwritten because it depends on the dynamic random boundary string
-        which is used to compose the body of the request. The random boundary
-        string can be explicitly set with the ``multipart_boundary`` parameter.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': {}}
-
-        if fields:
-            if 'body' in urlopen_kw:
-                raise TypeError(
-                    "request got values for both 'fields' and 'body', can only specify one.")
-
-            if encode_multipart:
-                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
-            else:
-                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
-
-            extra_kw['body'] = body
-            extra_kw['headers'] = {'Content-Type': content_type}
-
-        extra_kw['headers'].update(headers)
-        extra_kw.update(urlopen_kw)
-
-        return self.urlopen(method, url, **extra_kw)
diff --git a/python/ext-libs/requests/packages/urllib3/response.py b/python/ext-libs/requests/packages/urllib3/response.py
deleted file mode 100644
index ac1b2f1..0000000
--- a/python/ext-libs/requests/packages/urllib3/response.py
+++ /dev/null
@@ -1,526 +0,0 @@
-from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
-import io
-from socket import timeout as SocketTimeout
-from socket import error as SocketError
-
-from ._collections import HTTPHeaderDict
-from .exceptions import (
-    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
-)
-from .packages.six import string_types as basestring, binary_type, PY3
-from .packages.six.moves import http_client as httplib
-from .connection import HTTPException, BaseSSLError
-from .util.response import is_fp_closed, is_response_to_head
-
-
-class DeflateDecoder(object):
-
-    def __init__(self):
-        self._first_try = True
-        self._data = binary_type()
-        self._obj = zlib.decompressobj()
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-
-        if not self._first_try:
-            return self._obj.decompress(data)
-
-        self._data += data
-        try:
-            return self._obj.decompress(data)
-        except zlib.error:
-            self._first_try = False
-            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
-            try:
-                return self.decompress(self._data)
-            finally:
-                self._data = None
-
-
-class GzipDecoder(object):
-
-    def __init__(self):
-        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-        return self._obj.decompress(data)
-
-
-def _get_decoder(mode):
-    if mode == 'gzip':
-        return GzipDecoder()
-
-    return DeflateDecoder()
-
-
-class HTTPResponse(io.IOBase):
-    """
-    HTTP Response container.
-
-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
-    loaded and decoded on-demand when the ``data`` property is accessed.  This
-    class is also compatible with the Python standard library's :mod:`io`
-    module, and can hence be treated as a readable object in the context of that
-    framework.
-
-    Extra parameters for behaviour not present in httplib.HTTPResponse:
-
-    :param preload_content:
-        If True, the response's body will be preloaded during construction.
-
-    :param decode_content:
-        If True, attempts to decode specific content-encoding's based on headers
-        (like 'gzip' and 'deflate') will be skipped and raw data will be used
-        instead.
-
-    :param original_response:
-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
-        object, it's convenient to include the original for debug purposes. It's
-        otherwise unused.
-    """
-
-    CONTENT_DECODERS = ['gzip', 'deflate']
-    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
-
-    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
-                 strict=0, preload_content=True, decode_content=True,
-                 original_response=None, pool=None, connection=None):
-
-        if isinstance(headers, HTTPHeaderDict):
-            self.headers = headers
-        else:
-            self.headers = HTTPHeaderDict(headers)
-        self.status = status
-        self.version = version
-        self.reason = reason
-        self.strict = strict
-        self.decode_content = decode_content
-
-        self._decoder = None
-        self._body = None
-        self._fp = None
-        self._original_response = original_response
-        self._fp_bytes_read = 0
-
-        if body and isinstance(body, (basestring, binary_type)):
-            self._body = body
-
-        self._pool = pool
-        self._connection = connection
-
-        if hasattr(body, 'read'):
-            self._fp = body
-
-        # Are we using the chunked-style of transfer encoding?
-        self.chunked = False
-        self.chunk_left = None
-        tr_enc = self.headers.get('transfer-encoding', '').lower()
-        # Don't incur the penalty of creating a list and then discarding it
-        encodings = (enc.strip() for enc in tr_enc.split(","))
-        if "chunked" in encodings:
-            self.chunked = True
-
-        # If requested, preload the body.
-        if preload_content and not self._body:
-            self._body = self.read(decode_content=decode_content)
-
-    def get_redirect_location(self):
-        """
-        Should we redirect and where to?
-
-        :returns: Truthy redirect location string if we got a redirect status
-            code and valid location. ``None`` if redirect status and no
-            location. ``False`` if not a redirect status code.
-        """
-        if self.status in self.REDIRECT_STATUSES:
-            return self.headers.get('location')
-
-        return False
-
-    def release_conn(self):
-        if not self._pool or not self._connection:
-            return
-
-        self._pool._put_conn(self._connection)
-        self._connection = None
-
-    @property
-    def data(self):
-        # For backwords-compat with earlier urllib3 0.4 and earlier.
-        if self._body:
-            return self._body
-
-        if self._fp:
-            return self.read(cache_content=True)
-
-    def tell(self):
-        """
-        Obtain the number of bytes pulled over the wire so far. May differ from
-        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
-        are encoded on the wire (e.g, compressed).
-        """
-        return self._fp_bytes_read
-
-    def _init_decoder(self):
-        """
-        Set-up the _decoder attribute if necessar.
-        """
-        # Note: content-encoding value should be case-insensitive, per RFC 7230
-        # Section 3.2
-        content_encoding = self.headers.get('content-encoding', '').lower()
-        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
-            self._decoder = _get_decoder(content_encoding)
-
-    def _decode(self, data, decode_content, flush_decoder):
-        """
-        Decode the data passed in and potentially flush the decoder.
-        """
-        try:
-            if decode_content and self._decoder:
-                data = self._decoder.decompress(data)
-        except (IOError, zlib.error) as e:
-            content_encoding = self.headers.get('content-encoding', '').lower()
-            raise DecodeError(
-                "Received response with content-encoding: %s, but "
-                "failed to decode it." % content_encoding, e)
-
-        if flush_decoder and decode_content:
-            data += self._flush_decoder()
-
-        return data
-
-    def _flush_decoder(self):
-        """
-        Flushes the decoder. Should only be called if the decoder is actually
-        being used.
-        """
-        if self._decoder:
-            buf = self._decoder.decompress(b'')
-            return buf + self._decoder.flush()
-
-        return b''
-
-    @contextmanager
-    def _error_catcher(self):
-        """
-        Catch low-level python exceptions, instead re-raising urllib3
-        variants, so that low-level exceptions are not leaked in the
-        high-level api.
-
-        On exit, release the connection back to the pool.
-        """
-        clean_exit = False
-
-        try:
-            try:
-                yield
-
-            except SocketTimeout:
-                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
-                # there is yet no clean way to get at it from this context.
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except BaseSSLError as e:
-                # FIXME: Is there a better way to differentiate between SSLErrors?
-                if 'read operation timed out' not in str(e):  # Defensive:
-                    # This shouldn't happen but just in case we're missing an edge
-                    # case, let's avoid swallowing SSL errors.
-                    raise
-
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except (HTTPException, SocketError) as e:
-                # This includes IncompleteRead.
-                raise ProtocolError('Connection broken: %r' % e, e)
-
-            # If no exception is thrown, we should avoid cleaning up
-            # unnecessarily.
-            clean_exit = True
-        finally:
-            # If we didn't terminate cleanly, we need to throw away our
-            # connection.
-            if not clean_exit:
-                # The response may not be closed but we're not going to use it
-                # anymore so close it now to ensure that the connection is
-                # released back to the pool.
-                if self._original_response:
-                    self._original_response.close()
-
-                # Closing the response may not actually be sufficient to close
-                # everything, so if we have a hold of the connection close that
-                # too.
-                if self._connection:
-                    self._connection.close()
-
-            # If we hold the original response but it's closed now, we should
-            # return the connection back to the pool.
-            if self._original_response and self._original_response.isclosed():
-                self.release_conn()
-
-    def read(self, amt=None, decode_content=None, cache_content=False):
-        """
-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
-        parameters: ``decode_content`` and ``cache_content``.
-
-        :param amt:
-            How much of the content to read. If specified, caching is skipped
-            because it doesn't make sense to cache partial content as the full
-            response.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-
-        :param cache_content:
-            If True, will save the returned data such that the same result is
-            returned despite of the state of the underlying file object. This
-            is useful if you want the ``.data`` property to continue working
-            after having ``.read()`` the file object. (Overridden if ``amt`` is
-            set.)
-        """
-        self._init_decoder()
-        if decode_content is None:
-            decode_content = self.decode_content
-
-        if self._fp is None:
-            return
-
-        flush_decoder = False
-        data = None
-
-        with self._error_catcher():
-            if amt is None:
-                # cStringIO doesn't like amt=None
-                data = self._fp.read()
-                flush_decoder = True
-            else:
-                cache_content = False
-                data = self._fp.read(amt)
-                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
-                    # Close the connection when no data is returned
-                    #
-                    # This is redundant to what httplib/http.client _should_
-                    # already do.  However, versions of python released before
-                    # December 15, 2012 (http://bugs.python.org/issue16298) do
-                    # not properly close the connection in all cases. There is
-                    # no harm in redundantly calling close.
-                    self._fp.close()
-                    flush_decoder = True
-
-        if data:
-            self._fp_bytes_read += len(data)
-
-            data = self._decode(data, decode_content, flush_decoder)
-
-            if cache_content:
-                self._body = data
-
-        return data
-
-    def stream(self, amt=2**16, decode_content=None):
-        """
-        A generator wrapper for the read() method. A call will block until
-        ``amt`` bytes have been read from the connection or until the
-        connection is closed.
-
-        :param amt:
-            How much of the content to read. The generator will return up to
-            much data per iteration, but may return less. This is particularly
-            likely when using compressed data. However, the empty string will
-            never be returned.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        if self.chunked:
-            for line in self.read_chunked(amt, decode_content=decode_content):
-                yield line
-        else:
-            while not is_fp_closed(self._fp):
-                data = self.read(amt=amt, decode_content=decode_content)
-
-                if data:
-                    yield data
-
-    @classmethod
-    def from_httplib(ResponseCls, r, **response_kw):
-        """
-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
-        corresponding :class:`urllib3.response.HTTPResponse` object.
-
-        Remaining parameters are passed to the HTTPResponse constructor, along
-        with ``original_response=r``.
-        """
-        headers = r.msg
-
-        if not isinstance(headers, HTTPHeaderDict):
-            if PY3:  # Python 3
-                headers = HTTPHeaderDict(headers.items())
-            else:  # Python 2
-                headers = HTTPHeaderDict.from_httplib(headers)
-
-        # HTTPResponse objects in Python 3 don't have a .strict attribute
-        strict = getattr(r, 'strict', 0)
-        resp = ResponseCls(body=r,
-                           headers=headers,
-                           status=r.status,
-                           version=r.version,
-                           reason=r.reason,
-                           strict=strict,
-                           original_response=r,
-                           **response_kw)
-        return resp
-
-    # Backwards-compatibility methods for httplib.HTTPResponse
-    def getheaders(self):
-        return self.headers
-
-    def getheader(self, name, default=None):
-        return self.headers.get(name, default)
-
-    # Overrides from io.IOBase
-    def close(self):
-        if not self.closed:
-            self._fp.close()
-
-        if self._connection:
-            self._connection.close()
-
-    @property
-    def closed(self):
-        if self._fp is None:
-            return True
-        elif hasattr(self._fp, 'closed'):
-            return self._fp.closed
-        elif hasattr(self._fp, 'isclosed'):  # Python 2
-            return self._fp.isclosed()
-        else:
-            return True
-
-    def fileno(self):
-        if self._fp is None:
-            raise IOError("HTTPResponse has no file to get a fileno from")
-        elif hasattr(self._fp, "fileno"):
-            return self._fp.fileno()
-        else:
-            raise IOError("The file-like object this HTTPResponse is wrapped "
-                          "around has no file descriptor")
-
-    def flush(self):
-        if self._fp is not None and hasattr(self._fp, 'flush'):
-            return self._fp.flush()
-
-    def readable(self):
-        # This method is required for `io` module compatibility.
-        return True
-
-    def readinto(self, b):
-        # This method is required for `io` module compatibility.
-        temp = self.read(len(b))
-        if len(temp) == 0:
-            return 0
-        else:
-            b[:len(temp)] = temp
-            return len(temp)
-
-    def _update_chunk_length(self):
-        # First, we'll figure out length of a chunk and then
-        # we'll try to read it from socket.
-        if self.chunk_left is not None:
-            return
-        line = self._fp.fp.readline()
-        line = line.split(b';', 1)[0]
-        try:
-            self.chunk_left = int(line, 16)
-        except ValueError:
-            # Invalid chunked protocol response, abort.
-            self.close()
-            raise httplib.IncompleteRead(line)
-
-    def _handle_chunk(self, amt):
-        returned_chunk = None
-        if amt is None:
-            chunk = self._fp._safe_read(self.chunk_left)
-            returned_chunk = chunk
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        elif amt < self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self.chunk_left = self.chunk_left - amt
-            returned_chunk = value
-        elif amt == self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-            returned_chunk = value
-        else:  # amt > self.chunk_left
-            returned_chunk = self._fp._safe_read(self.chunk_left)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        return returned_chunk
-
-    def read_chunked(self, amt=None, decode_content=None):
-        """
-        Similar to :meth:`HTTPResponse.read`, but with an additional
-        parameter: ``decode_content``.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        self._init_decoder()
-        # FIXME: Rewrite this method and make it a class with a better structured logic.
-        if not self.chunked:
-            raise ResponseNotChunked(
-                "Response is not chunked. "
-                "Header 'transfer-encoding: chunked' is missing.")
-
-        # Don't bother reading the body of a HEAD request.
-        if self._original_response and is_response_to_head(self._original_response):
-            self._original_response.close()
-            return
-
-        with self._error_catcher():
-            while True:
-                self._update_chunk_length()
-                if self.chunk_left == 0:
-                    break
-                chunk = self._handle_chunk(amt)
-                decoded = self._decode(chunk, decode_content=decode_content,
-                                       flush_decoder=False)
-                if decoded:
-                    yield decoded
-
-            if decode_content:
-                # On CPython and PyPy, we should never need to flush the
-                # decoder. However, on Jython we *might* need to, so
-                # lets defensively do it anyway.
-                decoded = self._flush_decoder()
-                if decoded:  # Platform-specific: Jython.
-                    yield decoded
-
-            # Chunk content ends with \r\n: discard it.
-            while True:
-                line = self._fp.fp.readline()
-                if not line:
-                    # Some sites may not end with '\r\n'.
-                    break
-                if line == b'\r\n':
-                    break
-
-            # We read everything; close the "file".
-            if self._original_response:
-                self._original_response.close()
diff --git a/python/ext-libs/requests/packages/urllib3/util/__init__.py b/python/ext-libs/requests/packages/urllib3/util/__init__.py
deleted file mode 100644
index 4778cf9..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/__init__.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from __future__ import absolute_import
-# For backwards compatibility, provide imports that used to be here.
-from .connection import is_connection_dropped
-from .request import make_headers
-from .response import is_fp_closed
-from .ssl_ import (
-    SSLContext,
-    HAS_SNI,
-    IS_PYOPENSSL,
-    assert_fingerprint,
-    resolve_cert_reqs,
-    resolve_ssl_version,
-    ssl_wrap_socket,
-)
-from .timeout import (
-    current_time,
-    Timeout,
-)
-
-from .retry import Retry
-from .url import (
-    get_host,
-    parse_url,
-    split_first,
-    Url,
-)
-
-__all__ = (
-    'HAS_SNI',
-    'IS_PYOPENSSL',
-    'SSLContext',
-    'Retry',
-    'Timeout',
-    'Url',
-    'assert_fingerprint',
-    'current_time',
-    'is_connection_dropped',
-    'is_fp_closed',
-    'get_host',
-    'parse_url',
-    'make_headers',
-    'resolve_cert_reqs',
-    'resolve_ssl_version',
-    'split_first',
-    'ssl_wrap_socket',
-)
diff --git a/python/ext-libs/requests/packages/urllib3/util/connection.py b/python/ext-libs/requests/packages/urllib3/util/connection.py
deleted file mode 100644
index 01a4812..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/connection.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from __future__ import absolute_import
-import socket
-try:
-    from select import poll, POLLIN
-except ImportError:  # `poll` doesn't exist on OSX and other platforms
-    poll = False
-    try:
-        from select import select
-    except ImportError:  # `select` doesn't exist on AppEngine.
-        select = False
-
-
-def is_connection_dropped(conn):  # Platform-specific
-    """
-    Returns True if the connection is dropped and should be closed.
-
-    :param conn:
-        :class:`httplib.HTTPConnection` object.
-
-    Note: For platforms like AppEngine, this will always return ``False`` to
-    let the platform handle connection recycling transparently for us.
-    """
-    sock = getattr(conn, 'sock', False)
-    if sock is False:  # Platform-specific: AppEngine
-        return False
-    if sock is None:  # Connection already closed (such as by httplib).
-        return True
-
-    if not poll:
-        if not select:  # Platform-specific: AppEngine
-            return False
-
-        try:
-            return select([sock], [], [], 0.0)[0]
-        except socket.error:
-            return True
-
-    # This version is better on platforms that support it.
-    p = poll()
-    p.register(sock, POLLIN)
-    for (fno, ev) in p.poll(0.0):
-        if fno == sock.fileno():
-            # Either data is buffered (bad), or the connection is dropped.
-            return True
-
-
-# This function is copied from socket.py in the Python 2.7 standard
-# library test suite. Added to its signature is only `socket_options`.
-def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
-                      source_address=None, socket_options=None):
-    """Connect to *address* and return the socket object.
-
-    Convenience function.  Connect to *address* (a 2-tuple ``(host,
-    port)``) and return the socket object.  Passing the optional
-    *timeout* parameter will set the timeout on the socket instance
-    before attempting to connect.  If no *timeout* is supplied, the
-    global default timeout setting returned by :func:`getdefaulttimeout`
-    is used.  If *source_address* is set it must be a tuple of (host, port)
-    for the socket to bind as a source address before making the connection.
-    An host of '' or port 0 tells the OS to use the default.
-    """
-
-    host, port = address
-    if host.startswith('['):
-        host = host.strip('[]')
-    err = None
-    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
-        af, socktype, proto, canonname, sa = res
-        sock = None
-        try:
-            sock = socket.socket(af, socktype, proto)
-
-            # If provided, set socket level options before connecting.
-            # This is the only addition urllib3 makes to this function.
-            _set_socket_options(sock, socket_options)
-
-            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
-                sock.settimeout(timeout)
-            if source_address:
-                sock.bind(source_address)
-            sock.connect(sa)
-            return sock
-
-        except socket.error as e:
-            err = e
-            if sock is not None:
-                sock.close()
-                sock = None
-
-    if err is not None:
-        raise err
-
-    raise socket.error("getaddrinfo returns an empty list")
-
-
-def _set_socket_options(sock, options):
-    if options is None:
-        return
-
-    for opt in options:
-        sock.setsockopt(*opt)
diff --git a/python/ext-libs/requests/packages/urllib3/util/request.py b/python/ext-libs/requests/packages/urllib3/util/request.py
deleted file mode 100644
index 7377931..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/request.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from __future__ import absolute_import
-from base64 import b64encode
-
-from ..packages.six import b
-
-ACCEPT_ENCODING = 'gzip,deflate'
-
-
-def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
-                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
-    """
-    Shortcuts for generating request headers.
-
-    :param keep_alive:
-        If ``True``, adds 'connection: keep-alive' header.
-
-    :param accept_encoding:
-        Can be a boolean, list, or string.
-        ``True`` translates to 'gzip,deflate'.
-        List will get joined by comma.
-        String will be used as provided.
-
-    :param user_agent:
-        String representing the user-agent you want, such as
-        "python-urllib3/0.6"
-
-    :param basic_auth:
-        Colon-separated username:password string for 'authorization: basic ...'
-        auth header.
-
-    :param proxy_basic_auth:
-        Colon-separated username:password string for 'proxy-authorization: basic ...'
-        auth header.
-
-    :param disable_cache:
-        If ``True``, adds 'cache-control: no-cache' header.
-
-    Example::
-
-        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
-        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
-        >>> make_headers(accept_encoding=True)
-        {'accept-encoding': 'gzip,deflate'}
-    """
-    headers = {}
-    if accept_encoding:
-        if isinstance(accept_encoding, str):
-            pass
-        elif isinstance(accept_encoding, list):
-            accept_encoding = ','.join(accept_encoding)
-        else:
-            accept_encoding = ACCEPT_ENCODING
-        headers['accept-encoding'] = accept_encoding
-
-    if user_agent:
-        headers['user-agent'] = user_agent
-
-    if keep_alive:
-        headers['connection'] = 'keep-alive'
-
-    if basic_auth:
-        headers['authorization'] = 'Basic ' + \
-            b64encode(b(basic_auth)).decode('utf-8')
-
-    if proxy_basic_auth:
-        headers['proxy-authorization'] = 'Basic ' + \
-            b64encode(b(proxy_basic_auth)).decode('utf-8')
-
-    if disable_cache:
-        headers['cache-control'] = 'no-cache'
-
-    return headers
diff --git a/python/ext-libs/requests/packages/urllib3/util/response.py b/python/ext-libs/requests/packages/urllib3/util/response.py
deleted file mode 100644
index 0b5c75c..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/response.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from __future__ import absolute_import
-from ..packages.six.moves import http_client as httplib
-
-from ..exceptions import HeaderParsingError
-
-
-def is_fp_closed(obj):
-    """
-    Checks whether a given file-like object is closed.
-
-    :param obj:
-        The file-like object to check.
-    """
-
-    try:
-        # Check via the official file-like-object way.
-        return obj.closed
-    except AttributeError:
-        pass
-
-    try:
-        # Check if the object is a container for another file-like object that
-        # gets released on exhaustion (e.g. HTTPResponse).
-        return obj.fp is None
-    except AttributeError:
-        pass
-
-    raise ValueError("Unable to determine whether fp is closed.")
-
-
-def assert_header_parsing(headers):
-    """
-    Asserts whether all headers have been successfully parsed.
-    Extracts encountered errors from the result of parsing headers.
-
-    Only works on Python 3.
-
-    :param headers: Headers to verify.
-    :type headers: `httplib.HTTPMessage`.
-
-    :raises urllib3.exceptions.HeaderParsingError:
-        If parsing errors are found.
-    """
-
-    # This will fail silently if we pass in the wrong kind of parameter.
-    # To make debugging easier add an explicit check.
-    if not isinstance(headers, httplib.HTTPMessage):
-        raise TypeError('expected httplib.Message, got {0}.'.format(
-            type(headers)))
-
-    defects = getattr(headers, 'defects', None)
-    get_payload = getattr(headers, 'get_payload', None)
-
-    unparsed_data = None
-    if get_payload:  # Platform-specific: Python 3.
-        unparsed_data = get_payload()
-
-    if defects or unparsed_data:
-        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
-
-
-def is_response_to_head(response):
-    """
-    Checks whether the request of a response has been a HEAD-request.
-    Handles the quirks of AppEngine.
-
-    :param conn:
-    :type conn: :class:`httplib.HTTPResponse`
-    """
-    # FIXME: Can we do this somehow without accessing private httplib _method?
-    method = response._method
-    if isinstance(method, int):  # Platform-specific: Appengine
-        return method == 3
-    return method.upper() == 'HEAD'
diff --git a/python/ext-libs/requests/packages/urllib3/util/retry.py b/python/ext-libs/requests/packages/urllib3/util/retry.py
deleted file mode 100644
index 2d3aa20..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/retry.py
+++ /dev/null
@@ -1,294 +0,0 @@
-from __future__ import absolute_import
-import time
-import logging
-
-from ..exceptions import (
-    ConnectTimeoutError,
-    MaxRetryError,
-    ProtocolError,
-    ReadTimeoutError,
-    ResponseError,
-)
-from ..packages import six
-
-
-log = logging.getLogger(__name__)
-
-
-class Retry(object):
-    """ Retry configuration.
-
-    Each retry attempt will create a new Retry object with updated values, so
-    they can be safely reused.
-
-    Retries can be defined as a default for a pool::
-
-        retries = Retry(connect=5, read=2, redirect=5)
-        http = PoolManager(retries=retries)
-        response = http.request('GET', 'http://example.com/')
-
-    Or per-request (which overrides the default for the pool)::
-
-        response = http.request('GET', 'http://example.com/', retries=Retry(10))
-
-    Retries can be disabled by passing ``False``::
-
-        response = http.request('GET', 'http://example.com/', retries=False)
-
-    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
-    retries are disabled, in which case the causing exception will be raised.
-
-    :param int total:
-        Total number of retries to allow. Takes precedence over other counts.
-
-        Set to ``None`` to remove this constraint and fall back on other
-        counts. It's a good idea to set this to some sensibly-high value to
-        account for unexpected edge cases and avoid infinite retry loops.
-
-        Set to ``0`` to fail on the first retry.
-
-        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
-
-    :param int connect:
-        How many connection-related errors to retry on.
-
-        These are errors raised before the request is sent to the remote server,
-        which we assume has not triggered the server to process the request.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-    :param int read:
-        How many times to retry on read errors.
-
-        These errors are raised after the request was sent to the server, so the
-        request may have side-effects.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-    :param int redirect:
-        How many redirects to perform. Limit this to avoid infinite redirect
-        loops.
-
-        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
-        308.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
-
-    :param iterable method_whitelist:
-        Set of uppercased HTTP method verbs that we should retry on.
-
-        By default, we only retry on methods which are considered to be
-        indempotent (multiple requests with the same parameters end with the
-        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
-
-    :param iterable status_forcelist:
-        A set of HTTP status codes that we should force a retry on.
-
-        By default, this is disabled with ``None``.
-
-    :param float backoff_factor:
-        A backoff factor to apply between attempts. urllib3 will sleep for::
-
-            {backoff factor} * (2 ^ ({number of total retries} - 1))
-
-        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
-        for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
-        than :attr:`Retry.BACKOFF_MAX`.
-
-        By default, backoff is disabled (set to 0).
-
-    :param bool raise_on_redirect: Whether, if the number of redirects is
-        exhausted, to raise a MaxRetryError, or to return a response with a
-        response code in the 3xx range.
-
-    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
-        whether we should raise an exception, or return a response,
-        if status falls in ``status_forcelist`` range and retries have
-        been exhausted.
-    """
-
-    DEFAULT_METHOD_WHITELIST = frozenset([
-        'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
-
-    #: Maximum backoff time.
-    BACKOFF_MAX = 120
-
-    def __init__(self, total=10, connect=None, read=None, redirect=None,
-                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
-                 backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
-                 _observed_errors=0):
-
-        self.total = total
-        self.connect = connect
-        self.read = read
-
-        if redirect is False or total is False:
-            redirect = 0
-            raise_on_redirect = False
-
-        self.redirect = redirect
-        self.status_forcelist = status_forcelist or set()
-        self.method_whitelist = method_whitelist
-        self.backoff_factor = backoff_factor
-        self.raise_on_redirect = raise_on_redirect
-        self.raise_on_status = raise_on_status
-        self._observed_errors = _observed_errors  # TODO: use .history instead?
-
-    def new(self, **kw):
-        params = dict(
-            total=self.total,
-            connect=self.connect, read=self.read, redirect=self.redirect,
-            method_whitelist=self.method_whitelist,
-            status_forcelist=self.status_forcelist,
-            backoff_factor=self.backoff_factor,
-            raise_on_redirect=self.raise_on_redirect,
-            raise_on_status=self.raise_on_status,
-            _observed_errors=self._observed_errors,
-        )
-        params.update(kw)
-        return type(self)(**params)
-
-    @classmethod
-    def from_int(cls, retries, redirect=True, default=None):
-        """ Backwards-compatibility for the old retries format."""
-        if retries is None:
-            retries = default if default is not None else cls.DEFAULT
-
-        if isinstance(retries, Retry):
-            return retries
-
-        redirect = bool(redirect) and None
-        new_retries = cls(retries, redirect=redirect)
-        log.debug("Converted retries value: %r -> %r", retries, new_retries)
-        return new_retries
-
-    def get_backoff_time(self):
-        """ Formula for computing the current backoff
-
-        :rtype: float
-        """
-        if self._observed_errors <= 1:
-            return 0
-
-        backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
-        return min(self.BACKOFF_MAX, backoff_value)
-
-    def sleep(self):
-        """ Sleep between retry attempts using an exponential backoff.
-
-        By default, the backoff factor is 0 and this method will return
-        immediately.
-        """
-        backoff = self.get_backoff_time()
-        if backoff <= 0:
-            return
-        time.sleep(backoff)
-
-    def _is_connection_error(self, err):
-        """ Errors when we're fairly sure that the server did not receive the
-        request, so it should be safe to retry.
-        """
-        return isinstance(err, ConnectTimeoutError)
-
-    def _is_read_error(self, err):
-        """ Errors that occur after the request has been started, so we should
-        assume that the server began processing it.
-        """
-        return isinstance(err, (ReadTimeoutError, ProtocolError))
-
-    def is_forced_retry(self, method, status_code):
-        """ Is this method/status code retryable? (Based on method/codes whitelists)
-        """
-        if self.method_whitelist and method.upper() not in self.method_whitelist:
-            return False
-
-        return self.status_forcelist and status_code in self.status_forcelist
-
-    def is_exhausted(self):
-        """ Are we out of retries? """
-        retry_counts = (self.total, self.connect, self.read, self.redirect)
-        retry_counts = list(filter(None, retry_counts))
-        if not retry_counts:
-            return False
-
-        return min(retry_counts) < 0
-
-    def increment(self, method=None, url=None, response=None, error=None,
-                  _pool=None, _stacktrace=None):
-        """ Return a new Retry object with incremented retry counters.
-
-        :param response: A response object, or None, if the server did not
-            return a response.
-        :type response: :class:`~urllib3.response.HTTPResponse`
-        :param Exception error: An error encountered during the request, or
-            None if the response was received successfully.
-
-        :return: A new ``Retry`` object.
-        """
-        if self.total is False and error:
-            # Disabled, indicate to re-raise the error.
-            raise six.reraise(type(error), error, _stacktrace)
-
-        total = self.total
-        if total is not None:
-            total -= 1
-
-        _observed_errors = self._observed_errors
-        connect = self.connect
-        read = self.read
-        redirect = self.redirect
-        cause = 'unknown'
-
-        if error and self._is_connection_error(error):
-            # Connect retry?
-            if connect is False:
-                raise six.reraise(type(error), error, _stacktrace)
-            elif connect is not None:
-                connect -= 1
-            _observed_errors += 1
-
-        elif error and self._is_read_error(error):
-            # Read retry?
-            if read is False:
-                raise six.reraise(type(error), error, _stacktrace)
-            elif read is not None:
-                read -= 1
-            _observed_errors += 1
-
-        elif response and response.get_redirect_location():
-            # Redirect retry?
-            if redirect is not None:
-                redirect -= 1
-            cause = 'too many redirects'
-
-        else:
-            # Incrementing because of a server error like a 500 in
-            # status_forcelist and a the given method is in the whitelist
-            _observed_errors += 1
-            cause = ResponseError.GENERIC_ERROR
-            if response and response.status:
-                cause = ResponseError.SPECIFIC_ERROR.format(
-                    status_code=response.status)
-
-        new_retry = self.new(
-            total=total,
-            connect=connect, read=read, redirect=redirect,
-            _observed_errors=_observed_errors)
-
-        if new_retry.is_exhausted():
-            raise MaxRetryError(_pool, url, error or ResponseError(cause))
-
-        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
-
-        return new_retry
-
-    def __repr__(self):
-        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
-                'read={self.read}, redirect={self.redirect})').format(
-                    cls=type(self), self=self)
-
-
-# For backwards compatibility (equivalent to pre-v1.9):
-Retry.DEFAULT = Retry(3)
diff --git a/python/ext-libs/requests/packages/urllib3/util/ssl_.py b/python/ext-libs/requests/packages/urllib3/util/ssl_.py
deleted file mode 100644
index e8d9e7d..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/ssl_.py
+++ /dev/null
@@ -1,320 +0,0 @@
-from __future__ import absolute_import
-import errno
-import warnings
-import hmac
-
-from binascii import hexlify, unhexlify
-from hashlib import md5, sha1, sha256
-
-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
-
-
-SSLContext = None
-HAS_SNI = False
-create_default_context = None
-IS_PYOPENSSL = False
-
-# Maps the length of a digest to a possible hash function producing this digest
-HASHFUNC_MAP = {
-    32: md5,
-    40: sha1,
-    64: sha256,
-}
-
-
-def _const_compare_digest_backport(a, b):
-    """
-    Compare two digests of equal length in constant time.
-
-    The digests must be of type str/bytes.
-    Returns True if the digests match, and False otherwise.
-    """
-    result = abs(len(a) - len(b))
-    for l, r in zip(bytearray(a), bytearray(b)):
-        result |= l ^ r
-    return result == 0
-
-
-_const_compare_digest = getattr(hmac, 'compare_digest',
-                                _const_compare_digest_backport)
-
-
-try:  # Test for SSL features
-    import ssl
-    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
-    from ssl import HAS_SNI  # Has SNI?
-except ImportError:
-    pass
-
-
-try:
-    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
-except ImportError:
-    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
-    OP_NO_COMPRESSION = 0x20000
-
-# A secure default.
-# Sources for more information on TLS ciphers:
-#
-# - https://wiki.mozilla.org/Security/Server_Side_TLS
-# - https://www.ssllabs.com/projects/best-practices/index.html
-# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
-#
-# The general intent is:
-# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
-# - prefer ECDHE over DHE for better performance,
-# - prefer any AES-GCM over any AES-CBC for better performance and security,
-# - use 3DES as fallback which is secure but slow,
-# - disable NULL authentication, MD5 MACs and DSS for security reasons.
-DEFAULT_CIPHERS = (
-    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
-    '!eNULL:!MD5'
-)
-
-try:
-    from ssl import SSLContext  # Modern SSL?
-except ImportError:
-    import sys
-
-    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
-        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
-                                (3, 2) <= sys.version_info)
-
-        def __init__(self, protocol_version):
-            self.protocol = protocol_version
-            # Use default values from a real SSLContext
-            self.check_hostname = False
-            self.verify_mode = ssl.CERT_NONE
-            self.ca_certs = None
-            self.options = 0
-            self.certfile = None
-            self.keyfile = None
-            self.ciphers = None
-
-        def load_cert_chain(self, certfile, keyfile):
-            self.certfile = certfile
-            self.keyfile = keyfile
-
-        def load_verify_locations(self, cafile=None, capath=None):
-            self.ca_certs = cafile
-
-            if capath is not None:
-                raise SSLError("CA directories not supported in older Pythons")
-
-        def set_ciphers(self, cipher_suite):
-            if not self.supports_set_ciphers:
-                raise TypeError(
-                    'Your version of Python does not support setting '
-                    'a custom cipher suite. Please upgrade to Python '
-                    '2.7, 3.2, or later if you need this functionality.'
-                )
-            self.ciphers = cipher_suite
-
-        def wrap_socket(self, socket, server_hostname=None, server_side=False):
-            warnings.warn(
-                'A true SSLContext object is not available. This prevents '
-                'urllib3 from configuring SSL appropriately and may cause '
-                'certain SSL connections to fail. You can upgrade to a newer '
-                'version of Python to solve this. For more information, see '
-                'https://urllib3.readthedocs.org/en/latest/security.html'
-                '#insecureplatformwarning.',
-                InsecurePlatformWarning
-            )
-            kwargs = {
-                'keyfile': self.keyfile,
-                'certfile': self.certfile,
-                'ca_certs': self.ca_certs,
-                'cert_reqs': self.verify_mode,
-                'ssl_version': self.protocol,
-                'server_side': server_side,
-            }
-            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
-                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
-            else:  # Platform-specific: Python 2.6
-                return wrap_socket(socket, **kwargs)
-
-
-def assert_fingerprint(cert, fingerprint):
-    """
-    Checks if given fingerprint matches the supplied certificate.
-
-    :param cert:
-        Certificate as bytes object.
-    :param fingerprint:
-        Fingerprint as string of hexdigits, can be interspersed by colons.
-    """
-
-    fingerprint = fingerprint.replace(':', '').lower()
-    digest_length = len(fingerprint)
-    hashfunc = HASHFUNC_MAP.get(digest_length)
-    if not hashfunc:
-        raise SSLError(
-            'Fingerprint of invalid length: {0}'.format(fingerprint))
-
-    # We need encode() here for py32; works on py2 and p33.
-    fingerprint_bytes = unhexlify(fingerprint.encode())
-
-    cert_digest = hashfunc(cert).digest()
-
-    if not _const_compare_digest(cert_digest, fingerprint_bytes):
-        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
-                       .format(fingerprint, hexlify(cert_digest)))
-
-
-def resolve_cert_reqs(candidate):
-    """
-    Resolves the argument to a numeric constant, which can be passed to
-    the wrap_socket function/method from the ssl module.
-    Defaults to :data:`ssl.CERT_NONE`.
-    If given a string it is assumed to be the name of the constant in the
-    :mod:`ssl` module or its abbrevation.
-    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
-    If it's neither `None` nor a string we assume it is already the numeric
-    constant which can directly be passed to wrap_socket.
-    """
-    if candidate is None:
-        return CERT_NONE
-
-    if isinstance(candidate, str):
-        res = getattr(ssl, candidate, None)
-        if res is None:
-            res = getattr(ssl, 'CERT_' + candidate)
-        return res
-
-    return candidate
-
-
-def resolve_ssl_version(candidate):
-    """
-    like resolve_cert_reqs
-    """
-    if candidate is None:
-        return PROTOCOL_SSLv23
-
-    if isinstance(candidate, str):
-        res = getattr(ssl, candidate, None)
-        if res is None:
-            res = getattr(ssl, 'PROTOCOL_' + candidate)
-        return res
-
-    return candidate
-
-
-def create_urllib3_context(ssl_version=None, cert_reqs=None,
-                           options=None, ciphers=None):
-    """All arguments have the same meaning as ``ssl_wrap_socket``.
-
-    By default, this function does a lot of the same work that
-    ``ssl.create_default_context`` does on Python 3.4+. It:
-
-    - Disables SSLv2, SSLv3, and compression
-    - Sets a restricted set of server ciphers
-
-    If you wish to enable SSLv3, you can do::
-
-        from urllib3.util import ssl_
-        context = ssl_.create_urllib3_context()
-        context.options &= ~ssl_.OP_NO_SSLv3
-
-    You can do the same to enable compression (substituting ``COMPRESSION``
-    for ``SSLv3`` in the last line above).
-
-    :param ssl_version:
-        The desired protocol version to use. This will default to
-        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
-        the server and your installation of OpenSSL support.
-    :param cert_reqs:
-        Whether to require the certificate verification. This defaults to
-        ``ssl.CERT_REQUIRED``.
-    :param options:
-        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
-        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
-    :param ciphers:
-        Which cipher suites to allow the server to select.
-    :returns:
-        Constructed SSLContext object with specified options
-    :rtype: SSLContext
-    """
-    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
-
-    # Setting the default here, as we may have no ssl module on import
-    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
-
-    if options is None:
-        options = 0
-        # SSLv2 is easily broken and is considered harmful and dangerous
-        options |= OP_NO_SSLv2
-        # SSLv3 has several problems and is now dangerous
-        options |= OP_NO_SSLv3
-        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
-        # (issue #309)
-        options |= OP_NO_COMPRESSION
-
-    context.options |= options
-
-    if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
-        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
-
-    context.verify_mode = cert_reqs
-    if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
-        # We do our own verification, including fingerprints and alternative
-        # hostnames. So disable it here
-        context.check_hostname = False
-    return context
-
-
-def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
-                    ca_certs=None, server_hostname=None,
-                    ssl_version=None, ciphers=None, ssl_context=None,
-                    ca_cert_dir=None):
-    """
-    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
-    the same meaning as they do when using :func:`ssl.wrap_socket`.
-
-    :param server_hostname:
-        When SNI is supported, the expected hostname of the certificate
-    :param ssl_context:
-        A pre-made :class:`SSLContext` object. If none is provided, one will
-        be created using :func:`create_urllib3_context`.
-    :param ciphers:
-        A string of ciphers we wish the client to support. This is not
-        supported on Python 2.6 as the ssl module does not support it.
-    :param ca_cert_dir:
-        A directory containing CA certificates in multiple separate files, as
-        supported by OpenSSL's -CApath flag or the capath argument to
-        SSLContext.load_verify_locations().
-    """
-    context = ssl_context
-    if context is None:
-        context = create_urllib3_context(ssl_version, cert_reqs,
-                                         ciphers=ciphers)
-
-    if ca_certs or ca_cert_dir:
-        try:
-            context.load_verify_locations(ca_certs, ca_cert_dir)
-        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
-            raise SSLError(e)
-        # Py33 raises FileNotFoundError which subclasses OSError
-        # These are not equivalent unless we check the errno attribute
-        except OSError as e:  # Platform-specific: Python 3.3 and beyond
-            if e.errno == errno.ENOENT:
-                raise SSLError(e)
-            raise
-
-    if certfile:
-        context.load_cert_chain(certfile, keyfile)
-    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
-        return context.wrap_socket(sock, server_hostname=server_hostname)
-
-    warnings.warn(
-        'An HTTPS request has been made, but the SNI (Subject Name '
-        'Indication) extension to TLS is not available on this platform. '
-        'This may cause the server to present an incorrect TLS '
-        'certificate, which can cause validation failures. You can upgrade to '
-        'a newer version of Python to solve this. For more information, see '
-        'https://urllib3.readthedocs.org/en/latest/security.html'
-        '#snimissingwarning.',
-        SNIMissingWarning
-    )
-    return context.wrap_socket(sock)
diff --git a/python/ext-libs/requests/packages/urllib3/util/timeout.py b/python/ext-libs/requests/packages/urllib3/util/timeout.py
deleted file mode 100644
index ff62f47..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/timeout.py
+++ /dev/null
@@ -1,242 +0,0 @@
-from __future__ import absolute_import
-# The default socket timeout, used by httplib to indicate that no timeout was
-# specified by the user
-from socket import _GLOBAL_DEFAULT_TIMEOUT
-import time
-
-from ..exceptions import TimeoutStateError
-
-# A sentinel value to indicate that no timeout was specified by the user in
-# urllib3
-_Default = object()
-
-
-def current_time():
-    """
-    Retrieve the current time. This function is mocked out in unit testing.
-    """
-    return time.time()
-
-
-class Timeout(object):
-    """ Timeout configuration.
-
-    Timeouts can be defined as a default for a pool::
-
-        timeout = Timeout(connect=2.0, read=7.0)
-        http = PoolManager(timeout=timeout)
-        response = http.request('GET', 'http://example.com/')
-
-    Or per-request (which overrides the default for the pool)::
-
-        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
-
-    Timeouts can be disabled by setting all the parameters to ``None``::
-
-        no_timeout = Timeout(connect=None, read=None)
-        response = http.request('GET', 'http://example.com/, timeout=no_timeout)
-
-
-    :param total:
-        This combines the connect and read timeouts into one; the read timeout
-        will be set to the time leftover from the connect attempt. In the
-        event that both a connect timeout and a total are specified, or a read
-        timeout and a total are specified, the shorter timeout will be applied.
-
-        Defaults to None.
-
-    :type total: integer, float, or None
-
-    :param connect:
-        The maximum amount of time to wait for a connection attempt to a server
-        to succeed. Omitting the parameter will default the connect timeout to
-        the system default, probably `the global default timeout in socket.py
-        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
-        None will set an infinite timeout for connection attempts.
-
-    :type connect: integer, float, or None
-
-    :param read:
-        The maximum amount of time to wait between consecutive
-        read operations for a response from the server. Omitting
-        the parameter will default the read timeout to the system
-        default, probably `the global default timeout in socket.py
-        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
-        None will set an infinite timeout.
-
-    :type read: integer, float, or None
-
-    .. note::
-
-        Many factors can affect the total amount of time for urllib3 to return
-        an HTTP response.
-
-        For example, Python's DNS resolver does not obey the timeout specified
-        on the socket. Other factors that can affect total request time include
-        high CPU load, high swap, the program running at a low priority level,
-        or other behaviors.
-
-        In addition, the read and total timeouts only measure the time between
-        read operations on the socket connecting the client and the server,
-        not the total amount of time for the request to return a complete
-        response. For most requests, the timeout is raised because the server
-        has not sent the first byte in the specified time. This is not always
-        the case; if a server streams one byte every fifteen seconds, a timeout
-        of 20 seconds will not trigger, even though the request will take
-        several minutes to complete.
-
-        If your goal is to cut off any request after a set amount of wall clock
-        time, consider having a second "watcher" thread to cut off a slow
-        request.
-    """
-
-    #: A sentinel object representing the default timeout value
-    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
-
-    def __init__(self, total=None, connect=_Default, read=_Default):
-        self._connect = self._validate_timeout(connect, 'connect')
-        self._read = self._validate_timeout(read, 'read')
-        self.total = self._validate_timeout(total, 'total')
-        self._start_connect = None
-
-    def __str__(self):
-        return '%s(connect=%r, read=%r, total=%r)' % (
-            type(self).__name__, self._connect, self._read, self.total)
-
-    @classmethod
-    def _validate_timeout(cls, value, name):
-        """ Check that a timeout attribute is valid.
-
-        :param value: The timeout value to validate
-        :param name: The name of the timeout attribute to validate. This is
-            used to specify in error messages.
-        :return: The validated and casted version of the given value.
-        :raises ValueError: If the type is not an integer or a float, or if it
-            is a numeric value less than zero.
-        """
-        if value is _Default:
-            return cls.DEFAULT_TIMEOUT
-
-        if value is None or value is cls.DEFAULT_TIMEOUT:
-            return value
-
-        try:
-            float(value)
-        except (TypeError, ValueError):
-            raise ValueError("Timeout value %s was %s, but it must be an "
-                             "int or float." % (name, value))
-
-        try:
-            if value < 0:
-                raise ValueError("Attempted to set %s timeout to %s, but the "
-                                 "timeout cannot be set to a value less "
-                                 "than 0." % (name, value))
-        except TypeError:  # Python 3
-            raise ValueError("Timeout value %s was %s, but it must be an "
-                             "int or float." % (name, value))
-
-        return value
-
-    @classmethod
-    def from_float(cls, timeout):
-        """ Create a new Timeout from a legacy timeout value.
-
-        The timeout value used by httplib.py sets the same timeout on the
-        connect(), and recv() socket requests. This creates a :class:`Timeout`
-        object that sets the individual timeouts to the ``timeout`` value
-        passed to this function.
-
-        :param timeout: The legacy timeout value.
-        :type timeout: integer, float, sentinel default object, or None
-        :return: Timeout object
-        :rtype: :class:`Timeout`
-        """
-        return Timeout(read=timeout, connect=timeout)
-
-    def clone(self):
-        """ Create a copy of the timeout object
-
-        Timeout properties are stored per-pool but each request needs a fresh
-        Timeout object to ensure each one has its own start/stop configured.
-
-        :return: a copy of the timeout object
-        :rtype: :class:`Timeout`
-        """
-        # We can't use copy.deepcopy because that will also create a new object
-        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
-        # detect the user default.
-        return Timeout(connect=self._connect, read=self._read,
-                       total=self.total)
-
-    def start_connect(self):
-        """ Start the timeout clock, used during a connect() attempt
-
-        :raises urllib3.exceptions.TimeoutStateError: if you attempt
-            to start a timer that has been started already.
-        """
-        if self._start_connect is not None:
-            raise TimeoutStateError("Timeout timer has already been started.")
-        self._start_connect = current_time()
-        return self._start_connect
-
-    def get_connect_duration(self):
-        """ Gets the time elapsed since the call to :meth:`start_connect`.
-
-        :return: Elapsed time.
-        :rtype: float
-        :raises urllib3.exceptions.TimeoutStateError: if you attempt
-            to get duration for a timer that hasn't been started.
-        """
-        if self._start_connect is None:
-            raise TimeoutStateError("Can't get connect duration for timer "
-                                    "that has not started.")
-        return current_time() - self._start_connect
-
-    @property
-    def connect_timeout(self):
-        """ Get the value to use when setting a connection timeout.
-
-        This will be a positive float or integer, the value None
-        (never timeout), or the default system timeout.
-
-        :return: Connect timeout.
-        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
-        """
-        if self.total is None:
-            return self._connect
-
-        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
-            return self.total
-
-        return min(self._connect, self.total)
-
-    @property
-    def read_timeout(self):
-        """ Get the value for the read timeout.
-
-        This assumes some time has elapsed in the connection timeout and
-        computes the read timeout appropriately.
-
-        If self.total is set, the read timeout is dependent on the amount of
-        time taken by the connect timeout. If the connection time has not been
-        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
-        raised.
-
-        :return: Value to use for the read timeout.
-        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
-        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
-            has not yet been called on this object.
-        """
-        if (self.total is not None and
-                self.total is not self.DEFAULT_TIMEOUT and
-                self._read is not None and
-                self._read is not self.DEFAULT_TIMEOUT):
-            # In case the connect timeout has not yet been established.
-            if self._start_connect is None:
-                return self._read
-            return max(0, min(self.total - self.get_connect_duration(),
-                              self._read))
-        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
-            return max(0, self.total - self.get_connect_duration())
-        else:
-            return self._read
diff --git a/python/ext-libs/requests/packages/urllib3/util/url.py b/python/ext-libs/requests/packages/urllib3/util/url.py
deleted file mode 100644
index e996204..0000000
--- a/python/ext-libs/requests/packages/urllib3/util/url.py
+++ /dev/null
@@ -1,217 +0,0 @@
-from __future__ import absolute_import
-from collections import namedtuple
-
-from ..exceptions import LocationParseError
-
-
-url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
-
-
-class Url(namedtuple('Url', url_attrs)):
-    """
-    Datastructure for representing an HTTP URL. Used as a return value for
-    :func:`parse_url`.
-    """
-    slots = ()
-
-    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
-                query=None, fragment=None):
-        if path and not path.startswith('/'):
-            path = '/' + path
-        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
-                                       query, fragment)
-
-    @property
-    def hostname(self):
-        """For backwards-compatibility with urlparse. We're nice like that."""
-        return self.host
-
-    @property
-    def request_uri(self):
-        """Absolute path including the query string."""
-        uri = self.path or '/'
-
-        if self.query is not None:
-            uri += '?' + self.query
-
-        return uri
-
-    @property
-    def netloc(self):
-        """Network location including host and port"""
-        if self.port:
-            return '%s:%d' % (self.host, self.port)
-        return self.host
-
-    @property
-    def url(self):
-        """
-        Convert self into a url
-
-        This function should more or less round-trip with :func:`.parse_url`. The
-        returned url may not be exactly the same as the url inputted to
-        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
-        with a blank port will have : removed).
-
-        Example: ::
-
-            >>> U = parse_url('http://google.com/mail/')
-            >>> U.url
-            'http://google.com/mail/'
-            >>> Url('http', 'username:password', 'host.com', 80,
-            ... '/path', 'query', 'fragment').url
-            'http://username:password@host.com:80/path?query#fragment'
-        """
-        scheme, auth, host, port, path, query, fragment = self
-        url = ''
-
-        # We use "is not None" we want things to happen with empty strings (or 0 port)
-        if scheme is not None:
-            url += scheme + '://'
-        if auth is not None:
-            url += auth + '@'
-        if host is not None:
-            url += host
-        if port is not None:
-            url += ':' + str(port)
-        if path is not None:
-            url += path
-        if query is not None:
-            url += '?' + query
-        if fragment is not None:
-            url += '#' + fragment
-
-        return url
-
-    def __str__(self):
-        return self.url
-
-
-def split_first(s, delims):
-    """
-    Given a string and an iterable of delimiters, split on the first found
-    delimiter. Return two split parts and the matched delimiter.
-
-    If not found, then the first part is the full input string.
-
-    Example::
-
-        >>> split_first('foo/bar?baz', '?/=')
-        ('foo', 'bar?baz', '/')
-        >>> split_first('foo/bar?baz', '123')
-        ('foo/bar?baz', '', None)
-
-    Scales linearly with number of delims. Not ideal for large number of delims.
-    """
-    min_idx = None
-    min_delim = None
-    for d in delims:
-        idx = s.find(d)
-        if idx < 0:
-            continue
-
-        if min_idx is None or idx < min_idx:
-            min_idx = idx
-            min_delim = d
-
-    if min_idx is None or min_idx < 0:
-        return s, '', None
-
-    return s[:min_idx], s[min_idx + 1:], min_delim
-
-
-def parse_url(url):
-    """
-    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
-    performed to parse incomplete urls. Fields not provided will be None.
-
-    Partly backwards-compatible with :mod:`urlparse`.
-
-    Example::
-
-        >>> parse_url('http://google.com/mail/')
-        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
-        >>> parse_url('google.com:80')
-        Url(scheme=None, host='google.com', port=80, path=None, ...)
-        >>> parse_url('/foo?bar')
-        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
-    """
-
-    # While this code has overlap with stdlib's urlparse, it is much
-    # simplified for our needs and less annoying.
-    # Additionally, this implementations does silly things to be optimal
-    # on CPython.
-
-    if not url:
-        # Empty
-        return Url()
-
-    scheme = None
-    auth = None
-    host = None
-    port = None
-    path = None
-    fragment = None
-    query = None
-
-    # Scheme
-    if '://' in url:
-        scheme, url = url.split('://', 1)
-
-    # Find the earliest Authority Terminator
-    # (http://tools.ietf.org/html/rfc3986#section-3.2)
-    url, path_, delim = split_first(url, ['/', '?', '#'])
-
-    if delim:
-        # Reassemble the path
-        path = delim + path_
-
-    # Auth
-    if '@' in url:
-        # Last '@' denotes end of auth part
-        auth, url = url.rsplit('@', 1)
-
-    # IPv6
-    if url and url[0] == '[':
-        host, url = url.split(']', 1)
-        host += ']'
-
-    # Port
-    if ':' in url:
-        _host, port = url.split(':', 1)
-
-        if not host:
-            host = _host
-
-        if port:
-            # If given, ports must be integers.
-            if not port.isdigit():
-                raise LocationParseError(url)
-            port = int(port)
-        else:
-            # Blank ports are cool, too. (rfc3986#section-3.2.3)
-            port = None
-
-    elif not host and url:
-        host = url
-
-    if not path:
-        return Url(scheme, auth, host, port, path, query, fragment)
-
-    # Fragment
-    if '#' in path:
-        path, fragment = path.split('#', 1)
-
-    # Query
-    if '?' in path:
-        path, query = path.split('?', 1)
-
-    return Url(scheme, auth, host, port, path, query, fragment)
-
-
-def get_host(url):
-    """
-    Deprecated. Use :func:`.parse_url` instead.
-    """
-    p = parse_url(url)
-    return p.scheme or 'http', p.hostname, p.port
diff --git a/python/ext-libs/requests/sessions.py b/python/ext-libs/requests/sessions.py
deleted file mode 100644
index 45be973..0000000
--- a/python/ext-libs/requests/sessions.py
+++ /dev/null
@@ -1,689 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.session
-~~~~~~~~~~~~~~~~
-
-This module provides a Session object to manage and persist settings across
-requests (cookies, auth, proxies).
-
-"""
-import os
-from collections import Mapping
-from datetime import datetime
-
-from .auth import _basic_auth_str
-from .compat import cookielib, OrderedDict, urljoin, urlparse
-from .cookies import (
-    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
-from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
-from .hooks import default_hooks, dispatch_hook
-from .utils import to_key_val_list, default_headers, to_native_string
-from .exceptions import (
-    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
-from .packages.urllib3._collections import RecentlyUsedContainer
-from .structures import CaseInsensitiveDict
-
-from .adapters import HTTPAdapter
-
-from .utils import (
-    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
-    get_auth_from_url
-)
-
-from .status_codes import codes
-
-# formerly defined here, reexposed here for backward compatibility
-from .models import REDIRECT_STATI
-
-REDIRECT_CACHE_SIZE = 1000
-
-
-def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
-    """
-    Determines appropriate setting for a given request, taking into account the
-    explicit setting on that request, and the setting in the session. If a
-    setting is a dictionary, they will be merged together using `dict_class`
-    """
-
-    if session_setting is None:
-        return request_setting
-
-    if request_setting is None:
-        return session_setting
-
-    # Bypass if not a dictionary (e.g. verify)
-    if not (
-            isinstance(session_setting, Mapping) and
-            isinstance(request_setting, Mapping)
-    ):
-        return request_setting
-
-    merged_setting = dict_class(to_key_val_list(session_setting))
-    merged_setting.update(to_key_val_list(request_setting))
-
-    # Remove keys that are set to None. Extract keys first to avoid altering
-    # the dictionary during iteration.
-    none_keys = [k for (k, v) in merged_setting.items() if v is None]
-    for key in none_keys:
-        del merged_setting[key]
-
-    return merged_setting
-
-
-def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
-    """
-    Properly merges both requests and session hooks.
-
-    This is necessary because when request_hooks == {'response': []}, the
-    merge breaks Session hooks entirely.
-    """
-    if session_hooks is None or session_hooks.get('response') == []:
-        return request_hooks
-
-    if request_hooks is None or request_hooks.get('response') == []:
-        return session_hooks
-
-    return merge_setting(request_hooks, session_hooks, dict_class)
-
-
-class SessionRedirectMixin(object):
-    def resolve_redirects(self, resp, req, stream=False, timeout=None,
-                          verify=True, cert=None, proxies=None, **adapter_kwargs):
-        """Receives a Response. Returns a generator of Responses."""
-
-        i = 0
-        hist = [] # keep track of history
-
-        while resp.is_redirect:
-            prepared_request = req.copy()
-
-            if i > 0:
-                # Update history and keep track of redirects.
-                hist.append(resp)
-                new_hist = list(hist)
-                resp.history = new_hist
-
-            try:
-                resp.content  # Consume socket so it can be released
-            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
-                resp.raw.read(decode_content=False)
-
-            if i >= self.max_redirects:
-                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)
-
-            # Release the connection back into the pool.
-            resp.close()
-
-            url = resp.headers['location']
-
-            # Handle redirection without scheme (see: RFC 1808 Section 4)
-            if url.startswith('//'):
-                parsed_rurl = urlparse(resp.url)
-                url = '%s:%s' % (parsed_rurl.scheme, url)
-
-            # The scheme should be lower case...
-            parsed = urlparse(url)
-            url = parsed.geturl()
-
-            # Facilitate relative 'location' headers, as allowed by RFC 7231.
-            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
-            # Compliant with RFC3986, we percent encode the url.
-            if not parsed.netloc:
-                url = urljoin(resp.url, requote_uri(url))
-            else:
-                url = requote_uri(url)
-
-            prepared_request.url = to_native_string(url)
-            # Cache the url, unless it redirects to itself.
-            if resp.is_permanent_redirect and req.url != prepared_request.url:
-                self.redirect_cache[req.url] = prepared_request.url
-
-            self.rebuild_method(prepared_request, resp)
-
-            # https://github.com/kennethreitz/requests/issues/1084
-            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
-                if 'Content-Length' in prepared_request.headers:
-                    del prepared_request.headers['Content-Length']
-
-                prepared_request.body = None
-
-            headers = prepared_request.headers
-            try:
-                del headers['Cookie']
-            except KeyError:
-                pass
-
-            # Extract any cookies sent on the response to the cookiejar
-            # in the new request. Because we've mutated our copied prepared
-            # request, use the old one that we haven't yet touched.
-            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
-            prepared_request._cookies.update(self.cookies)
-            prepared_request.prepare_cookies(prepared_request._cookies)
-
-            # Rebuild auth and proxy information.
-            proxies = self.rebuild_proxies(prepared_request, proxies)
-            self.rebuild_auth(prepared_request, resp)
-
-            # Override the original request.
-            req = prepared_request
-
-            resp = self.send(
-                req,
-                stream=stream,
-                timeout=timeout,
-                verify=verify,
-                cert=cert,
-                proxies=proxies,
-                allow_redirects=False,
-                **adapter_kwargs
-            )
-
-            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
-
-            i += 1
-            yield resp
-
-    def rebuild_auth(self, prepared_request, response):
-        """
-        When being redirected we may want to strip authentication from the
-        request to avoid leaking credentials. This method intelligently removes
-        and reapplies authentication where possible to avoid credential loss.
-        """
-        headers = prepared_request.headers
-        url = prepared_request.url
-
-        if 'Authorization' in headers:
-            # If we get redirected to a new host, we should strip out any
-            # authentication headers.
-            original_parsed = urlparse(response.request.url)
-            redirect_parsed = urlparse(url)
-
-            if (original_parsed.hostname != redirect_parsed.hostname):
-                del headers['Authorization']
-
-        # .netrc might have more auth for us on our new host.
-        new_auth = get_netrc_auth(url) if self.trust_env else None
-        if new_auth is not None:
-            prepared_request.prepare_auth(new_auth)
-
-        return
-
-    def rebuild_proxies(self, prepared_request, proxies):
-        """
-        This method re-evaluates the proxy configuration by considering the
-        environment variables. If we are redirected to a URL covered by
-        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
-        proxy keys for this URL (in case they were stripped by a previous
-        redirect).
-
-        This method also replaces the Proxy-Authorization header where
-        necessary.
-        """
-        headers = prepared_request.headers
-        url = prepared_request.url
-        scheme = urlparse(url).scheme
-        new_proxies = proxies.copy() if proxies is not None else {}
-
-        if self.trust_env and not should_bypass_proxies(url):
-            environ_proxies = get_environ_proxies(url)
-
-            proxy = environ_proxies.get(scheme)
-
-            if proxy:
-                new_proxies.setdefault(scheme, environ_proxies[scheme])
-
-        if 'Proxy-Authorization' in headers:
-            del headers['Proxy-Authorization']
-
-        try:
-            username, password = get_auth_from_url(new_proxies[scheme])
-        except KeyError:
-            username, password = None, None
-
-        if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username, password)
-
-        return new_proxies
-
-    def rebuild_method(self, prepared_request, response):
-        """When being redirected we may want to change the method of the request
-        based on certain specs or browser behavior.
-        """
-        method = prepared_request.method
-
-        # http://tools.ietf.org/html/rfc7231#section-6.4.4
-        if response.status_code == codes.see_other and method != 'HEAD':
-            method = 'GET'
-
-        # Do what the browsers do, despite standards...
-        # First, turn 302s into GETs.
-        if response.status_code == codes.found and method != 'HEAD':
-            method = 'GET'
-
-        # Second, if a POST is responded to with a 301, turn it into a GET.
-        # This bizarre behaviour is explained in Issue 1704.
-        if response.status_code == codes.moved and method == 'POST':
-            method = 'GET'
-
-        prepared_request.method = method
-
-
-class Session(SessionRedirectMixin):
-    """A Requests session.
-
-    Provides cookie persistence, connection-pooling, and configuration.
-
-    Basic Usage::
-
-      >>> import requests
-      >>> s = requests.Session()
-      >>> s.get('http://httpbin.org/get')
-      <Response [200]>
-
-    Or as a context manager::
-
-      >>> with requests.Session() as s:
-      >>>     s.get('http://httpbin.org/get')
-      <Response [200]>
-    """
-
-    __attrs__ = [
-        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
-        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
-        'max_redirects',
-    ]
-
-    def __init__(self):
-
-        #: A case-insensitive dictionary of headers to be sent on each
-        #: :class:`Request <Request>` sent from this
-        #: :class:`Session <Session>`.
-        self.headers = default_headers()
-
-        #: Default Authentication tuple or object to attach to
-        #: :class:`Request <Request>`.
-        self.auth = None
-
-        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
-        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
-        #: be used on each :class:`Request <Request>`.
-        self.proxies = {}
-
-        #: Event-handling hooks.
-        self.hooks = default_hooks()
-
-        #: Dictionary of querystring data to attach to each
-        #: :class:`Request <Request>`. The dictionary values may be lists for
-        #: representing multivalued query parameters.
-        self.params = {}
-
-        #: Stream response content default.
-        self.stream = False
-
-        #: SSL Verification default.
-        self.verify = True
-
-        #: SSL certificate default.
-        self.cert = None
-
-        #: Maximum number of redirects allowed. If the request exceeds this
-        #: limit, a :class:`TooManyRedirects` exception is raised.
-        self.max_redirects = DEFAULT_REDIRECT_LIMIT
-
-        #: Trust environment settings for proxy configuration, default
-        #: authentication and similar.
-        self.trust_env = True
-
-        #: A CookieJar containing all currently outstanding cookies set on this
-        #: session. By default it is a
-        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
-        #: may be any other ``cookielib.CookieJar`` compatible object.
-        self.cookies = cookiejar_from_dict({})
-
-        # Default connection adapters.
-        self.adapters = OrderedDict()
-        self.mount('https://', HTTPAdapter())
-        self.mount('http://', HTTPAdapter())
-
-        # Only store 1000 redirects to prevent using infinite memory
-        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        self.close()
-
-    def prepare_request(self, request):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
-        transmission and returns it. The :class:`PreparedRequest` has settings
-        merged from the :class:`Request <Request>` instance and those of the
-        :class:`Session`.
-
-        :param request: :class:`Request` instance to prepare with this
-            session's settings.
-        """
-        cookies = request.cookies or {}
-
-        # Bootstrap CookieJar.
-        if not isinstance(cookies, cookielib.CookieJar):
-            cookies = cookiejar_from_dict(cookies)
-
-        # Merge with session cookies
-        merged_cookies = merge_cookies(
-            merge_cookies(RequestsCookieJar(), self.cookies), cookies)
-
-
-        # Set environment's basic authentication if not explicitly set.
-        auth = request.auth
-        if self.trust_env and not auth and not self.auth:
-            auth = get_netrc_auth(request.url)
-
-        p = PreparedRequest()
-        p.prepare(
-            method=request.method.upper(),
-            url=request.url,
-            files=request.files,
-            data=request.data,
-            json=request.json,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
-            params=merge_setting(request.params, self.params),
-            auth=merge_setting(auth, self.auth),
-            cookies=merged_cookies,
-            hooks=merge_hooks(request.hooks, self.hooks),
-        )
-        return p
-
-    def request(self, method, url,
-        params=None,
-        data=None,
-        headers=None,
-        cookies=None,
-        files=None,
-        auth=None,
-        timeout=None,
-        allow_redirects=True,
-        proxies=None,
-        hooks=None,
-        stream=None,
-        verify=None,
-        cert=None,
-        json=None):
-        """Constructs a :class:`Request <Request>`, prepares it and sends it.
-        Returns :class:`Response <Response>` object.
-
-        :param method: method for the new :class:`Request` object.
-        :param url: URL for the new :class:`Request` object.
-        :param params: (optional) Dictionary or bytes to be sent in the query
-            string for the :class:`Request`.
-        :param data: (optional) Dictionary, bytes, or file-like object to send
-            in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the
-            :class:`Request`.
-        :param headers: (optional) Dictionary of HTTP Headers to send with the
-            :class:`Request`.
-        :param cookies: (optional) Dict or CookieJar object to send with the
-            :class:`Request`.
-        :param files: (optional) Dictionary of ``'filename': file-like-objects``
-            for multipart encoding upload.
-        :param auth: (optional) Auth tuple or callable to enable
-            Basic/Digest/Custom HTTP Auth.
-        :param timeout: (optional) How long to wait for the server to send
-            data before giving up, as a float, or a :ref:`(connect timeout,
-            read timeout) <timeouts>` tuple.
-        :type timeout: float or tuple
-        :param allow_redirects: (optional) Set to True by default.
-        :type allow_redirects: bool
-        :param proxies: (optional) Dictionary mapping protocol or protocol and
-            hostname to the URL of the proxy.
-        :param stream: (optional) whether to immediately download the response
-            content. Defaults to ``False``.
-        :param verify: (optional) whether the SSL cert will be verified.
-            A CA_BUNDLE path can also be provided. Defaults to ``True``.
-        :param cert: (optional) if String, path to ssl client cert file (.pem).
-            If Tuple, ('cert', 'key') pair.
-        :rtype: requests.Response
-	"""
-        # Create the Request.
-        req = Request(
-            method = method.upper(),
-            url = url,
-            headers = headers,
-            files = files,
-            data = data or {},
-            json = json,
-            params = params or {},
-            auth = auth,
-            cookies = cookies,
-            hooks = hooks,
-        )
-        prep = self.prepare_request(req)
-
-        proxies = proxies or {}
-
-        settings = self.merge_environment_settings(
-            prep.url, proxies, stream, verify, cert
-        )
-
-        # Send the request.
-        send_kwargs = {
-            'timeout': timeout,
-            'allow_redirects': allow_redirects,
-        }
-        send_kwargs.update(settings)
-        resp = self.send(prep, **send_kwargs)
-
-        return resp
-
-    def get(self, url, **kwargs):
-        """Sends a GET request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('GET', url, **kwargs)
-
-    def options(self, url, **kwargs):
-        """Sends a OPTIONS request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('OPTIONS', url, **kwargs)
-
-    def head(self, url, **kwargs):
-        """Sends a HEAD request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', False)
-        return self.request('HEAD', url, **kwargs)
-
-    def post(self, url, data=None, json=None, **kwargs):
-        """Sends a POST request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('POST', url, data=data, json=json, **kwargs)
-
-    def put(self, url, data=None, **kwargs):
-        """Sends a PUT request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('PUT', url, data=data, **kwargs)
-
-    def patch(self, url, data=None, **kwargs):
-        """Sends a PATCH request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('PATCH', url,  data=data, **kwargs)
-
-    def delete(self, url, **kwargs):
-        """Sends a DELETE request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('DELETE', url, **kwargs)
-
-    def send(self, request, **kwargs):
-        """Send a given PreparedRequest."""
-        # Set defaults that the hooks can utilize to ensure they always have
-        # the correct parameters to reproduce the previous request.
-        kwargs.setdefault('stream', self.stream)
-        kwargs.setdefault('verify', self.verify)
-        kwargs.setdefault('cert', self.cert)
-        kwargs.setdefault('proxies', self.proxies)
-
-        # It's possible that users might accidentally send a Request object.
-        # Guard against that specific failure case.
-        if isinstance(request, Request):
-            raise ValueError('You can only send PreparedRequests.')
-
-        # Set up variables needed for resolve_redirects and dispatching of hooks
-        allow_redirects = kwargs.pop('allow_redirects', True)
-        stream = kwargs.get('stream')
-        hooks = request.hooks
-
-        # Resolve URL in redirect cache, if available.
-        if allow_redirects:
-            checked_urls = set()
-            while request.url in self.redirect_cache:
-                checked_urls.add(request.url)
-                new_url = self.redirect_cache.get(request.url)
-                if new_url in checked_urls:
-                    break
-                request.url = new_url
-
-        # Get the appropriate adapter to use
-        adapter = self.get_adapter(url=request.url)
-
-        # Start time (approximately) of the request
-        start = datetime.utcnow()
-
-        # Send the request
-        r = adapter.send(request, **kwargs)
-
-        # Total elapsed time of the request (approximately)
-        r.elapsed = datetime.utcnow() - start
-
-        # Response manipulation hooks
-        r = dispatch_hook('response', hooks, r, **kwargs)
-
-        # Persist cookies
-        if r.history:
-
-            # If the hooks create history then we want those cookies too
-            for resp in r.history:
-                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
-
-        extract_cookies_to_jar(self.cookies, request, r.raw)
-
-        # Redirect resolving generator.
-        gen = self.resolve_redirects(r, request, **kwargs)
-
-        # Resolve redirects if allowed.
-        history = [resp for resp in gen] if allow_redirects else []
-
-        # Shuffle things around if there's history.
-        if history:
-            # Insert the first (original) request at the start
-            history.insert(0, r)
-            # Get the last request made
-            r = history.pop()
-            r.history = history
-
-        if not stream:
-            r.content
-
-        return r
-
-    def merge_environment_settings(self, url, proxies, stream, verify, cert):
-        """Check the environment and merge it with some settings."""
-        # Gather clues from the surrounding environment.
-        if self.trust_env:
-            # Set environment's proxies.
-            env_proxies = get_environ_proxies(url) or {}
-            for (k, v) in env_proxies.items():
-                proxies.setdefault(k, v)
-
-            # Look for requests environment configuration and be compatible
-            # with cURL.
-            if verify is True or verify is None:
-                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
-                          os.environ.get('CURL_CA_BUNDLE'))
-
-        # Merge all the kwargs.
-        proxies = merge_setting(proxies, self.proxies)
-        stream = merge_setting(stream, self.stream)
-        verify = merge_setting(verify, self.verify)
-        cert = merge_setting(cert, self.cert)
-
-        return {'verify': verify, 'proxies': proxies, 'stream': stream,
-                'cert': cert}
-
-    def get_adapter(self, url):
-        """Returns the appropriate connection adapter for the given URL."""
-        for (prefix, adapter) in self.adapters.items():
-
-            if url.lower().startswith(prefix):
-                return adapter
-
-        # Nothing matches :-/
-        raise InvalidSchema("No connection adapters were found for '%s'" % url)
-
-    def close(self):
-        """Closes all adapters and as such the session"""
-        for v in self.adapters.values():
-            v.close()
-
-    def mount(self, prefix, adapter):
-        """Registers a connection adapter to a prefix.
-
-        Adapters are sorted in descending order by key length."""
-
-        self.adapters[prefix] = adapter
-        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
-
-        for key in keys_to_move:
-            self.adapters[key] = self.adapters.pop(key)
-
-    def __getstate__(self):
-        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
-        state['redirect_cache'] = dict(self.redirect_cache)
-        return state
-
-    def __setstate__(self, state):
-        redirect_cache = state.pop('redirect_cache', {})
-        for attr, value in state.items():
-            setattr(self, attr, value)
-
-        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
-        for redirect, to in redirect_cache.items():
-            self.redirect_cache[redirect] = to
-
-
-def session():
-    """Returns a :class:`Session` for context-management."""
-
-    return Session()
diff --git a/python/ext-libs/requests/status_codes.py b/python/ext-libs/requests/status_codes.py
deleted file mode 100644
index 0137c91..0000000
--- a/python/ext-libs/requests/status_codes.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from .structures import LookupDict
-
-_codes = {
-
-    # Informational.
-    100: ('continue',),
-    101: ('switching_protocols',),
-    102: ('processing',),
-    103: ('checkpoint',),
-    122: ('uri_too_long', 'request_uri_too_long'),
-    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
-    201: ('created',),
-    202: ('accepted',),
-    203: ('non_authoritative_info', 'non_authoritative_information'),
-    204: ('no_content',),
-    205: ('reset_content', 'reset'),
-    206: ('partial_content', 'partial'),
-    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
-    208: ('already_reported',),
-    226: ('im_used',),
-
-    # Redirection.
-    300: ('multiple_choices',),
-    301: ('moved_permanently', 'moved', '\\o-'),
-    302: ('found',),
-    303: ('see_other', 'other'),
-    304: ('not_modified',),
-    305: ('use_proxy',),
-    306: ('switch_proxy',),
-    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
-    308: ('permanent_redirect',
-          'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
-
-    # Client Error.
-    400: ('bad_request', 'bad'),
-    401: ('unauthorized',),
-    402: ('payment_required', 'payment'),
-    403: ('forbidden',),
-    404: ('not_found', '-o-'),
-    405: ('method_not_allowed', 'not_allowed'),
-    406: ('not_acceptable',),
-    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
-    408: ('request_timeout', 'timeout'),
-    409: ('conflict',),
-    410: ('gone',),
-    411: ('length_required',),
-    412: ('precondition_failed', 'precondition'),
-    413: ('request_entity_too_large',),
-    414: ('request_uri_too_large',),
-    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
-    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
-    417: ('expectation_failed',),
-    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
-    421: ('misdirected_request',),
-    422: ('unprocessable_entity', 'unprocessable'),
-    423: ('locked',),
-    424: ('failed_dependency', 'dependency'),
-    425: ('unordered_collection', 'unordered'),
-    426: ('upgrade_required', 'upgrade'),
-    428: ('precondition_required', 'precondition'),
-    429: ('too_many_requests', 'too_many'),
-    431: ('header_fields_too_large', 'fields_too_large'),
-    444: ('no_response', 'none'),
-    449: ('retry_with', 'retry'),
-    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
-    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
-    499: ('client_closed_request',),
-
-    # Server Error.
-    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
-    501: ('not_implemented',),
-    502: ('bad_gateway',),
-    503: ('service_unavailable', 'unavailable'),
-    504: ('gateway_timeout',),
-    505: ('http_version_not_supported', 'http_version'),
-    506: ('variant_also_negotiates',),
-    507: ('insufficient_storage',),
-    509: ('bandwidth_limit_exceeded', 'bandwidth'),
-    510: ('not_extended',),
-    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
-}
-
-codes = LookupDict(name='status_codes')
-
-for code, titles in _codes.items():
-    for title in titles:
-        setattr(codes, title, code)
-        if not title.startswith('\\'):
-            setattr(codes, title.upper(), code)
diff --git a/python/ext-libs/requests/structures.py b/python/ext-libs/requests/structures.py
deleted file mode 100644
index 991056e..0000000
--- a/python/ext-libs/requests/structures.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.structures
-~~~~~~~~~~~~~~~~~~~
-
-Data structures that power Requests.
-
-"""
-
-import collections
-
-from .compat import OrderedDict
-
-
-class CaseInsensitiveDict(collections.MutableMapping):
-    """
-    A case-insensitive ``dict``-like object.
-
-    Implements all methods and operations of
-    ``collections.MutableMapping`` as well as dict's ``copy``. Also
-    provides ``lower_items``.
-
-    All keys are expected to be strings. The structure remembers the
-    case of the last key to be set, and ``iter(instance)``,
-    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
-    will contain case-sensitive keys. However, querying and contains
-    testing is case insensitive::
-
-        cid = CaseInsensitiveDict()
-        cid['Accept'] = 'application/json'
-        cid['aCCEPT'] == 'application/json'  # True
-        list(cid) == ['Accept']  # True
-
-    For example, ``headers['content-encoding']`` will return the
-    value of a ``'Content-Encoding'`` response header, regardless
-    of how the header name was originally stored.
-
-    If the constructor, ``.update``, or equality comparison
-    operations are given keys that have equal ``.lower()``s, the
-    behavior is undefined.
-
-    """
-    def __init__(self, data=None, **kwargs):
-        self._store = OrderedDict()
-        if data is None:
-            data = {}
-        self.update(data, **kwargs)
-
-    def __setitem__(self, key, value):
-        # Use the lowercased key for lookups, but store the actual
-        # key alongside the value.
-        self._store[key.lower()] = (key, value)
-
-    def __getitem__(self, key):
-        return self._store[key.lower()][1]
-
-    def __delitem__(self, key):
-        del self._store[key.lower()]
-
-    def __iter__(self):
-        return (casedkey for casedkey, mappedvalue in self._store.values())
-
-    def __len__(self):
-        return len(self._store)
-
-    def lower_items(self):
-        """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
-
-    def __eq__(self, other):
-        if isinstance(other, collections.Mapping):
-            other = CaseInsensitiveDict(other)
-        else:
-            return NotImplemented
-        # Compare insensitively
-        return dict(self.lower_items()) == dict(other.lower_items())
-
-    # Copy is required
-    def copy(self):
-        return CaseInsensitiveDict(self._store.values())
-
-    def __repr__(self):
-        return str(dict(self.items()))
-
-class LookupDict(dict):
-    """Dictionary lookup object."""
-
-    def __init__(self, name=None):
-        self.name = name
-        super(LookupDict, self).__init__()
-
-    def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
-
-    def __getitem__(self, key):
-        # We allow fall-through here, so values default to None
-
-        return self.__dict__.get(key, None)
-
-    def get(self, key, default=None):
-        return self.__dict__.get(key, default)
diff --git a/python/ext-libs/requests/utils.py b/python/ext-libs/requests/utils.py
deleted file mode 100644
index c08448c..0000000
--- a/python/ext-libs/requests/utils.py
+++ /dev/null
@@ -1,728 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.utils
-~~~~~~~~~~~~~~
-
-This module provides utility functions that are used within Requests
-that are also useful for external consumption.
-
-"""
-
-import cgi
-import codecs
-import collections
-import io
-import os
-import re
-import socket
-import struct
-import warnings
-
-from . import __version__
-from . import certs
-from .compat import parse_http_list as _parse_list_header
-from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
-                     builtin_str, getproxies, proxy_bypass, urlunparse,
-                     basestring)
-from .cookies import RequestsCookieJar, cookiejar_from_dict
-from .structures import CaseInsensitiveDict
-from .exceptions import InvalidURL, FileModeWarning
-
-_hush_pyflakes = (RequestsCookieJar,)
-
-NETRC_FILES = ('.netrc', '_netrc')
-
-DEFAULT_CA_BUNDLE_PATH = certs.where()
-
-
-def dict_to_sequence(d):
-    """Returns an internal sequence dictionary update."""
-
-    if hasattr(d, 'items'):
-        d = d.items()
-
-    return d
-
-
-def super_len(o):
-    total_length = 0
-    current_position = 0
-
-    if hasattr(o, '__len__'):
-        total_length = len(o)
-
-    elif hasattr(o, 'len'):
-        total_length = o.len
-
-    elif hasattr(o, 'getvalue'):
-        # e.g. BytesIO, cStringIO.StringIO
-        total_length = len(o.getvalue())
-
-    elif hasattr(o, 'fileno'):
-        try:
-            fileno = o.fileno()
-        except io.UnsupportedOperation:
-            pass
-        else:
-            total_length = os.fstat(fileno).st_size
-
-            # Having used fstat to determine the file length, we need to
-            # confirm that this file was opened up in binary mode.
-            if 'b' not in o.mode:
-                warnings.warn((
-                    "Requests has determined the content-length for this "
-                    "request using the binary size of the file: however, the "
-                    "file has been opened in text mode (i.e. without the 'b' "
-                    "flag in the mode). This may lead to an incorrect "
-                    "content-length. In Requests 3.0, support will be removed "
-                    "for files in text mode."),
-                    FileModeWarning
-                )
-
-    if hasattr(o, 'tell'):
-        try:
-            current_position = o.tell()
-        except (OSError, IOError):
-            # This can happen in some weird situations, such as when the file
-            # is actually a special file descriptor like stdin. In this
-            # instance, we don't know what the length is, so set it to zero and
-            # let requests chunk it instead.
-            current_position = total_length
-
-    return max(0, total_length - current_position)
-
-
-def get_netrc_auth(url, raise_errors=False):
-    """Returns the Requests tuple auth for a given url from netrc."""
-
-    try:
-        from netrc import netrc, NetrcParseError
-
-        netrc_path = None
-
-        for f in NETRC_FILES:
-            try:
-                loc = os.path.expanduser('~/{0}'.format(f))
-            except KeyError:
-                # os.path.expanduser can fail when $HOME is undefined and
-                # getpwuid fails. See http://bugs.python.org/issue20164 &
-                # https://github.com/kennethreitz/requests/issues/1846
-                return
-
-            if os.path.exists(loc):
-                netrc_path = loc
-                break
-
-        # Abort early if there isn't one.
-        if netrc_path is None:
-            return
-
-        ri = urlparse(url)
-
-        # Strip port numbers from netloc. This weird `if...encode`` dance is
-        # used for Python 3.2, which doesn't support unicode literals.
-        splitstr = b':'
-        if isinstance(url, str):
-            splitstr = splitstr.decode('ascii')
-        host = ri.netloc.split(splitstr)[0]
-
-        try:
-            _netrc = netrc(netrc_path).authenticators(host)
-            if _netrc:
-                # Return with login / password
-                login_i = (0 if _netrc[0] else 1)
-                return (_netrc[login_i], _netrc[2])
-        except (NetrcParseError, IOError):
-            # If there was a parsing error or a permissions issue reading the file,
-            # we'll just skip netrc auth unless explicitly asked to raise errors.
-            if raise_errors:
-                raise
-
-    # AppEngine hackiness.
-    except (ImportError, AttributeError):
-        pass
-
-
-def guess_filename(obj):
-    """Tries to guess the filename of the given object."""
-    name = getattr(obj, 'name', None)
-    if (name and isinstance(name, basestring) and name[0] != '<' and
-            name[-1] != '>'):
-        return os.path.basename(name)
-
-
-def from_key_val_list(value):
-    """Take an object and test to see if it can be represented as a
-    dictionary. Unless it can not be represented as such, return an
-    OrderedDict, e.g.,
-
-    ::
-
-        >>> from_key_val_list([('key', 'val')])
-        OrderedDict([('key', 'val')])
-        >>> from_key_val_list('string')
-        ValueError: need more than 1 value to unpack
-        >>> from_key_val_list({'key': 'val'})
-        OrderedDict([('key', 'val')])
-    """
-    if value is None:
-        return None
-
-    if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
-
-    return OrderedDict(value)
-
-
-def to_key_val_list(value):
-    """Take an object and test to see if it can be represented as a
-    dictionary. If it can be, return a list of tuples, e.g.,
-
-    ::
-
-        >>> to_key_val_list([('key', 'val')])
-        [('key', 'val')]
-        >>> to_key_val_list({'key': 'val'})
-        [('key', 'val')]
-        >>> to_key_val_list('string')
-        ValueError: cannot encode objects that are not 2-tuples.
-    """
-    if value is None:
-        return None
-
-    if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
-
-    if isinstance(value, collections.Mapping):
-        value = value.items()
-
-    return list(value)
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_list_header(value):
-    """Parse lists as described by RFC 2068 Section 2.
-
-    In particular, parse comma-separated lists where the elements of
-    the list may include quoted-strings.  A quoted-string could
-    contain a comma.  A non-quoted string could have quotes in the
-    middle.  Quotes are removed automatically after parsing.
-
-    It basically works like :func:`parse_set_header` just that items
-    may appear multiple times and case sensitivity is preserved.
-
-    The return value is a standard :class:`list`:
-
-    >>> parse_list_header('token, "quoted value"')
-    ['token', 'quoted value']
-
-    To create a header from the :class:`list` again, use the
-    :func:`dump_header` function.
-
-    :param value: a string with a list header.
-    :return: :class:`list`
-    """
-    result = []
-    for item in _parse_list_header(value):
-        if item[:1] == item[-1:] == '"':
-            item = unquote_header_value(item[1:-1])
-        result.append(item)
-    return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_dict_header(value):
-    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
-    convert them into a python dict:
-
-    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
-    >>> type(d) is dict
-    True
-    >>> sorted(d.items())
-    [('bar', 'as well'), ('foo', 'is a fish')]
-
-    If there is no value for a key it will be `None`:
-
-    >>> parse_dict_header('key_without_value')
-    {'key_without_value': None}
-
-    To create a header from the :class:`dict` again, use the
-    :func:`dump_header` function.
-
-    :param value: a string with a dict header.
-    :return: :class:`dict`
-    """
-    result = {}
-    for item in _parse_list_header(value):
-        if '=' not in item:
-            result[item] = None
-            continue
-        name, value = item.split('=', 1)
-        if value[:1] == value[-1:] == '"':
-            value = unquote_header_value(value[1:-1])
-        result[name] = value
-    return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def unquote_header_value(value, is_filename=False):
-    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
-    This does not use the real unquoting but what browsers are actually
-    using for quoting.
-
-    :param value: the header value to unquote.
-    """
-    if value and value[0] == value[-1] == '"':
-        # this is not the real unquoting, but fixing this so that the
-        # RFC is met will result in bugs with internet explorer and
-        # probably some other browsers as well.  IE for example is
-        # uploading files with "C:\foo\bar.txt" as filename
-        value = value[1:-1]
-
-        # if this is a filename and the starting characters look like
-        # a UNC path, then just return the value without quotes.  Using the
-        # replace sequence below on a UNC path has the effect of turning
-        # the leading double slash into a single slash and then
-        # _fix_ie_filename() doesn't work correctly.  See #458.
-        if not is_filename or value[:2] != '\\\\':
-            return value.replace('\\\\', '\\').replace('\\"', '"')
-    return value
-
-
-def dict_from_cookiejar(cj):
-    """Returns a key/value dictionary from a CookieJar.
-
-    :param cj: CookieJar object to extract cookies from.
-    """
-
-    cookie_dict = {}
-
-    for cookie in cj:
-        cookie_dict[cookie.name] = cookie.value
-
-    return cookie_dict
-
-
-def add_dict_to_cookiejar(cj, cookie_dict):
-    """Returns a CookieJar from a key/value dictionary.
-
-    :param cj: CookieJar to insert cookies into.
-    :param cookie_dict: Dict of key/values to insert into CookieJar.
-    """
-
-    cj2 = cookiejar_from_dict(cookie_dict)
-    cj.update(cj2)
-    return cj
-
-
-def get_encodings_from_content(content):
-    """Returns encodings from given content string.
-
-    :param content: bytestring to extract encodings from.
-    """
-    warnings.warn((
-        'In requests 3.0, get_encodings_from_content will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
-
-    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
-    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
-    xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
-
-    return (charset_re.findall(content) +
-            pragma_re.findall(content) +
-            xml_re.findall(content))
-
-
-def get_encoding_from_headers(headers):
-    """Returns encodings from given HTTP Header Dict.
-
-    :param headers: dictionary to extract encoding from.
-    """
-
-    content_type = headers.get('content-type')
-
-    if not content_type:
-        return None
-
-    content_type, params = cgi.parse_header(content_type)
-
-    if 'charset' in params:
-        return params['charset'].strip("'\"")
-
-    if 'text' in content_type:
-        return 'ISO-8859-1'
-
-
-def stream_decode_response_unicode(iterator, r):
-    """Stream decodes a iterator."""
-
-    if r.encoding is None:
-        for item in iterator:
-            yield item
-        return
-
-    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
-    for chunk in iterator:
-        rv = decoder.decode(chunk)
-        if rv:
-            yield rv
-    rv = decoder.decode(b'', final=True)
-    if rv:
-        yield rv
-
-
-def iter_slices(string, slice_length):
-    """Iterate over slices of a string."""
-    pos = 0
-    while pos < len(string):
-        yield string[pos:pos + slice_length]
-        pos += slice_length
-
-
-def get_unicode_from_response(r):
-    """Returns the requested content back in unicode.
-
-    :param r: Response object to get unicode content from.
-
-    Tried:
-
-    1. charset from content-type
-    2. fall back and replace all unicode characters
-
-    """
-    warnings.warn((
-        'In requests 3.0, get_unicode_from_response will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
-
-    tried_encodings = []
-
-    # Try charset from content-type
-    encoding = get_encoding_from_headers(r.headers)
-
-    if encoding:
-        try:
-            return str(r.content, encoding)
-        except UnicodeError:
-            tried_encodings.append(encoding)
-
-    # Fall back:
-    try:
-        return str(r.content, encoding, errors='replace')
-    except TypeError:
-        return r.content
-
-
-# The unreserved URI characters (RFC 3986)
-UNRESERVED_SET = frozenset(
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
-    + "0123456789-._~")
-
-
-def unquote_unreserved(uri):
-    """Un-escape any percent-escape sequences in a URI that are unreserved
-    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
-    """
-    parts = uri.split('%')
-    for i in range(1, len(parts)):
-        h = parts[i][0:2]
-        if len(h) == 2 and h.isalnum():
-            try:
-                c = chr(int(h, 16))
-            except ValueError:
-                raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
-
-            if c in UNRESERVED_SET:
-                parts[i] = c + parts[i][2:]
-            else:
-                parts[i] = '%' + parts[i]
-        else:
-            parts[i] = '%' + parts[i]
-    return ''.join(parts)
-
-
-def requote_uri(uri):
-    """Re-quote the given URI.
-
-    This function passes the given URI through an unquote/quote cycle to
-    ensure that it is fully and consistently quoted.
-    """
-    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
-    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
-    try:
-        # Unquote only the unreserved characters
-        # Then quote only illegal characters (do not quote reserved,
-        # unreserved, or '%')
-        return quote(unquote_unreserved(uri), safe=safe_with_percent)
-    except InvalidURL:
-        # We couldn't unquote the given URI, so let's try quoting it, but
-        # there may be unquoted '%'s in the URI. We need to make sure they're
-        # properly quoted so they do not cause issues elsewhere.
-        return quote(uri, safe=safe_without_percent)
-
-
-def address_in_network(ip, net):
-    """
-    This function allows you to check if on IP belongs to a network subnet
-    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
-             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
-    """
-    ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
-    netaddr, bits = net.split('/')
-    netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
-    network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
-    return (ipaddr & netmask) == (network & netmask)
-
-
-def dotted_netmask(mask):
-    """
-    Converts mask from /xx format to xxx.xxx.xxx.xxx
-    Example: if mask is 24 function returns 255.255.255.0
-    """
-    bits = 0xffffffff ^ (1 << 32 - mask) - 1
-    return socket.inet_ntoa(struct.pack('>I', bits))
-
-
-def is_ipv4_address(string_ip):
-    try:
-        socket.inet_aton(string_ip)
-    except socket.error:
-        return False
-    return True
-
-
-def is_valid_cidr(string_network):
-    """Very simple check of the cidr format in no_proxy variable"""
-    if string_network.count('/') == 1:
-        try:
-            mask = int(string_network.split('/')[1])
-        except ValueError:
-            return False
-
-        if mask < 1 or mask > 32:
-            return False
-
-        try:
-            socket.inet_aton(string_network.split('/')[0])
-        except socket.error:
-            return False
-    else:
-        return False
-    return True
-
-
-def should_bypass_proxies(url):
-    """
-    Returns whether we should bypass proxies or not.
-    """
-    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
-
-    # First check whether no_proxy is defined. If it is, check that the URL
-    # we're getting isn't in the no_proxy list.
-    no_proxy = get_proxy('no_proxy')
-    netloc = urlparse(url).netloc
-
-    if no_proxy:
-        # We need to check whether we match here. We need to see if we match
-        # the end of the netloc, both with and without the port.
-        no_proxy = (
-            host for host in no_proxy.replace(' ', '').split(',') if host
-        )
-
-        ip = netloc.split(':')[0]
-        if is_ipv4_address(ip):
-            for proxy_ip in no_proxy:
-                if is_valid_cidr(proxy_ip):
-                    if address_in_network(ip, proxy_ip):
-                        return True
-        else:
-            for host in no_proxy:
-                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
-                    # The URL does match something in no_proxy, so we don't want
-                    # to apply the proxies on this URL.
-                    return True
-
-    # If the system proxy settings indicate that this URL should be bypassed,
-    # don't proxy.
-    # The proxy_bypass function is incredibly buggy on OS X in early versions
-    # of Python 2.6, so allow this call to fail. Only catch the specific
-    # exceptions we've seen, though: this call failing in other ways can reveal
-    # legitimate problems.
-    try:
-        bypass = proxy_bypass(netloc)
-    except (TypeError, socket.gaierror):
-        bypass = False
-
-    if bypass:
-        return True
-
-    return False
-
-
-def get_environ_proxies(url):
-    """Return a dict of environment proxies."""
-    if should_bypass_proxies(url):
-        return {}
-    else:
-        return getproxies()
-
-
-def select_proxy(url, proxies):
-    """Select a proxy for the url, if applicable.
-
-    :param url: The url being for the request
-    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
-    """
-    proxies = proxies or {}
-    urlparts = urlparse(url)
-    if urlparts.hostname is None:
-        proxy = None
-    else:
-        proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname)
-    if proxy is None:
-        proxy = proxies.get(urlparts.scheme)
-    return proxy
-
-
-def default_user_agent(name="python-requests"):
-    """Return a string representing the default user agent."""
-    return '%s/%s' % (name, __version__)
-
-
-def default_headers():
-    return CaseInsensitiveDict({
-        'User-Agent': default_user_agent(),
-        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
-        'Accept': '*/*',
-        'Connection': 'keep-alive',
-    })
-
-
-def parse_header_links(value):
-    """Return a dict of parsed link headers proxies.
-
-    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
-
-    """
-
-    links = []
-
-    replace_chars = ' \'"'
-
-    for val in re.split(', *<', value):
-        try:
-            url, params = val.split(';', 1)
-        except ValueError:
-            url, params = val, ''
-
-        link = {'url': url.strip('<> \'"')}
-
-        for param in params.split(';'):
-            try:
-                key, value = param.split('=')
-            except ValueError:
-                break
-
-            link[key.strip(replace_chars)] = value.strip(replace_chars)
-
-        links.append(link)
-
-    return links
-
-
-# Null bytes; no need to recreate these on each call to guess_json_utf
-_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
-_null2 = _null * 2
-_null3 = _null * 3
-
-
-def guess_json_utf(data):
-    # JSON always starts with two ASCII characters, so detection is as
-    # easy as counting the nulls and from their location and count
-    # determine the encoding. Also detect a BOM, if present.
-    sample = data[:4]
-    if sample in (codecs.BOM_UTF32_LE, codecs.BOM32_BE):
-        return 'utf-32'     # BOM included
-    if sample[:3] == codecs.BOM_UTF8:
-        return 'utf-8-sig'  # BOM included, MS style (discouraged)
-    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
-        return 'utf-16'     # BOM included
-    nullcount = sample.count(_null)
-    if nullcount == 0:
-        return 'utf-8'
-    if nullcount == 2:
-        if sample[::2] == _null2:   # 1st and 3rd are null
-            return 'utf-16-be'
-        if sample[1::2] == _null2:  # 2nd and 4th are null
-            return 'utf-16-le'
-        # Did not detect 2 valid UTF-16 ascii-range characters
-    if nullcount == 3:
-        if sample[:3] == _null3:
-            return 'utf-32-be'
-        if sample[1:] == _null3:
-            return 'utf-32-le'
-        # Did not detect a valid UTF-32 ascii-range character
-    return None
-
-
-def prepend_scheme_if_needed(url, new_scheme):
-    """Given a URL that may or may not have a scheme, prepend the given scheme.
-    Does not replace a present scheme with the one provided as an argument."""
-    scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
-
-    # urlparse is a finicky beast, and sometimes decides that there isn't a
-    # netloc present. Assume that it's being over-cautious, and switch netloc
-    # and path if urlparse decided there was no netloc.
-    if not netloc:
-        netloc, path = path, netloc
-
-    return urlunparse((scheme, netloc, path, params, query, fragment))
-
-
-def get_auth_from_url(url):
-    """Given a url with authentication components, extract them into a tuple of
-    username,password."""
-    parsed = urlparse(url)
-
-    try:
-        auth = (unquote(parsed.username), unquote(parsed.password))
-    except (AttributeError, TypeError):
-        auth = ('', '')
-
-    return auth
-
-
-def to_native_string(string, encoding='ascii'):
-    """
-    Given a string object, regardless of type, returns a representation of that
-    string in the native string type, encoding and decoding where necessary.
-    This assumes ASCII unless told otherwise.
-    """
-    if isinstance(string, builtin_str):
-        out = string
-    else:
-        if is_py2:
-            out = string.encode(encoding)
-        else:
-            out = string.decode(encoding)
-
-    return out
-
-
-def urldefragauth(url):
-    """
-    Given a url remove the fragment and the authentication part
-    """
-    scheme, netloc, path, params, query, fragment = urlparse(url)
-
-    # see func:`prepend_scheme_if_needed`
-    if not netloc:
-        netloc, path = path, netloc
-
-    netloc = netloc.rsplit('@', 1)[-1]
-
-    return urlunparse((scheme, netloc, path, params, query, ''))
diff --git a/python/gui/attributetable/qgsdualview.sip b/python/gui/attributetable/qgsdualview.sip
index d67105b..ba7a3f6 100644
--- a/python/gui/attributetable/qgsdualview.sip
+++ b/python/gui/attributetable/qgsdualview.sip
@@ -32,16 +32,8 @@ class QgsDualView : QStackedWidget
     explicit QgsDualView( QWidget* parent /TransferThis/ = 0 );
     virtual ~QgsDualView();
 
-    /**
-     * Has to be called to initialize the dual view.
-     *
-     * @param layer      The layer which should be used to fetch features
-     * @param mapCanvas  The mapCanvas (used for the FilterMode
-     *                   {@link QgsAttributeTableFilterModel::ShowVisible}
-     * @param request    Use a modified request to limit the shown features
-     * @param context    The context in which this view is shown
-     */
-    void init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request = QgsFeatureRequest(), const QgsAttributeEditorContext& context = QgsAttributeEditorContext() );
+    void init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request = QgsFeatureRequest(), const QgsAttributeEditorContext& context = QgsAttributeEditorContext(),
+               bool loadFeatures = true );
 
     /**
      * Change the current view mode.
diff --git a/python/gui/qgsgeometryrubberband.sip b/python/gui/qgsgeometryrubberband.sip
index 4d8cc7a..3cb22a7 100644
--- a/python/gui/qgsgeometryrubberband.sip
+++ b/python/gui/qgsgeometryrubberband.sip
@@ -54,7 +54,7 @@ class QgsGeometryRubberBand: QgsMapCanvasItem
     ~QgsGeometryRubberBand();
 
     /** Sets geometry (takes ownership). Geometry is expected to be in map coordinates */
-    void setGeometry( QgsAbstractGeometryV2* geom );
+    void setGeometry( QgsAbstractGeometryV2* geom /Transfer/ );
     /** Returns a pointer to the geometry*/
     const QgsAbstractGeometryV2* geometry();
     /** Moves vertex to new position (in map coordinates)*/
diff --git a/python/plugins/MetaSearch/dialogs/maindialog.py b/python/plugins/MetaSearch/dialogs/maindialog.py
index 70e6fdf..6b8267e 100644
--- a/python/plugins/MetaSearch/dialogs/maindialog.py
+++ b/python/plugins/MetaSearch/dialogs/maindialog.py
@@ -49,10 +49,10 @@ from MetaSearch.dialogs.manageconnectionsdialog import ManageConnectionsDialog
 from MetaSearch.dialogs.newconnectiondialog import NewConnectionDialog
 from MetaSearch.dialogs.recorddialog import RecordDialog
 from MetaSearch.dialogs.xmldialog import XMLDialog
-from MetaSearch.util import (get_connections_from_file, get_ui_class,
-                             get_help_url, highlight_xml, normalize_text,
-                             open_url, render_template, serialize_string,
-                             StaticContext)
+from MetaSearch.util import (clean_ows_url, get_connections_from_file,
+                             get_ui_class, get_help_url, highlight_xml,
+                             normalize_text, open_url, render_template,
+                             serialize_string, StaticContext)
 
 BASE_CLASS = get_ui_class('maindialog.ui')
 
@@ -718,7 +718,7 @@ class MetaSearchDialog(QDialog, BASE_CLASS):
 
         # no dups detected or overwrite is allowed
         self.settings.beginGroup('/Qgis/connections-%s' % stype[1])
-        self.settings.setValue('/%s/url' % sname, data_url)
+        self.settings.setValue('/%s/url' % sname, clean_ows_url(data_url))
         self.settings.endGroup()
 
         # open provider window
diff --git a/python/plugins/MetaSearch/metadata.txt b/python/plugins/MetaSearch/metadata.txt
index 509e8dd..d679f8f 100644
--- a/python/plugins/MetaSearch/metadata.txt
+++ b/python/plugins/MetaSearch/metadata.txt
@@ -3,7 +3,7 @@ name=MetaSearch Catalogue Client
 description=MetaSearch is a QGIS plugin to interact with metadata catalogue services (CSW).
 about=MetaSearch is a QGIS plugin to interact with metadata catalogue services, supporting the OGC Catalogue Service for the Web (CSW) standard. MetaSearch provides an easy and intuitive approach and user-friendly interface to searching metadata catalogues within QGIS.
 category=Web
-version=0.3.4
+version=0.3.5
 qgisMinimumVersion=2.0
 icon=images/MetaSearch.png
 author=Tom Kralidis
@@ -14,9 +14,7 @@ tracker=https://hub.qgis.org/projects/quantum-gis/issues?category_id=107&set_fil
 repository=https://github.com/qgis/QGIS/tree/master/python/plugins/MetaSearch
 experimental=False
 deprecated=False
-changelog=Version 0.3.4 (2015-09-28)
-    - fix crash due to CSW error causing crash (#12280)
-    - add error handling for paging workflows
-    - fix loading of custom CSW connections XML files
-    - update OWS connection naming to overwrite or serialize string (#12327)
-    - fix syntax errors in default connections list
+changelog=Version 0.3.5 (2017-03-05)
+    - fix help functionality (#16176)
+    - clean OWS URL when adding from search (#16261)
+    - updates to core CSW list (Greece, Portugal data.gov https)
diff --git a/python/plugins/MetaSearch/resources/connections-default.xml b/python/plugins/MetaSearch/resources/connections-default.xml
index 4681d2a..49a8dbf 100644
--- a/python/plugins/MetaSearch/resources/connections-default.xml
+++ b/python/plugins/MetaSearch/resources/connections-default.xml
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Guidance: https://hub.qgis.org/wiki/quantum-gis/MetaSearch_Default_Connections_List -->
 <qgsCSWConnections version="1.0">
-    <csw name="USA: Data.gov CSW" url="http://catalog.data.gov/csw-all"/>
+    <csw name="USA: Data.gov CSW" url="https://catalog.data.gov/csw-all"/>
     <csw name="Danmark: National CSW" url="http://www.geodata-info.dk/registrant/srv/en/csw"/>
     <csw name="Finland: National CSW (Paikkatietohakemisto)" url="http://www.paikkatietohakemisto.fi/geonetwork/srv/fi/csw"/>
     <csw name="Greece: Geodata.gov.gr CSW" url="http://geodata.gov.gr/csw"/>
diff --git a/python/plugins/MetaSearch/util.py b/python/plugins/MetaSearch/util.py
index e041289..e6c5427 100644
--- a/python/plugins/MetaSearch/util.py
+++ b/python/plugins/MetaSearch/util.py
@@ -40,7 +40,7 @@ from jinja2 import Environment, FileSystemLoader
 from pygments import highlight
 from pygments.lexers import XmlLexer
 from pygments.formatters import HtmlFormatter
-from qgis.PyQt.QtCore import QSettings
+from qgis.PyQt.QtCore import QSettings, QUrl
 from qgis.PyQt.QtWidgets import QMessageBox
 from qgis.PyQt.uic import loadUiType
 
@@ -133,7 +133,12 @@ def get_help_url():
     """return QGIS MetaSearch help documentation link"""
 
     locale_name = QSettings().value('locale/userLocale')[0:2]
-    version = QGis.QGIS_VERSION.rsplit('.', 1)[0]
+    major, minor = QGis.QGIS_VERSION.split('.')[:2]
+
+    if minor == '99':  # master
+        version = 'testing'
+    else:
+        version = '.'.join([major, minor])
 
     path = '%s/%s/docs/user_manual/plugins/plugins_metasearch.html' % \
            (version, locale_name)
@@ -167,3 +172,15 @@ def serialize_string(input_string):
         value = '%s 1' % input_string
 
     return value
+
+
+def clean_ows_url(url):
+    """clean an OWS URL of added basic service parameters"""
+
+    url2 = QUrl(url)
+    url2.removeEncodedQueryItem('service')
+    url2.removeEncodedQueryItem('SERVICE')
+    url2.removeEncodedQueryItem('request')
+    url2.removeEncodedQueryItem('REQUEST')
+
+    return url2.toString()
diff --git a/python/plugins/processing/algs/qgis/Merge.py b/python/plugins/processing/algs/qgis/Merge.py
index ba16d93..6eec00d 100644
--- a/python/plugins/processing/algs/qgis/Merge.py
+++ b/python/plugins/processing/algs/qgis/Merge.py
@@ -35,6 +35,7 @@ from processing.core.GeoAlgorithm import GeoAlgorithm
 from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
 from processing.core.parameters import ParameterMultipleInput
 from processing.core.outputs import OutputVector
+from processing.tools import dataobjects
 
 pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
 
diff --git a/python/plugins/processing/algs/saga/CMakeLists.txt b/python/plugins/processing/algs/saga/CMakeLists.txt
index ff69a4c..47ce2fa 100644
--- a/python/plugins/processing/algs/saga/CMakeLists.txt
+++ b/python/plugins/processing/algs/saga/CMakeLists.txt
@@ -5,6 +5,7 @@ FILE(GLOB DESCR214_FILES description/2.1.4/*.txt)
 FILE(GLOB DESCR220_FILES description/2.2.0/*.txt)
 FILE(GLOB DESCR222_FILES description/2.2.2/*.txt)
 FILE(GLOB DESCR223_FILES description/2.2.3/*.txt)
+FILE(GLOB DESCR230_FILES description/2.3.0/*.txt)
 FILE(GLOB HELP_FILES help/*.rst)
 
 ADD_SUBDIRECTORY(ext)
@@ -16,4 +17,5 @@ PLUGIN_INSTALL(processing algs/saga/description/2.1.4 ${DESCR214_FILES})
 PLUGIN_INSTALL(processing algs/saga/description/2.2.0 ${DESCR220_FILES})
 PLUGIN_INSTALL(processing algs/saga/description/2.2.2 ${DESCR222_FILES})
 PLUGIN_INSTALL(processing algs/saga/description/2.2.3 ${DESCR223_FILES})
+PLUGIN_INSTALL(processing algs/saga/description/2.3.0 ${DESCR230_FILES})
 PLUGIN_INSTALL(processing algs/saga/help ${HELP_FILES})
diff --git a/python/plugins/processing/algs/saga/SagaAlgorithm230.py b/python/plugins/processing/algs/saga/SagaAlgorithm230.py
new file mode 100644
index 0000000..75d9e3e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/SagaAlgorithm230.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+"""
+***************************************************************************
+    SagaAlgorithm230.py
+    ---------------------
+    Date                 : March 2017
+    Copyright            : (C) 2017 by Victor Olaya
+    Email                : volayaf at gmail dot com
+***************************************************************************
+*                                                                         *
+*   This program is free software; you can redistribute it and/or modify  *
+*   it under the terms of the GNU General Public License as published by  *
+*   the Free Software Foundation; either version 2 of the License, or     *
+*   (at your option) any later version.                                   *
+*                                                                         *
+***************************************************************************
+"""
+
+__author__ = 'Victor Olaya'
+__date__ = 'March 2017'
+__copyright__ = '(C) 2017, Victor Olaya'
+
+# This will get replaced with a git SHA1 when you do a git archive
+
+__revision__ = '$Format:%H$'
+
+import os
+from SagaAlgorithm214 import SagaAlgorithm214
+from processing.tools import dataobjects
+from processing.tools.system import getTempFilenameInTempFolder
+
+sessionExportedLayers = {}
+
+
+class SagaAlgorithm230(SagaAlgorithm214):
+
+    def getCopy(self):
+        newone = SagaAlgorithm230(self.descriptionFile)
+        newone.provider = self.provider
+        return newone
+
+    def exportRasterLayer(self, source):
+        global sessionExportedLayers
+        if source in sessionExportedLayers:
+            exportedLayer = sessionExportedLayers[source]
+            if os.path.exists(exportedLayer):
+                self.exportedLayers[source] = exportedLayer
+                return None
+            else:
+                del sessionExportedLayers[source]
+        layer = dataobjects.getObjectFromUri(source, False)
+        if layer:
+            filename = layer.name()
+        else:
+            filename = os.path.basename(source)
+        validChars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789:'
+        filename = ''.join(c for c in filename if c in validChars)
+        if len(filename) == 0:
+            filename = 'layer'
+        destFilename = getTempFilenameInTempFolder(filename + '.sgrd')
+        self.exportedLayers[source] = destFilename
+        sessionExportedLayers[source] = destFilename
+        return 'io_gdal 0 -TRANSFORM 1 -RESAMPLING 0 -GRIDS "' + destFilename + '" -FILES "' + source + '"'
diff --git a/python/plugins/processing/algs/saga/SagaAlgorithmProvider.py b/python/plugins/processing/algs/saga/SagaAlgorithmProvider.py
index 2956b17..2f7ff56 100644
--- a/python/plugins/processing/algs/saga/SagaAlgorithmProvider.py
+++ b/python/plugins/processing/algs/saga/SagaAlgorithmProvider.py
@@ -33,6 +33,7 @@ from processing.core.ProcessingLog import ProcessingLog
 from .SagaAlgorithm212 import SagaAlgorithm212
 from .SagaAlgorithm213 import SagaAlgorithm213
 from .SagaAlgorithm214 import SagaAlgorithm214
+from SagaAlgorithm230 import SagaAlgorithm230
 from .SplitRGBBands import SplitRGBBands
 from . import SagaUtils
 from processing.tools.system import isWindows, isMac
@@ -50,7 +51,9 @@ class SagaAlgorithmProvider(AlgorithmProvider):
                          "2.2.0": ("2.2.0", SagaAlgorithm214),
                          "2.2.1": ("2.2.0", SagaAlgorithm214),
                          "2.2.2": ("2.2.2", SagaAlgorithm214),
-                         "2.2.3": ("2.2.3", SagaAlgorithm214)}
+                         "2.2.3": ("2.2.3", SagaAlgorithm214),
+                         "2.3.0": ("2.3.0", SagaAlgorithm230),
+                         "2.3.1": ("2.3.1", SagaAlgorithm230)}
 
     def __init__(self):
         AlgorithmProvider.__init__(self)
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Anisotropic).txt b/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Anisotropic).txt
new file mode 100644
index 0000000..b50c05a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Anisotropic).txt
@@ -0,0 +1,8 @@
+Accumulated Cost (Anisotropic)
+grid_analysis
+ParameterRaster|COST|Cost Grid|False
+ParameterRaster|DIRECTION|Direction of max cost|False
+ParameterRaster|POINTS|Destination Points|False
+ParameterNumber|K|k factor|None|None|1
+ParameterNumber|THRESHOLD|Threshold for different route|None|None|0
+OutputRaster|ACCCOST|Accumulated Cost
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Isotropic).txt b/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Isotropic).txt
new file mode 100644
index 0000000..e4d4a49
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AccumulatedCost(Isotropic).txt
@@ -0,0 +1,7 @@
+Accumulated Cost (Isotropic)
+grid_analysis
+ParameterRaster|COST|Cost Grid|False
+ParameterRaster|POINTS|Destination Points|False
+ParameterNumber|THRESHOLD|Threshold for different route|None|None|0.0
+OutputRaster|ACCCOST|Accumulated Cost
+OutputRaster|CLOSESTPT|Closest Point
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AddCoordinatestopoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/AddCoordinatestopoints.txt
new file mode 100644
index 0000000..d4f6a0e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AddCoordinatestopoints.txt
@@ -0,0 +1,4 @@
+Add Coordinates to points
+shapes_points
+ParameterVector|INPUT|Points|0|False
+OutputVector|OUTPUT|Points with coordinates
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoPoints.txt
new file mode 100644
index 0000000..56164a8
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoPoints.txt
@@ -0,0 +1,7 @@
+Add Grid Values to Points
+shapes_grid
+ParameterVector|SHAPES|Points|0|False
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|INTERPOL|Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+OutputVector|RESULT|Result
+AllowUnmatching
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoShapes.txt b/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoShapes.txt
new file mode 100644
index 0000000..1795975
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AddGridValuestoShapes.txt
@@ -0,0 +1,7 @@
+Add Grid Values to Shapes
+shapes_grid
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|INTERPOL|Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+OutputVector|RESULT|Result
+AllowUnmatching
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AddPointAttributestoPolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/AddPointAttributestoPolygons.txt
new file mode 100644
index 0000000..cbae721
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AddPointAttributestoPolygons.txt
@@ -0,0 +1,7 @@
+Add Point Attributes to Polygons
+shapes_polygons
+ParameterVector|INPUT|Polygons|2|False
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELDS|Attributes|POINTS|-1|False
+ParameterBoolean|ADD_LOCATION_INFO|Add location info|False
+OutputVector|OUTPUT|Result
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AddPolygonAttributestoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/AddPolygonAttributestoPoints.txt
new file mode 100644
index 0000000..1515e6a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AddPolygonAttributestoPoints.txt
@@ -0,0 +1,6 @@
+Add Polygon Attributes to Points
+shapes_points
+ParameterVector|INPUT|Points|0|False
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|FIELDS|Attribute|POLYGONS|-1|False
+OutputVector|OUTPUT|Result
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Aggregate.txt b/python/plugins/processing/algs/saga/description/2.3.0/Aggregate.txt
new file mode 100644
index 0000000..eec522c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Aggregate.txt
@@ -0,0 +1,5 @@
+Aggregate
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|SIZE|Aggregation Size|None|None|3
+ParameterSelection|METHOD|Method|[0] Sum;[1] Min;[2] Max
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AggregatePointObservations.txt b/python/plugins/processing/algs/saga/description/2.3.0/AggregatePointObservations.txt
new file mode 100644
index 0000000..98bb011
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AggregatePointObservations.txt
@@ -0,0 +1,14 @@
+Aggregate Point Observations
+shapes_points
+ParameterVector|REFERENCE|Reference Points|-1|False
+ParameterTableField|REFERENCE_ID|ID|REFERENCE|-1|False
+ParameterTable|OBSERVATIONS|Observations|False
+ParameterTableField|X|X|OBSERVATIONS|-1|False
+ParameterTableField|Y|Y|OBSERVATIONS|-1|False
+ParameterTableField|TRACK|Track|OBSERVATIONS|-1|False
+ParameterTableField|DATE|Date|OBSERVATIONS|-1|False
+ParameterTableField|TIME|Time|OBSERVATIONS|-1|False
+ParameterTableField|PARAMETER|Parameter|OBSERVATIONS|-1|False
+ParameterNumber|EPS_TIME|Maximum Time Span (Seconds)|None|None|60.0
+ParameterNumber|EPS_SPACE|Maximum Distance|None|None|0.002
+OutputTable|AGGREGATED|Aggregated
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AggregationIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/AggregationIndex.txt
new file mode 100644
index 0000000..c533829
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AggregationIndex.txt
@@ -0,0 +1,5 @@
+Aggregation Index
+grid_analysis
+ParameterRaster|INPUT|Input Grid|False
+ParameterNumber|MAXNUMCLASS|Max. Number of Classes|None|None|5
+OutputTable|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHierarchyProcess.txt b/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHierarchyProcess.txt
new file mode 100644
index 0000000..426c781
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHierarchyProcess.txt
@@ -0,0 +1,5 @@
+Analytical Hierarchy Process
+grid_analysis
+ParameterMultipleInput|GRIDS|Input Grids|3|False
+ParameterTable|TABLE|Pairwise Comparisons Table|False
+OutputRaster|OUTPUT|Output Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHillshading.txt b/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHillshading.txt
new file mode 100644
index 0000000..524651c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/AnalyticalHillshading.txt
@@ -0,0 +1,8 @@
+Analytical Hillshading
+ta_lighting
+ParameterRaster|ELEVATION|Elevation|False
+ParameterSelection|METHOD|Shading Method|[0] Standard;[1] Standard (max. 90Degree);[2] Combined Shading;[3] Ray Tracing
+ParameterNumber|AZIMUTH|Azimuth [Degree]|None|None|315.0
+ParameterNumber|DECLINATION|Declination [Degree]|None|None|45.0
+ParameterNumber|EXAGGERATION|Exaggeration|None|None|4.0
+OutputRaster|SHADE|Analytical Hillshading
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/B-SplineApproximation.txt b/python/plugins/processing/algs/saga/description/2.3.0/B-SplineApproximation.txt
new file mode 100644
index 0000000..79a2bf5
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/B-SplineApproximation.txt
@@ -0,0 +1,10 @@
+B-Spline Approximation
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterNumber|LEVEL|Resolution|0.001|None|1.0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/BurnStreamNetworkintoDEM.txt b/python/plugins/processing/algs/saga/description/2.3.0/BurnStreamNetworkintoDEM.txt
new file mode 100644
index 0000000..22f289c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/BurnStreamNetworkintoDEM.txt
@@ -0,0 +1,8 @@
+Burn Stream Network into DEM
+ta_preprocessor
+ParameterRaster|DEM|DEM|False
+ParameterRaster|STREAM|Streams|False
+ParameterRaster|FLOWDIR|Flow direction|False
+ParameterSelection|METHOD|Method|[0] simply decrease cell's value by epsilon;[1] lower cell's value to neighbours minimum value minus epsilon;[2] trace stream network downstream
+ParameterNumber|EPSILON|Epsilon|0.0|None|1.0
+OutputRaster|BURN|Processed DEM
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CellBalance.txt b/python/plugins/processing/algs/saga/description/2.3.0/CellBalance.txt
new file mode 100644
index 0000000..80edebc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CellBalance.txt
@@ -0,0 +1,7 @@
+Cell Balance
+ta_hydrology
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|WEIGHTS|Parameter|True
+ParameterNumber|WEIGHTS_DEFAULT|Default Weight|0.0|None|1.0
+ParameterSelection|METHOD|Method|[0] Deterministic 8;[1] Multiple Flow Direction
+OutputRaster|BALANCE|Cell Balance
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChangeDateFormat.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChangeDateFormat.txt
new file mode 100644
index 0000000..9adfcd0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChangeDateFormat.txt
@@ -0,0 +1,7 @@
+Change Date Format
+table_tools
+ParameterTable|TABLE|Table|False
+ParameterTableField|FIELD|Date Field|TABLE|-1|False
+ParameterSelection|FMT_IN|Input Format|[0] dd.mm.yy;[1] yy.mm.dd;[2] dd:mm:yy;[3] yy:mm:dd;[4] ddmmyyyy, fix size;[5] yyyymmdd, fix size;[6] ddmmyy, fix size;[7] yymmdd, fix size;[8] Julian Day
+ParameterSelection|FMT_OUT|Output Format|[0] dd.mm.yy;[1] yy.mm.dd;[2] dd:mm:yy;[3] yy:mm:dd;[4] ddmmyyyy, fix size;[5] yyyymmdd, fix size;[6] ddmmyy, fix size;[7] yymmdd, fix size;[8] Julian Day
+OutputTable|OUTPUT|Output
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChangeDetection.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChangeDetection.txt
new file mode 100644
index 0000000..f171b9f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChangeDetection.txt
@@ -0,0 +1,16 @@
+Change Detection
+imagery_classification
+ParameterRaster|INITIAL|Initial State|False
+ParameterTable|INI_LUT|Look-up Table|True
+ParameterTableField|INI_LUT_MIN|Value|INI_LUT|-1|False
+ParameterTableField|INI_LUT_MAX|Value (Maximum)|INI_LUT|-1|False
+ParameterTableField|INI_LUT_NAM|Name|INI_LUT|-1|False
+ParameterRaster|FINAL|Final State|False
+ParameterTable|FIN_LUT|Look-up Table|True
+ParameterTableField|FIN_LUT_MIN|Value|FIN_LUT|-1|False
+ParameterTableField|FIN_LUT_MAX|Value (Maximum)|FIN_LUT|-1|False
+ParameterTableField|FIN_LUT_NAM|Name|FIN_LUT|-1|False
+ParameterBoolean|NOCHANGE         |Report Unchanged Classes|True
+ParameterSelection|OUTPUT|Output as...|[0] cells;[1] percent;[2] area
+OutputRaster|CHANGE|Changes
+OutputTable|CHANGES|Changes
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChangeGridValues.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChangeGridValues.txt
new file mode 100644
index 0000000..8d3bd69
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChangeGridValues.txt
@@ -0,0 +1,6 @@
+Change Grid Values
+grid_tools
+ParameterRaster|GRID_IN|Grid|False
+ParameterSelection|METHOD|Replace Condition|[0] Grid value equals low value;[1] Low value < grid value < high value;[2] Low value <= grid value < high value
+ParameterFixedTable|LOOKUP|Lookup Table|3|Low Value;High Value;Replace with|False
+OutputRaster|GRID_OUT|Changed Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChangeTimeFormat.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChangeTimeFormat.txt
new file mode 100644
index 0000000..0a384af
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChangeTimeFormat.txt
@@ -0,0 +1,7 @@
+Change Time Format
+table_tools
+ParameterTable|TABLE|Table|False
+ParameterTableField|FIELD|Time Field|TABLE|-1|False
+ParameterSelection|FMT_IN|Input Format|[0] hh.mm.ss;[1] hh:mm:ss;[2] hhmmss, fix size;[3] hours;[4] minutes;[5] seconds
+ParameterSelection|FMT_OUT|Output Format|[0] hh.mm.ss;[1] hh:mm:ss;[2] hhmmss, fix size;[3] hours;[4] minutes;[5] seconds
+OutputTable|OUTPUT|Output
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetwork.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetwork.txt
new file mode 100644
index 0000000..b7bf16f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetwork.txt
@@ -0,0 +1,14 @@
+Channel Network
+ta_channels
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|SINKROUTE|Flow Direction|True
+ParameterRaster|INIT_GRID|Initiation Grid|False
+ParameterSelection|INIT_METHOD|Initiation Type|[0] Less than;[1] Equals;[2] Greater than
+ParameterNumber|INIT_VALUE|Initiation Threshold|None|None|0.0
+ParameterRaster|DIV_GRID|Divergence|True
+ParameterNumber|DIV_CELLS|Tracing: Max. Divergence|None|None|10
+ParameterRaster|TRACE_WEIGHT|Tracing: Weight|True
+ParameterNumber|MINLEN|Min. Segment Length|0.0|None|10
+OutputRaster|CHNLNTWRK|Channel Network
+OutputRaster|CHNLROUTE|Channel Direction
+OutputVector|SHAPES|Channel Network
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetworkandDrainageBasins.txt b/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetworkandDrainageBasins.txt
new file mode 100644
index 0000000..17e8cca
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ChannelNetworkandDrainageBasins.txt
@@ -0,0 +1,11 @@
+Channel Network and Drainage Basins
+ta_channels
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|THRESHOLD|Threshold|None|None|5.0
+OutputRaster|DIRECTION|Flow Direction
+OutputRaster|CONNECTION|Flow Connectivity
+OutputRaster|ORDER|Strahler Order
+OutputRaster|BASIN|Drainage Basins
+OutputVector|SEGMENTS|Channels
+OutputVector|BASINS|Drainage Basins
+OutputVector|NODES|Junctions
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ClipGridwithPolygon.txt b/python/plugins/processing/algs/saga/description/2.3.0/ClipGridwithPolygon.txt
new file mode 100644
index 0000000..b76d1a2
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ClipGridwithPolygon.txt
@@ -0,0 +1,5 @@
+Clip Grid with Polygon
+shapes_grid
+ParameterRaster|INPUT|Input|False
+ParameterVector|POLYGONS|Polygons|2|False
+OutputRaster|OUTPUT|Clipped
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ClipPointswithPolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/ClipPointswithPolygons.txt
new file mode 100644
index 0000000..90b8f12
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ClipPointswithPolygons.txt
@@ -0,0 +1,7 @@
+Clip Points with Polygons
+shapes_points
+ParameterVector|POINTS|Points|0|False
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|FIELD|Add Attribute to Clipped Points|POLYGONS|-1|False
+ParameterSelection|METHOD|Clipping Options|[0] one layer for all points;[1] separate layer for each polygon
+OutputVector|CLIPS|Clipped Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CloseGaps.txt b/python/plugins/processing/algs/saga/description/2.3.0/CloseGaps.txt
new file mode 100644
index 0000000..ffa217f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CloseGaps.txt
@@ -0,0 +1,6 @@
+Close Gaps
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterRaster|MASK|Mask|True
+ParameterNumber|THRESHOLD|Tension Threshold|None|None|0.1
+OutputRaster|RESULT|Changed Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CloseGapswithSpline.txt b/python/plugins/processing/algs/saga/description/2.3.0/CloseGapswithSpline.txt
new file mode 100644
index 0000000..cd70312
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CloseGapswithSpline.txt
@@ -0,0 +1,12 @@
+Close Gaps with Spline
+grid_tools
+ParameterRaster|GRID|Grid|False
+ParameterRaster|MASK|Mask|True
+ParameterNumber|MAXGAPCELLS|Only Process Gaps with Less Cells|None|None|0
+ParameterNumber|MAXPOINTS|Maximum Points|None|None|1000
+ParameterNumber|LOCALPOINTS|Number of Points for Local Interpolation|None|None|10
+ParameterBoolean|EXTENDED         |Extended Neighourhood|True
+ParameterSelection|NEIGHBOURS|Neighbourhood|[0] Neumann;[1] Moore
+ParameterNumber|RADIUS|Radius (Cells)|None|None|0
+ParameterNumber|RELAXATION|Relaxation|None|None|0.0
+OutputRaster|CLOSED|Closed Gaps Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CloseOneCellGaps.txt b/python/plugins/processing/algs/saga/description/2.3.0/CloseOneCellGaps.txt
new file mode 100644
index 0000000..d3b4aa0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CloseOneCellGaps.txt
@@ -0,0 +1,4 @@
+Close One Cell Gaps
+grid_tools
+ParameterRaster|INPUT|Grid|False
+OutputRaster|RESULT|Changed Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ClusterAnalysisforGrids.txt b/python/plugins/processing/algs/saga/description/2.3.0/ClusterAnalysisforGrids.txt
new file mode 100644
index 0000000..0c477e9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ClusterAnalysisforGrids.txt
@@ -0,0 +1,9 @@
+Cluster Analysis for Grids
+imagery_classification
+ParameterMultipleInput|GRIDS|Grids|3.0|False
+ParameterSelection|METHOD|Method|[0] Iterative Minimum Distance (Forgy 1965);[1] Hill-Climbing (Rubin 1967);[2] Combined Minimum Distance / Hillclimbing
+ParameterNumber|NCLUSTER|Clusters|None|None|5
+ParameterBoolean|NORMALISE       |Normalise|True
+ParameterBoolean|OLDVERSION      |Old Version|True
+OutputRaster|CLUSTER|Clusters
+OutputTable|STATISTICS|Statistics
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConfusionMatrix(GridPolygons).txt b/python/plugins/processing/algs/saga/description/2.3.0/ConfusionMatrix(GridPolygons).txt
new file mode 100644
index 0000000..496cb9b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConfusionMatrix(GridPolygons).txt
@@ -0,0 +1,12 @@
+Confusion Matrix (Polygons / Grid)
+imagery_classification
+ParameterRaster|GRID|Classification|False
+ParameterTable|GRID_LUT|Look-up Table|True
+ParameterTableField|GRID_LUT_MIN|Value|GRID_LUT|-1|True
+ParameterTableField|GRID_LUT_MAX|Value (Maximum)|GRID_LUT|-1|True
+ParameterTableField|GRID_LUT_NAM|Name|GRID_LUT|-1|True
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|FIELD|Classes|POLYGONS|-1|False
+OutputTable|CONFUSION|Confusion Matrix
+OutputTable|CLASSES|Class Values
+OutputTable|SUMMARY|Summary
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ContourLinesfromGrid.txt b/python/plugins/processing/algs/saga/description/2.3.0/ContourLinesfromGrid.txt
new file mode 100644
index 0000000..d0924e0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ContourLinesfromGrid.txt
@@ -0,0 +1,8 @@
+Contour Lines from Grid
+shapes_grid
+ParameterRaster|GRID|Grid|False
+ParameterSelection|VERTEX|Vertex type|[0] x,y;[1] x,y,z
+ParameterNumber|ZMIN|Minimum Contour Value|None|None|0.0
+ParameterNumber|ZMAX|Maximum Contour Value|None|None|10000.0
+ParameterNumber|ZSTEP|Equidistance|None|None|100.0
+OutputVector|CONTOUR|Contour Lines
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex(SearchRadius).txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex(SearchRadius).txt
new file mode 100644
index 0000000..11730fa
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex(SearchRadius).txt
@@ -0,0 +1,11 @@
+Convergence Index (Search Radius)
+ta_morphometry
+ParameterRaster|ELEVATION|Elevation|False
+ParameterNumber|RADIUS|Radius [Cells]|1.0|None|10.0
+ParameterSelection|DISTANCE_WEIGHTING_DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DISTANCE_WEIGHTING_DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1.0
+ParameterBoolean|DISTANCE_WEIGHTING_DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|0.0|None|1.0
+ParameterBoolean|SLOPE|Gradient|True
+ParameterSelection|DIFFERENCE|Weighting Function|[0] direction to the center cell;[1] center cell's aspect direction
+OutputRaster|CONVERGENCE|Convergence Index
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex.txt
new file mode 100644
index 0000000..7d1649a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvergenceIndex.txt
@@ -0,0 +1,6 @@
+Convergence Index
+ta_morphometry
+ParameterRaster|ELEVATION|Elevation|False
+ParameterSelection|METHOD|Method|[0] Aspect;[1] Gradient
+ParameterSelection|NEIGHBOURS|Gradient Calculation|[0] 2 x 2;[1] 3 x 3
+OutputRaster|RESULT|Convergence Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertDataStorageType.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertDataStorageType.txt
new file mode 100644
index 0000000..51de5d9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertDataStorageType.txt
@@ -0,0 +1,5 @@
+Convert Data Storage Type
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|TYPE|Data storage type|[0] bit;[1] unsigned 1 byte integer;[2] signed 1 byte integer;[3] unsigned 2 byte integer;[4] signed 2 byte integer;[5] unsigned 4 byte integer;[6] signed 4 byte integer;[7] 4 byte floating point number;[8] 8 byte floating point number
+OutputRaster|OUTPUT|Converted Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPoints.txt
new file mode 100644
index 0000000..7a44353
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPoints.txt
@@ -0,0 +1,6 @@
+Convert Lines to Points
+shapes_points
+ParameterVector|LINES|Lines|1|False
+ParameterBoolean|ADD         |Insert Additional Points|True
+ParameterNumber|DIST|Insert Distance|0.0|None|1.0
+OutputVector|POINTS|Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPolygons.txt
new file mode 100644
index 0000000..0053420
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertLinestoPolygons.txt
@@ -0,0 +1,4 @@
+Convert Lines to Polygons
+shapes_polygons
+ParameterVector|LINES|Lines|1|False
+OutputVector|POLYGONS|Polygons
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertMultipointstoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertMultipointstoPoints.txt
new file mode 100644
index 0000000..6a0fc97
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertMultipointstoPoints.txt
@@ -0,0 +1,4 @@
+Convert Multipoints to Points
+shapes_points
+ParameterVector|MULTIPOINTS|Multipoints|0|False
+OutputVector|POINTS|Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertPointstoLine(s).txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPointstoLine(s).txt
new file mode 100644
index 0000000..f7e8b6b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPointstoLine(s).txt
@@ -0,0 +1,6 @@
+Convert Points to Line(s)
+shapes_lines
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|ORDER|Order by...|POINTS|-1|False
+ParameterTableField|SEPARATE|Separate by...|POINTS|-1|False
+OutputVector|LINES|Lines
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonLineVerticestoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonLineVerticestoPoints.txt
new file mode 100644
index 0000000..e8e9a68
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonLineVerticestoPoints.txt
@@ -0,0 +1,4 @@
+Convert Polygon/Line Vertices to Points
+shapes_polygons
+ParameterVector|SHAPES|Shapes|-1|False
+OutputVector|POINTS|Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonstoLines.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonstoLines.txt
new file mode 100644
index 0000000..def3f04
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvertPolygonstoLines.txt
@@ -0,0 +1,4 @@
+Convert Polygons to Lines
+shapes_lines
+ParameterVector|POLYGONS|Polygons|2|False
+OutputVector|LINES|Lines
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ConvexHull.txt b/python/plugins/processing/algs/saga/description/2.3.0/ConvexHull.txt
new file mode 100644
index 0000000..81084ab
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ConvexHull.txt
@@ -0,0 +1,6 @@
+Convex Hull
+shapes_points
+ParameterVector|SHAPES|Points|0|False
+ParameterSelection|POLYPOINTS|Hull Construction|[0] one hull for all shapes;[1] one hull per shape;[2] one hull per shape part
+OutputVector|HULLS|Convex Hull
+OutputVector|BOXES|Minimum Bounding Box
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CreateLinesGraticule.txt b/python/plugins/processing/algs/saga/description/2.3.0/CreateLinesGraticule.txt
new file mode 100644
index 0000000..5935272
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CreateLinesGraticule.txt
@@ -0,0 +1,9 @@
+Create Lines Graticule|Create Graticule
+shapes_tools
+Hardcoded|-TYPE 0
+ParameterVector|EXTENT|Extent|-1|True
+Extent EXTENT_X_MIN EXTENT_X_MAX EXTENT_Y_MIN EXTENT_Y_MAX
+ParameterNumber|DIVISION_X|Division Width|None|None|1.0
+ParameterNumber|DIVISION_Y|Division Height|None|None|1.0
+ParameterSelection|ALIGNMENT|Alignment|[0] Bottom left;[1] Top left;[2] Bottom right;[3] Top right;[4] Centered|0
+OutputVector|GRATICULE_LINE|Lines Graticule
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CreatePolygonsGraticule.txt b/python/plugins/processing/algs/saga/description/2.3.0/CreatePolygonsGraticule.txt
new file mode 100644
index 0000000..45418f8
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CreatePolygonsGraticule.txt
@@ -0,0 +1,9 @@
+Create Polygons Graticule|Create Graticule
+shapes_tools
+Hardcoded|-TYPE 1
+ParameterVector|EXTENT|Extent|-1|True
+Extent EXTENT_X_MIN EXTENT_X_MAX EXTENT_Y_MIN EXTENT_Y_MAX
+ParameterNumber|DIVISION_X|Division Width|None|None|1.0
+ParameterNumber|DIVISION_Y|Division Height|None|None|1.0
+ParameterSelection|ALIGNMENT|Alignment|[0] Bottom left;[1] Top left;[2] Bottom right;[3] Top right;[4] Centered|0
+OutputVector|GRATICULE_RECT|Rectangle Graticule
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CropToData.txt b/python/plugins/processing/algs/saga/description/2.3.0/CropToData.txt
new file mode 100644
index 0000000..e87ea99
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CropToData.txt
@@ -0,0 +1,4 @@
+Crop to Data
+grid_tools
+ParameterRaster|INPUT|Input layer|False
+OutputRaster|OUTPUT|Cropped
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Cross-ClassificationandTabulation.txt b/python/plugins/processing/algs/saga/description/2.3.0/Cross-ClassificationandTabulation.txt
new file mode 100644
index 0000000..19bb607
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Cross-ClassificationandTabulation.txt
@@ -0,0 +1,7 @@
+Cross-Classification and Tabulation
+grid_analysis
+ParameterRaster|INPUT|Input Grid 1|False
+ParameterRaster|INPUT2|Input Grid 2|False
+ParameterNumber|MAXNUMCLASS|Max. Number of Classes|None|None|5
+OutputRaster|RESULTGRID|Cross-Classification Grid
+OutputTable|RESULTTABLE|Cross-Tabulation Table
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CrossProfiles.txt b/python/plugins/processing/algs/saga/description/2.3.0/CrossProfiles.txt
new file mode 100644
index 0000000..149b892
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CrossProfiles.txt
@@ -0,0 +1,8 @@
+Cross Profiles
+ta_profiles
+ParameterRaster|DEM|DEM|False
+ParameterVector|LINES|Lines|1|False
+ParameterNumber|DIST_LINE|Profile Distance|0.0|None|10.0
+ParameterNumber|DIST_PROFILE|Profile Length|0.0|None|10.0
+ParameterNumber|NUM_PROFILE|Profile Samples|1.0|None|10.0
+OutputVector|PROFILES|Cross Profiles
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CubicSplineApproximation.txt b/python/plugins/processing/algs/saga/description/2.3.0/CubicSplineApproximation.txt
new file mode 100644
index 0000000..c60b700
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CubicSplineApproximation.txt
@@ -0,0 +1,13 @@
+Cubic Spline Approximation
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterNumber|NPMIN|Minimal Number of Points|0|None|3
+ParameterNumber|NPMAX|Maximal Number of Points|11|59|20
+ParameterNumber|NPPC|Points per Square|1|None|5
+ParameterNumber|K|Tolerance|0|None|140.0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CurvatureClassification.txt b/python/plugins/processing/algs/saga/description/2.3.0/CurvatureClassification.txt
new file mode 100644
index 0000000..aff4b66
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CurvatureClassification.txt
@@ -0,0 +1,5 @@
+Curvature Classification
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|THRESHOLD|Threshold for plane|0.0000|None|0.0005
+OutputRaster|CLASS|Curvature Classification
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/CutShapesLayer.txt b/python/plugins/processing/algs/saga/description/2.3.0/CutShapesLayer.txt
new file mode 100644
index 0000000..f60347a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/CutShapesLayer.txt
@@ -0,0 +1,7 @@
+Cut Shapes Layer
+shapes_tools
+ParameterVector|SHAPES|Vector layer to cut|-1|False
+ParameterSelection|METHOD|Method|[0] completely contained;[1] intersects;[2] center
+Hardcoded|-TARGET 3
+ParameterVector|POLYGONS_POLYGONS|Cutting polygons|2|False
+OutputVector|CUT|Result
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DTMFilter(slope-based).txt b/python/plugins/processing/algs/saga/description/2.3.0/DTMFilter(slope-based).txt
new file mode 100644
index 0000000..7390212
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DTMFilter(slope-based).txt
@@ -0,0 +1,8 @@
+DTM Filter (slope-based)
+grid_filter
+ParameterRaster|INPUT|Grid to filter|False
+ParameterNumber|RADIUS|Search Radius|1.0|None|2
+ParameterNumber|TERRAINSLOPE|Approx. Terrain Slope|None|None|30.0
+ParameterBoolean|STDDEV            |Use Confidence Interval|True
+OutputRaster|GROUND|Bare Earth
+OutputRaster|NONGROUND|Removed Objects
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(ADI).txt b/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(ADI).txt
new file mode 100644
index 0000000..ffcb58f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(ADI).txt
@@ -0,0 +1,11 @@
+Diffusive Hillslope Evolution (ADI)
+sim_qm_of_esp
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|CHANNELS|Channel Mask|True
+ParameterBoolean|UPDATE|Update|True
+ParameterNumber|KAPPA|Diffusivity [m2/kyr]|0.0|None|10.0
+ParameterNumber|DURATION|Simulation Time [kyr]|0.0|None|10000.0
+ParameterSelection|TIMESTEP|Time Step|[0] user defined;[1] automatically|1
+ParameterNumber|DTIME|Time Step [kyr]|0.0|None|1000.0
+OutputRaster|MODEL|Modelled Elevation
+OutputRaster|DIFF|Elevation Difference
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(FTCS).txt b/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(FTCS).txt
new file mode 100644
index 0000000..9338a83
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DiffusiveHillslopeEvolution(FTCS).txt
@@ -0,0 +1,11 @@
+Diffusive Hillslope Evolution (FTCS)
+sim_qm_of_esp
+ParameterRaster|DEM|Elevation|False
+ParameterBoolean|UPDATE|Update|True
+ParameterNumber|KAPPA|Diffusivity [m2/kyr]|0.0|None|1.0
+ParameterNumber|DURATION|Simulation Time [kyr]|0.0|None|100.0
+ParameterSelection|TIMESTEP|Time Step|[0] user defined;[1] automatically|1
+ParameterNumber|DTIME|Time Step [kyr]|0.0|None|10.0
+ParameterSelection|NEIGHBOURS|Neighbourhood|[0] Neumann;[1] Moore|1
+OutputRaster|MODEL|Modelled Elevation
+OutputRaster|DIFF|Elevation Difference
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DirectionalStatisticsforSingleGrid.txt b/python/plugins/processing/algs/saga/description/2.3.0/DirectionalStatisticsforSingleGrid.txt
new file mode 100644
index 0000000..5ed456d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DirectionalStatisticsforSingleGrid.txt
@@ -0,0 +1,23 @@
+Directional Statistics for Single Grid
+statistics_grid
+ParameterRaster|GRID|Grid|False
+ParameterVector|POINTS|Points|-1|True
+ParameterNumber|DIRECTION|Direction [Degree]|None|None|0.0
+ParameterNumber|TOLERANCE|Tolerance [Degree]|None|None|0.0
+ParameterNumber|MAXDISTANCE|Maximum Distance [Cells]|None|None|0
+ParameterSelection|DISTANCE_WEIGHTING_DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DISTANCE_WEIGHTING_DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DISTANCE_WEIGHTING_DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1.0
+OutputRaster|MEAN|Arithmetic Mean
+OutputRaster|DIFMEAN|Difference from Arithmetic Mean
+OutputRaster|MIN|Minimum
+OutputRaster|MAX|Maximum
+OutputRaster|RANGE|Range
+OutputRaster|VAR|Variance
+OutputRaster|STDDEV|Standard Deviation
+OutputRaster|STDDEVLO|Mean less Standard Deviation
+OutputRaster|STDDEVHI|Mean plus Standard Deviation
+OutputRaster|DEVMEAN|Deviation from Arithmetic Mean
+OutputRaster|PERCENT|Percentile
+OutputVector|POINTS_OUT|Directional Statistics for Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DistanceMatrix.txt b/python/plugins/processing/algs/saga/description/2.3.0/DistanceMatrix.txt
new file mode 100644
index 0000000..059d3c2
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DistanceMatrix.txt
@@ -0,0 +1,4 @@
+Distance Matrix
+shapes_points
+ParameterVector|POINTS|Points|0|False
+OutputTable|TABLE|Distance Matrix Table
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DiurnalAnisotropicHeating.txt b/python/plugins/processing/algs/saga/description/2.3.0/DiurnalAnisotropicHeating.txt
new file mode 100644
index 0000000..5420da9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DiurnalAnisotropicHeating.txt
@@ -0,0 +1,5 @@
+Diurnal Anisotropic Heating
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|ALPHA_MAX|Alpha Max (Degree)|None|None|202.5
+OutputRaster|DAH|Diurnal Anisotropic Heating
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DiversityOfCategories.txt b/python/plugins/processing/algs/saga/description/2.3.0/DiversityOfCategories.txt
new file mode 100644
index 0000000..c75c9da
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DiversityOfCategories.txt
@@ -0,0 +1,15 @@
+Diversity of Categories
+grid_analysis
+ParameterRaster|CATEGORIES|Categories|False
+ParameterSelection|SEARCH_MODE|Search Mode|[0] square;[1] circle|1
+ParameterNumber|SEARCH_RADIUS|Search Radius Distance|1.0|3|3
+ParameterSelection|NB_CASE|Connectivity Neighbourhood|[0] Rook's case;[1] Queen's case|1
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|0.0|None|0.7
+OutputRaster|COUNT|Number of Categories
+OutputRaster|DIVERSITY|Diversity
+OutputRaster|SIZE_MEAN|Average Size
+OutputRaster|SIZE_SKEW|Skewness
+OutputRaster|CONNECTIVITY|Connectivity
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/DownslopeDistanceGradient.txt b/python/plugins/processing/algs/saga/description/2.3.0/DownslopeDistanceGradient.txt
new file mode 100644
index 0000000..250e199
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/DownslopeDistanceGradient.txt
@@ -0,0 +1,7 @@
+Downslope Distance Gradient
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|DISTANCE|Vertical Distance|None|None|10
+ParameterSelection|OUTPUT|Output|[0] distance;[1] gradient (tangens);[2] gradient (degree)
+OutputRaster|GRADIENT|Gradient
+OutputRaster|DIFFERENCE|Gradient Difference
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/EdgeContamination.txt b/python/plugins/processing/algs/saga/description/2.3.0/EdgeContamination.txt
new file mode 100644
index 0000000..004ca57
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/EdgeContamination.txt
@@ -0,0 +1,4 @@
+Edge Contamination
+ta_hydrology
+ParameterRaster|DEM|Elevation|False
+OutputRaster|CONTAMINATION|Edge Contamination
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/EffectiveAirFlowHeights.txt b/python/plugins/processing/algs/saga/description/2.3.0/EffectiveAirFlowHeights.txt
new file mode 100644
index 0000000..70e0479
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/EffectiveAirFlowHeights.txt
@@ -0,0 +1,15 @@
+Effective Air Flow Heights
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|DIR|Wind Direction|True
+ParameterRaster|LEN|Wind Speed|True
+ParameterNumber|DIR_CONST|Constant Wind Direction [Degree]|None|None|135
+ParameterBoolean|OLDVER         |Old Version|True
+ParameterNumber|MAXDIST|Search Distance [km]|0.0|None|300
+ParameterNumber|ACCEL|Acceleration|None|None|1.5
+ParameterBoolean|PYRAMIDS       |Use Pyramids with New Version|True
+ParameterNumber|LEEFACT|Lee Factor|None|None|0.5
+ParameterNumber|LUVFACT|Luv Factor|None|None|1.0
+ParameterSelection|DIR_UNITS|Wind Direction Units|[0] radians;[1] degree
+ParameterNumber|LEN_SCALE|Wind Speed Scale Factor|None|None|1.0
+OutputRaster|AFH|Effective Air Flow Heights
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FastRegionGrowingAlgorithm.txt b/python/plugins/processing/algs/saga/description/2.3.0/FastRegionGrowingAlgorithm.txt
new file mode 100644
index 0000000..c7922ee
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FastRegionGrowingAlgorithm.txt
@@ -0,0 +1,7 @@
+Fast Region Growing Algorithm
+imagery_rga
+ParameterMultipleInput|INPUT|Input Grids|3|False
+ParameterRaster|START|Seeds Grid|False
+ParameterRaster|REP|Smooth Rep|True
+OutputRaster|RESULT|Segments
+OutputRaster|MEAN|Mean
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FastRepresentativeness.txt b/python/plugins/processing/algs/saga/description/2.3.0/FastRepresentativeness.txt
new file mode 100644
index 0000000..b65953b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FastRepresentativeness.txt
@@ -0,0 +1,7 @@
+Fast Representativeness
+statistics_grid
+ParameterRaster|INPUT|Input|False
+ParameterNumber|LOD|Level of Generalisation|None|None|16
+OutputRaster|RESULT|Output
+OutputRaster|RESULT_LOD|Output Lod
+OutputRaster|SEEDS|Output Seeds
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FillGapsinRecords.txt b/python/plugins/processing/algs/saga/description/2.3.0/FillGapsinRecords.txt
new file mode 100644
index 0000000..31974dc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FillGapsinRecords.txt
@@ -0,0 +1,6 @@
+Fill Gaps in Records
+table_calculus
+ParameterTable|TABLE|Table|False
+ParameterTableField|ORDER|Order|TABLE|-1|False
+ParameterSelection|METHOD|Interpolation|[0] Nearest Neighbour;[1] Linear;[2] Spline
+OutputTable|NOGAPS|Table without Gaps
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(PlanchonDarboux,2001).txt b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(PlanchonDarboux,2001).txt
new file mode 100644
index 0000000..f1a496d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(PlanchonDarboux,2001).txt
@@ -0,0 +1,5 @@
+Fill Sinks|Fill Sinks (Planchon/Darboux, 2001)
+ta_preprocessor
+ParameterRaster|DEM|DEM|False
+ParameterNumber|MINSLOPE|Minimum Slope [Degree]|0.0|None|0.01
+OutputRaster|RESULT|Filled DEM
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(QMofESP).txt b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(QMofESP).txt
new file mode 100644
index 0000000..5ba95ca
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(QMofESP).txt
@@ -0,0 +1,6 @@
+Fill Sinks (QM of ESP)
+sim_qm_of_esp
+ParameterRaster|DEM|DEM|False
+ParameterNumber|DZFILL|Fill Increment|0.0|None|0.01
+OutputRaster|FILLED|DEM Without Sinks
+OutputRaster|SINKS|Sinks
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(WangLiu).txt b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(WangLiu).txt
new file mode 100644
index 0000000..2a36e99
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FillSinks(WangLiu).txt
@@ -0,0 +1,7 @@
+Fill Sinks (Wang & Liu)
+ta_preprocessor
+ParameterRaster|ELEV|DEM|False
+ParameterNumber|MINSLOPE|Minimum Slope [Degree]|0.0|None|0.01
+OutputRaster|FILLED|Filled DEM
+OutputRaster|FDIR|Flow Directions
+OutputRaster|WSHED|Watershed Basins
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FillSinksXXL(WangLiu).txt b/python/plugins/processing/algs/saga/description/2.3.0/FillSinksXXL(WangLiu).txt
new file mode 100644
index 0000000..bd3bd21
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FillSinksXXL(WangLiu).txt
@@ -0,0 +1,5 @@
+Fill Sinks XXL (Wang & Liu)
+ta_preprocessor
+ParameterRaster|ELEV|DEM|False
+ParameterNumber|MINSLOPE|Minimum Slope [Degree]|0.0|None|0.01
+OutputRaster|FILLED|Filled DEM
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FilterClumps.txt b/python/plugins/processing/algs/saga/description/2.3.0/FilterClumps.txt
new file mode 100644
index 0000000..3ac291f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FilterClumps.txt
@@ -0,0 +1,5 @@
+Filter Clumps
+grid_filter
+ParameterRaster|GRID|Input Grid|False
+ParameterNumber|THRESHOLD|Min. Size|1.0|None|10
+OutputRaster|OUTPUT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FireRiskAnalysis.txt b/python/plugins/processing/algs/saga/description/2.3.0/FireRiskAnalysis.txt
new file mode 100644
index 0000000..7f9788d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FireRiskAnalysis.txt
@@ -0,0 +1,18 @@
+Fire Risk Analysis
+sim_fire_spreading
+ParameterRaster|DEM|DEM|False
+ParameterRaster|FUEL|Fuel Model|False
+ParameterRaster|WINDSPD|Wind Speed|False
+ParameterRaster|WINDDIR|Wind Direction|False
+ParameterRaster|M1H|Dead Fuel Moisture 1H|False
+ParameterRaster|M10H|Dead Fuel Moisture 10H|False
+ParameterRaster|M100H|Dead Fuel Moisture 100H|False
+ParameterRaster|MHERB|Herbaceous Fuel Moisture|False
+ParameterRaster|MWOOD|Wood Fuel Moisture|False
+ParameterRaster|VALUE|Value|True
+ParameterRaster|BASEPROB|Base Probability|True
+ParameterNumber|MONTECARLO|Number of Events|None|None|1000
+ParameterNumber|INTERVAL|Fire Length|None|None|100
+OutputRaster|DANGER|Danger
+OutputRaster|COMPPROB|Compound Probability
+OutputRaster|PRIORITY|Priority Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FitNPointstoshape.txt b/python/plugins/processing/algs/saga/description/2.3.0/FitNPointstoshape.txt
new file mode 100644
index 0000000..727ebdb
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FitNPointstoshape.txt
@@ -0,0 +1,5 @@
+Fit N Points to shape
+shapes_points
+ParameterVector|SHAPES|Shapes|2|False
+ParameterNumber|NUMPOINTS|Number of points|1.0|None|10
+OutputVector|POINTS|Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlatDetection.txt b/python/plugins/processing/algs/saga/description/2.3.0/FlatDetection.txt
new file mode 100644
index 0000000..e106c6e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlatDetection.txt
@@ -0,0 +1,6 @@
+Flat Detection
+ta_preprocessor
+ParameterRaster|DEM|DEM|False
+ParameterSelection|FLAT_OUTPUT|Flat Area Values|[0] elevation;[1] enumeration
+OutputRaster|NOFLATS|No Flats
+OutputRaster|FLATS|Flat Areas
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlattenPolygonLayer.txt b/python/plugins/processing/algs/saga/description/2.3.0/FlattenPolygonLayer.txt
new file mode 100644
index 0000000..0657662
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlattenPolygonLayer.txt
@@ -0,0 +1,4 @@
+Flatten Polygon Layer
+shapes_polygons
+ParameterVector|INPUT|Input|2|False
+OutputVector|OUTPUT|Output
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(FlowTracing).txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(FlowTracing).txt
new file mode 100644
index 0000000..ac9e521
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(FlowTracing).txt
@@ -0,0 +1,17 @@
+Flow Accumulation (Flow Tracing)
+ta_hydrology
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|SINKROUTE|Sink Routes|True
+ParameterRaster|WEIGHT|Weight|True
+ParameterRaster|MATERIAL|Material|True
+ParameterRaster|VAL_INPUT|Input for Mean over Catchment Calculation|True
+ParameterRaster|TARGET|Target|True
+ParameterNumber|STEP|Step|None|None|1
+ParameterSelection|METHOD|Method|[0] Rho 8;[1] Kinematic Routing Algorithm;[2] DEMON
+ParameterNumber|MINDQV|DEMON - Min. DQV|None|None|0.0
+ParameterBoolean|CORRECT|Flow Correction|True
+OutputRaster|FLOW|Flow Accumulation
+OutputRaster|VAL_MEAN|Mean over Catchment
+OutputRaster|ACCU_TOTAL|Total accumulated Material
+OutputRaster|ACCU_LEFT|Accumulated Material from left side
+OutputRaster|ACCU_RIGHT|Accumulated Material from right side
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(QMofESP).txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(QMofESP).txt
new file mode 100644
index 0000000..a835e2f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(QMofESP).txt
@@ -0,0 +1,6 @@
+Flow Accumulation (QM of ESP)
+sim_qm_of_esp
+ParameterRaster|DEM|DEM|False
+ParameterSelection|PREPROC|Preprocessing|[0] none;[1] fill sinks temporarily;[2] fill sinks permanently|1
+ParameterNumber|DZFILL|Fill Increment|0.00|None|0.01
+OutputRaster|FLOW|Contributing Area
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Recursive).txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Recursive).txt
new file mode 100644
index 0000000..9bdab3e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Recursive).txt
@@ -0,0 +1,18 @@
+Flow Accumulation (Recursive)
+ta_hydrology
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|SINKROUTE|Sink Routes|True
+ParameterRaster|WEIGHT|Weight|True
+ParameterRaster|MATERIAL|Material|True
+ParameterRaster|VAL_INPUT|Input for Mean over Catchment Calculation|True
+ParameterRaster|TARGET|Target|True
+ParameterNumber|STEP|Step|None|None|1
+ParameterRaster|TARGETS|Target Areas|True
+ParameterSelection|METHOD|Method|[0] Deterministic 8;[1] Rho 8;[2] Deterministic Infinity;[3] Multiple Flow Direction
+ParameterNumber|CONVERGENCE|Convergence|None|None|1.1
+OutputRaster|CAREA|Catchment Area
+OutputRaster|VAL_MEAN|Mean over Catchment
+OutputRaster|ACCU_TOT|Total accumulated Material
+OutputRaster|ACCU_LEFT|Accumulated Material from left side
+OutputRaster|ACCU_RIGHT|Accumulated Material from right side
+OutputRaster|FLOWLEN|Flow Path Length
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Top-Down).txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Top-Down).txt
new file mode 100644
index 0000000..f575a7f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowAccumulation(Top-Down).txt
@@ -0,0 +1,5 @@
+Flow Accumulation (Top-Down)
+ta_hydrology
+ParameterRaster|ELEVATION|Elevation|False
+ParameterSelection|METHOD|Method|[0] Deterministic 8;[1] Rho 8;[2] Braunschweiger Reliefmodell;[3] Deterministic Infinity;[4] Multiple Flow Direction;[5] Multiple Triangular Flow Direction
+OutputRaster|CAREA|Catchment Area
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowPathLength.txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowPathLength.txt
new file mode 100644
index 0000000..da16699
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowPathLength.txt
@@ -0,0 +1,8 @@
+Flow Path Length
+ta_hydrology
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|SEED|Seeds|True
+ParameterBoolean|SEEDS_ONLY       |Seeds Only|True
+ParameterSelection|METHOD|Flow Routing Algorithm|[0] Deterministic 8 (D8);[1] Multiple Flow Direction (FD8)
+ParameterNumber|CONVERGENCE|Convergence (FD8)|None|None|1.1
+OutputRaster|LENGTH|Flow Path Length
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FlowWidthandSpecificCatchmentArea.txt b/python/plugins/processing/algs/saga/description/2.3.0/FlowWidthandSpecificCatchmentArea.txt
new file mode 100644
index 0000000..a8d32a8
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FlowWidthandSpecificCatchmentArea.txt
@@ -0,0 +1,7 @@
+Flow Width and Specific Catchment Area
+ta_hydrology
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|TCA|Total Catchment Area (TCA)|True
+ParameterSelection|METHOD|Method|[0] Deterministic 8;[1] Multiple Flow Direction (Quinn et al. 1991);[2] Aspect
+OutputRaster|WIDTH|Flow Width
+OutputRaster|SCA|Specific Catchment Area (SCA)
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Alternative).txt b/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Alternative).txt
new file mode 100644
index 0000000..162d7ae
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Alternative).txt
@@ -0,0 +1,17 @@
+Fragmentation (Alternative)
+grid_analysis
+ParameterRaster|CLASSES|Classification|False
+ParameterNumber|CLASS|Class Identifier|None|None|1
+ParameterNumber|NEIGHBORHOOD_MIN|Neighborhood Min|1.0|None|1
+ParameterNumber|NEIGHBORHOOD_MAX|Neighborhood Max|1.0|None|1
+ParameterSelection|AGGREGATION|Level Aggregation|[0] average;[1] multiplicative
+ParameterBoolean|BORDER                |Add Border|True
+ParameterNumber|WEIGHT|Connectivity Weighting|None|None|1.1
+ParameterNumber|DENSITY_MIN|Minimum Density [Percent]|None|None|10
+ParameterNumber|DENSITY_INT|Minimum Density for Interior Forest [Percent]|None|None|99
+ParameterNumber|LEVEL_GROW|Search Distance Increment|None|None|0.0
+ParameterBoolean|DENSITY_MEAN          |Density from Neighbourhood|True
+OutputRaster|DENSITY|Density [Percent]
+OutputRaster|CONNECTIVITY|Connectivity [Percent]
+OutputRaster|FRAGMENTATION|Fragmentation
+OutputTable|FRAGSTATS|Summary
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Standard).txt b/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Standard).txt
new file mode 100644
index 0000000..0dc554a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Fragmentation(Standard).txt
@@ -0,0 +1,17 @@
+Fragmentation (Standard)
+grid_analysis
+ParameterRaster|CLASSES|Classification|False
+ParameterNumber|CLASS|Class Identifier|None|None|1
+ParameterNumber|NEIGHBORHOOD_MIN|Neighborhood Min|1.0|None|1
+ParameterNumber|NEIGHBORHOOD_MAX|Neighborhood Max|1.0|None|3
+ParameterSelection|AGGREGATION|Level Aggregation|[0] average;[1] multiplicative
+ParameterBoolean|BORDER|Add Border|True
+ParameterNumber|WEIGHT|Connectivity Weighting|None|None|1.1
+ParameterNumber|DENSITY_MIN|Minimum Density [Percent]|None|None|10
+ParameterNumber|DENSITY_INT|Minimum Density for Interior Forest [Percent]|None|None|99
+ParameterSelection|CIRCULAR|Neighborhood Type|[0] square;[1] circle
+ParameterBoolean|DIAGONAL|Include diagonal neighbour relations|True
+OutputRaster|DENSITY|Density [Percent]
+OutputRaster|CONNECTIVITY|Connectivity [Percent]
+OutputRaster|FRAGMENTATION|Fragmentation
+OutputTable|FRAGSTATS|Summary
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FragmentationClassesfromDensityandConnectivity.txt b/python/plugins/processing/algs/saga/description/2.3.0/FragmentationClassesfromDensityandConnectivity.txt
new file mode 100644
index 0000000..99e30c4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FragmentationClassesfromDensityandConnectivity.txt
@@ -0,0 +1,9 @@
+Fragmentation Classes from Density and Connectivity
+grid_analysis
+ParameterRaster|DENSITY|Density [Percent]|False
+ParameterRaster|CONNECTIVITY|Connectivity [Percent]|False
+ParameterBoolean|BORDER|Add Border|True
+ParameterNumber|WEIGHT|Connectivity Weighting|None|None|0
+ParameterNumber|DENSITY_MIN|Minimum Density [Percent]|0.0|100.0|10
+ParameterNumber|DENSITY_INT|Minimum Density for Interior Forest [Percent]|0.0|100.0|99
+OutputRaster|FRAGMENTATION|Fragmentation
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Function.txt b/python/plugins/processing/algs/saga/description/2.3.0/Function.txt
new file mode 100644
index 0000000..e058e9b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Function.txt
@@ -0,0 +1,8 @@
+Function
+grid_calculus
+ParameterNumber|XMIN|xmin|None|None|0.0
+ParameterNumber|XMAX|xmax|None|None|0.0
+ParameterNumber|YMIN|ymin|None|None|0.0
+ParameterNumber|YMAX|ymax|None|None|0.0
+ParameterString|FORMUL|Formula|
+OutputRaster|RESULT|Function
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Fuzzify.txt b/python/plugins/processing/algs/saga/description/2.3.0/Fuzzify.txt
new file mode 100644
index 0000000..d78f12b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Fuzzify.txt
@@ -0,0 +1,10 @@
+Fuzzify
+grid_calculus
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|A|A|None|None|0.0
+ParameterNumber|B|B|None|None|0.0
+ParameterNumber|C|C|None|None|0.0
+ParameterNumber|D|D|None|None|0.0
+ParameterSelection|TYPE|Membership Function Type|[0] linear;[1] sigmoidal;[2] j-shaped
+ParameterBoolean|AUTOFIT     |Adjust to Grid|True
+OutputRaster|OUTPUT|Fuzzified Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FuzzyIntersection(AND).txt b/python/plugins/processing/algs/saga/description/2.3.0/FuzzyIntersection(AND).txt
new file mode 100644
index 0000000..a00eff2
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FuzzyIntersection(AND).txt
@@ -0,0 +1,5 @@
+Fuzzy Intersection (AND)
+grid_calculus
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|TYPE|Operator Type|[0] min(a, b) (non-interactive);[1] a * b;[2] max(0, a + b - 1)
+OutputRaster|AND|Intersection
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/FuzzyUnion(OR).txt b/python/plugins/processing/algs/saga/description/2.3.0/FuzzyUnion(OR).txt
new file mode 100644
index 0000000..5925179
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/FuzzyUnion(OR).txt
@@ -0,0 +1,5 @@
+Fuzzy Union (OR)
+grid_calculus
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|TYPE|Operator Type|[0] max(a, b) (non-interactive);[1] a + b - a * b;[2] min(1, a + b)
+OutputRaster|OR|Union
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GaussianFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/GaussianFilter.txt
new file mode 100644
index 0000000..0cf7e31
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GaussianFilter.txt
@@ -0,0 +1,7 @@
+Gaussian Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|SIGMA|Standard Deviation|None|None|1
+ParameterSelection|MODE|Search Mode|[0] Square;[1] Circle
+ParameterNumber|RADIUS|Search Radius|None|None|3
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(Points).txt b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(Points).txt
new file mode 100644
index 0000000..fd483f1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(Points).txt
@@ -0,0 +1,16 @@
+GWR for Multiple Predictors
+statistics_regression
+ParameterVector|POINTS|Points|-1|False
+ParameterTableField|DEPENDENT|Dependent Variable|POINTS|-1|False
+ParameterTableField|PREDICTORS|Predictor|POINTS|-1|False
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+OutputVector|REGRESSION|Regression
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(PointsGrids).txt b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(PointsGrids).txt
new file mode 100644
index 0000000..1573751
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression(PointsGrids).txt
@@ -0,0 +1,20 @@
+GWR for Multiple Predictor Grids
+statistics_regression
+ParameterMultipleInput|PREDICTORS|Predictors|3.0|False
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|DEPENDENT|Dependent Variable|POINTS|-1|False
+ParameterSelection|RESOLUTION|Model Resolution|[0] same as predictors;[1] user defined|1
+ParameterNumber|RESOLUTION_VAL|Resolution|0|None|1
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+OutputRaster|REGRESSION|Regression
+OutputRaster|QUALITY|Coefficient of Determination
+OutputVector|RESIDUALS|Residuals
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression.txt b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression.txt
new file mode 100644
index 0000000..e4f8a0e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedMultipleRegression.txt
@@ -0,0 +1,23 @@
+GWR for Multiple Predictors (Gridded Model Output)
+statistics_regression
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|DEPENDENT|Dependent Variable|POINTS|-1|False
+ParameterTableField|PREDICTORS|Predictor|POINTS|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputVector|REGRESSION|Regression
+OutputRaster|SLOPES|Slopes
+OutputRaster|TARGET_INTERCEPT|Intercept
+OutputRaster|TARGET_QUALITY|Quality
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression(PointsGrid).txt b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression(PointsGrid).txt
new file mode 100644
index 0000000..77d54b1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression(PointsGrid).txt
@@ -0,0 +1,20 @@
+GWR for Single Predictor Grid
+statistics_regression
+ParameterRaster|PREDICTOR|Predictor|False
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|DEPENDENT|Dependent Variable|POINTS|-1|False
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+OutputRaster|REGRESSION|Regression
+OutputRaster|QUALITY|Coefficient of Determination
+OutputRaster|INTERCEPT|Intercept
+OutputRaster|SLOPE|Slope
+OutputVector|RESIDUALS|Residuals
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression.txt b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression.txt
new file mode 100644
index 0000000..f8ac07c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeographicallyWeightedRegression.txt
@@ -0,0 +1,23 @@
+GWR for Single Predictor (Gridded Model Output)
+statistics_regression
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|DEPENDENT|Dependent Variable|POINTS|-1|False
+ParameterTableField|PREDICTOR|Predictor|POINTS|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterSelection|DW_WEIGHTING|Weighting Function|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
+OutputRaster|TARGET_INTERCEPT|Intercept
+OutputRaster|TARGET_SLOPE|Slope
+OutputRaster|TARGET_QUALITY|Quality
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GeometricFigures.txt b/python/plugins/processing/algs/saga/description/2.3.0/GeometricFigures.txt
new file mode 100644
index 0000000..09a288d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GeometricFigures.txt
@@ -0,0 +1,7 @@
+Geometric Figures
+grid_calculus
+ParameterNumber|CELL_COUNT|Cell Count|None|None|0
+ParameterNumber|CELL_SIZE|Cell Size|None|None|0
+ParameterSelection|FIGURE|Figure|[0] Cone (up);[1] Cone (down);[2] Plane
+ParameterNumber|PLANE|Direction of Plane [Degree]|None|None|0
+OutputRaster|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GetShapesExtents.txt b/python/plugins/processing/algs/saga/description/2.3.0/GetShapesExtents.txt
new file mode 100644
index 0000000..9ef1cc7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GetShapesExtents.txt
@@ -0,0 +1,5 @@
+Get Shapes Extents
+shapes_tools
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterSelection|OUTPUT|Get Extent for|[0] all shapes;[1] each shape;[2] each shape's part
+OutputVector|EXTENTS|Extents
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GlobalMoransIforGrids.txt b/python/plugins/processing/algs/saga/description/2.3.0/GlobalMoransIforGrids.txt
new file mode 100644
index 0000000..e5de7f6
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GlobalMoransIforGrids.txt
@@ -0,0 +1,5 @@
+Global Moran's I for Grids
+statistics_grid
+ParameterRaster|GRID|Grid|False
+ParameterSelection|CONTIGUITY|Case of contiguity|[0] Rook;[1] Queen
+OutputTable|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromCartesiantoPolarCoordinates.txt b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromCartesiantoPolarCoordinates.txt
new file mode 100644
index 0000000..8a725be
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromCartesiantoPolarCoordinates.txt
@@ -0,0 +1,10 @@
+Gradient Vector from Cartesian to Polar Coordinates
+grid_calculus
+ParameterRaster|DX|X Component|False
+ParameterRaster|DY|Y Component|False
+ParameterSelection|UNITS|Polar Angle Units|[0] radians;[1] degree
+ParameterSelection|SYSTEM|Polar Coordinate System|[0] mathematical;[1] geographical;[2] user defined
+ParameterNumber|SYSTEM_ZERO|User defined Zero Direction|None|None|0.0
+ParameterSelection|SYSTEM_ORIENT|User defined Orientation|[0] clockwise;[1] counterclockwise
+OutputRaster|DIR|Direction
+OutputRaster|LEN|Length
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromPolartoCartesianCoordinates.txt b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromPolartoCartesianCoordinates.txt
new file mode 100644
index 0000000..382aa9e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorfromPolartoCartesianCoordinates.txt
@@ -0,0 +1,10 @@
+Gradient Vector from Polar to Cartesian Coordinates
+grid_calculus
+ParameterRaster|DIR|Direction|False
+ParameterRaster|LEN|Length|False
+ParameterSelection|UNITS|Polar Angle Units|[0] radians;[1] degree
+ParameterSelection|SYSTEM|Polar Coordinate System|[0] mathematical;[1] geographical;[2] user defined
+ParameterNumber|SYSTEM_ZERO|User defined Zero Direction|None|None|0.0
+ParameterSelection|SYSTEM_ORIENT|User defined Orientation|[0] clockwise;[1] counterclockwise
+OutputRaster|DX|X Component
+OutputRaster|DY|Y Component
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionalComponents.txt b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionalComponents.txt
new file mode 100644
index 0000000..1a1abf4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionalComponents.txt
@@ -0,0 +1,10 @@
+Gradient Vectors from Directional Components
+shapes_grid
+ParameterRaster|X|X Component|False
+ParameterRaster|Y|Y Component|False
+ParameterNumber|STEP|Step|None|None|1
+ParameterNumber|SIZE_MIN|Size Range Min|None|None|25.0
+ParameterNumber|SIZE_MAX|Size Range Max|None|None|100.0
+ParameterSelection|AGGR|Aggregation|[0] nearest neighbour;[1] mean value
+ParameterSelection|STYLE|Style|[0] simple line;[1] arrow;[2] arrow (centered to cell)
+OutputVector|VECTORS|Gradient Vectors
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionandLength.txt b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionandLength.txt
new file mode 100644
index 0000000..628665a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromDirectionandLength.txt
@@ -0,0 +1,10 @@
+Gradient Vectors from Direction and Length
+shapes_grid
+ParameterRaster|DIR|Direction|False
+ParameterRaster|LEN|Length|False
+ParameterNumber|STEP|Step|None|None|1
+ParameterNumber|SIZE_MIN|Size Range Min|None|None|25.0
+ParameterNumber|SIZE_MAX|Size Range Max|None|None|100.0
+ParameterSelection|AGGR|Aggregation|[0] nearest neighbour;[1] mean value
+ParameterSelection|STYLE|Style|[0] simple line;[1] arrow;[2] arrow (centered to cell)
+OutputVector|VECTORS|Gradient Vectors
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromSurface.txt b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromSurface.txt
new file mode 100644
index 0000000..582bf10
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GradientVectorsfromSurface.txt
@@ -0,0 +1,9 @@
+Gradient Vectors from Surface
+shapes_grid
+ParameterRaster|SURFACE|Surface|False
+ParameterNumber|STEP|Step|None|None|1
+ParameterNumber|SIZE_MIN|Size Range Min|None|None|25.0
+ParameterNumber|SIZE_MAX|Size Range Max|None|None|100.0
+ParameterSelection|AGGR|Aggregation|[0] nearest neighbour;[1] mean value
+ParameterSelection|STYLE|Style|[0] simple line;[1] arrow;[2] arrow (centered to cell)
+OutputVector|VECTORS|Gradient Vectors
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridBuffer.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridBuffer.txt
new file mode 100644
index 0000000..da961e9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridBuffer.txt
@@ -0,0 +1,6 @@
+Grid Buffer
+grid_tools
+ParameterRaster|FEATURES|Features Grid|False
+ParameterNumber|DIST|Distance|None|None|1000
+ParameterSelection|BUFFERTYPE|Buffer Distance|[0] Fixed;[1] Cell value
+OutputRaster|BUFFER|Buffer Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridCalculator.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridCalculator.txt
new file mode 100644
index 0000000..e849948
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridCalculator.txt
@@ -0,0 +1,9 @@
+Raster calculator|Grid Calculator
+grid_calculus
+AllowUnmatching
+ParameterRaster|GRIDS|Main input layer|False
+ParameterMultipleInput|XGRIDS|Additional layers|3|True
+ParameterString|FORMULA|Formula|
+ParameterBoolean|USE_NODATA|Use NoData|False
+ParameterSelection|TYPE|Output Data Type|[0] bit;[1] unsigned 1 byte integer;[2] signed 1 byte integer;[3] unsigned 2 byte integer;[4] signed 2 byte integer;[5] unsigned 4 byte integer;[6] signed 4 byte integer;[7] 4 byte floating point number;[8] 8 byte floating point number|7
+OutputRaster|RESULT|Calculated
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridCellIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridCellIndex.txt
new file mode 100644
index 0000000..0b8e3ee
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridCellIndex.txt
@@ -0,0 +1,5 @@
+Grid Cell Index
+grid_tools
+ParameterRaster|GRID|Input Grid|False
+ParameterSelection|ORDER|Index|[0] ascending;[1] descending|0
+OutputRaster|INDEX|Sorted Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridDifference.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridDifference.txt
new file mode 100644
index 0000000..c2fa03d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridDifference.txt
@@ -0,0 +1,5 @@
+Grid Difference
+grid_calculus
+ParameterRaster|A|A|False
+ParameterRaster|B|B|False
+OutputRaster|C|Difference (A - B)
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridDivision.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridDivision.txt
new file mode 100644
index 0000000..718d010
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridDivision.txt
@@ -0,0 +1,5 @@
+Grid Division
+grid_calculus
+ParameterRaster|A|Dividend|False
+ParameterRaster|B|Divisor|False
+OutputRaster|C|Quotient
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridMasking.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridMasking.txt
new file mode 100644
index 0000000..d15e2f0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridMasking.txt
@@ -0,0 +1,6 @@
+Grid Masking
+grid_tools
+ParameterRaster|GRID|Grid|False
+ParameterRaster|MASK|Mask|False
+OutputRaster|MASKED|Masked Grid
+AllowUnmatching
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridNormalisation.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridNormalisation.txt
new file mode 100644
index 0000000..b16dcab
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridNormalisation.txt
@@ -0,0 +1,6 @@
+Grid Normalisation
+grid_calculus
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|RANGE_MIN|Target Range (min)|None|None|0
+ParameterNumber|RANGE_MAX|Target Range (max)|None|None|1
+OutputRaster|OUTPUT|Normalised Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridOrientation.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridOrientation.txt
new file mode 100644
index 0000000..2eb0de1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridOrientation.txt
@@ -0,0 +1,5 @@
+Grid Orientation
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|METHOD|Method|[0] Copy;[1] Flip;[2] Mirror;[3] Invert
+OutputRaster|RESULT|Changed Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridProximityBuffer.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridProximityBuffer.txt
new file mode 100644
index 0000000..be5df90
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridProximityBuffer.txt
@@ -0,0 +1,8 @@
+Grid Proximity Buffer
+grid_tools
+ParameterRaster|SOURCE|Source Grid|False
+ParameterNumber|DIST|Buffer distance|None|None|500.0
+ParameterNumber|IVAL|Equidistance|None|None|100.0
+OutputRaster|DISTANCE|Distance Grid
+OutputRaster|ALLOC|Allocation Grid
+OutputRaster|BUFFER|Buffer Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridSkeletonization.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridSkeletonization.txt
new file mode 100644
index 0000000..eacd23f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridSkeletonization.txt
@@ -0,0 +1,9 @@
+Grid Skeletonization
+imagery_segmentation
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|METHOD|Method|[0] Standard;[1] Hilditch's Algorithm;[2] Channel Skeleton
+ParameterSelection|INIT_METHOD|Initialisation|[0] Less than;[1] Greater than
+ParameterNumber|INIT_THRESHOLD|Threshold (Init.)|None|None|0.0
+ParameterNumber|CONVERGENCE|Convergence|None|None|3.0
+OutputRaster|RESULT|Skeleton
+OutputVector|VECTOR|Skeleton
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridStandardisation.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridStandardisation.txt
new file mode 100644
index 0000000..743f2e0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridStandardisation.txt
@@ -0,0 +1,5 @@
+Grid Standardisation
+grid_calculus
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|STRETCH|Stretch Factor|0.0|None|1.0
+OutputRaster|OUTPUT|Standardised Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridStatisticsforPolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridStatisticsforPolygons.txt
new file mode 100644
index 0000000..1d3d170
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridStatisticsforPolygons.txt
@@ -0,0 +1,16 @@
+Grid Statistics for Polygons
+shapes_grid
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterSelection|METHOD|Method|[0] Standard;[1] Shape wise, supports overlapping polygons|1
+ParameterSelection|NAMING|Grid Naming|[0] Grid number;[1] Grid name|1
+ParameterBoolean|COUNT|Number of Cells|True
+ParameterBoolean|MIN|Minimum|True
+ParameterBoolean|MAX|Maximum|True
+ParameterBoolean|RANGE|Range|True
+ParameterBoolean|SUM|Sum|True
+ParameterBoolean|MEAN|Mean|True
+ParameterBoolean|VAR|Variance|True
+ParameterBoolean|STDDEV|Standard Deviation|True
+ParameterNumber|QUANTILE|Quantiles|None|None|0
+OutputVector|RESULT|Statistics
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints(randomly).txt b/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints(randomly).txt
new file mode 100644
index 0000000..1f04323
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints(randomly).txt
@@ -0,0 +1,5 @@
+Grid Values to Points (randomly)
+shapes_grid
+ParameterRaster|GRID|Grid|False
+ParameterNumber|FREQ|Frequency|None|None|100
+OutputVector|POINTS|Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints.txt
new file mode 100644
index 0000000..0e33a30
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridValuestoPoints.txt
@@ -0,0 +1,7 @@
+Grid Values to Points
+shapes_grid
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterVector|POLYGONS|Polygons|-1|True
+ParameterBoolean|NODATA        |Exclude NoData Cells|True
+ParameterSelection|TYPE|Type|[0] nodes;[1] cells
+OutputVector|SHAPES|Shapes
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridVolume.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridVolume.txt
new file mode 100644
index 0000000..f20c27c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridVolume.txt
@@ -0,0 +1,5 @@
+Grid Volume
+grid_calculus
+ParameterRaster|GRID|Grid|False
+ParameterSelection|METHOD|Method|[0] Count Only Above Base Level;[1] Count Only Below Base Level;[2] Subtract Volumes Below Base Level;[3] Add Volumes Below Base Level
+ParameterNumber|LEVEL|Base Level|None|None|0.0
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridsProduct.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridsProduct.txt
new file mode 100644
index 0000000..59d864f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridsProduct.txt
@@ -0,0 +1,4 @@
+Grids Product
+grid_calculus
+ParameterMultipleInput|GRIDS|Grids|3|False
+OutputRaster|RESULT|Product
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/GridsSum.txt b/python/plugins/processing/algs/saga/description/2.3.0/GridsSum.txt
new file mode 100644
index 0000000..60b0bee
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/GridsSum.txt
@@ -0,0 +1,4 @@
+Grids Sum
+grid_calculus
+ParameterMultipleInput|GRIDS|Grids|3|False
+OutputRaster|RESULT|Sum
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/HistogramSurface.txt b/python/plugins/processing/algs/saga/description/2.3.0/HistogramSurface.txt
new file mode 100644
index 0000000..bde5571
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/HistogramSurface.txt
@@ -0,0 +1,5 @@
+Histogram Surface
+grid_visualisation
+ParameterRaster|GRID|Grid|False
+ParameterSelection|METHOD|Method|[0] rows;[1] columns;[2] circle
+OutputRaster|HIST|Histogram
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Hypsometry.txt b/python/plugins/processing/algs/saga/description/2.3.0/Hypsometry.txt
new file mode 100644
index 0000000..2ca4a17
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Hypsometry.txt
@@ -0,0 +1,10 @@
+Hypsometry
+ta_morphometry
+ParameterRaster|ELEVATION|Elevation|False
+ParameterNumber|COUNT|Number of Classes|None|None|100.0
+ParameterSelection|SORTING|Sort|[0] up;[1] down
+ParameterSelection|METHOD|Classification Constant|[0] height;[1] area
+ParameterBoolean|BZRANGE         |Use Z-Range|True
+ParameterNumber|ZRANGE_MIN|Z-Range Min|None|None|0.0
+ParameterNumber|ZRANGE_MAX|Z-Range Max|None|None|1000.0
+OutputTable|TABLE|Hypsometry
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/InverseDistanceWeighted.txt b/python/plugins/processing/algs/saga/description/2.3.0/InverseDistanceWeighted.txt
new file mode 100644
index 0000000..11b2d2a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/InverseDistanceWeighted.txt
@@ -0,0 +1,22 @@
+Inverse Distance Weighted
+grid_gridding
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+ParameterSelection|DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting scheme
+ParameterNumber|DW_IDW_POWER|Inverse Distance Power|0.0|None|2
+ParameterBoolean|DW_IDW_OFFSET|Inverse Distance Offset|False
+ParameterNumber|DW_BANDWIDTH|Exponential and Gaussian Weighting Bandwidth|0.0|None|1
+ParameterSelection|SEARCH_RANGE|Search Range|[0] search radius (local);[1] no search radius (global)|1
+ParameterNumber|SEARCH_RADIUS|Search Radius|None|None|100.0
+ParameterSelection|SEARCH_POINTS_ALL|Search Range|[0] maximum number of nearest points;[1] all points within search distance|0
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|-1|None|-1
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|20|1
+ParameterSelection|SEARCH_DIRECTION|Search Mode|[0] all directions;[1] quadrants
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points
+ParameterNumber|SEARCH_POINTS_MAX|Maximum Number of Points|None|None|10
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_DEFINITION|Target Grid System|[0] user defined;[1] grid or grid system
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+ParameterRaster|TARGET_TEMPLATE|Target System|True
+OutputRaster|TARGET_OUT_GRID|Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/InvertDataNo-Data.txt b/python/plugins/processing/algs/saga/description/2.3.0/InvertDataNo-Data.txt
new file mode 100644
index 0000000..79a626f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/InvertDataNo-Data.txt
@@ -0,0 +1,4 @@
+Invert Data/No-Data
+grid_tools
+ParameterRaster|INPUT|Grid|False
+OutputRaster|OUTPUT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/KernelDensityEstimation.txt b/python/plugins/processing/algs/saga/description/2.3.0/KernelDensityEstimation.txt
new file mode 100644
index 0000000..ec91195
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/KernelDensityEstimation.txt
@@ -0,0 +1,11 @@
+Kernel Density Estimation
+grid_gridding
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|POPULATION|Weight|POINTS|-1|False
+ParameterNumber|RADIUS|Radius|None|None|10
+ParameterSelection|KERNEL|Kernel|[0] quartic kernel;[1] gaussian kernel
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Kernel
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LSFactor.txt b/python/plugins/processing/algs/saga/description/2.3.0/LSFactor.txt
new file mode 100644
index 0000000..6522dc4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LSFactor.txt
@@ -0,0 +1,9 @@
+LS Factor
+ta_hydrology
+ParameterRaster|SLOPE|Slope|False
+ParameterRaster|AREA|Catchment Area|False
+ParameterSelection|CONV|Area to Length Conversion|[0] no conversion (areas already given as specific catchment area);[1] 1 / cell size (specific catchment area);[2] square root (catchment length)
+ParameterSelection|METHOD|Method (LS)|[0] Moore et al. 1991;[1] Desmet & Govers 1996;[2] Boehner & Selige 2006
+ParameterNumber|EROSIVITY|Rill/Interrill Erosivity|None|None|0.0
+ParameterSelection|STABILITY|Stability|[0] stable;[1] instable (thawing)
+OutputRaster|LS|LS Factor
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LakeFlood.txt b/python/plugins/processing/algs/saga/description/2.3.0/LakeFlood.txt
new file mode 100644
index 0000000..0328fc6
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LakeFlood.txt
@@ -0,0 +1,7 @@
+Lake Flood
+ta_hydrology
+ParameterRaster|ELEV|DEM|False
+ParameterRaster|SEEDS|Seeds|False
+ParameterBoolean|LEVEL         |Absolute Water Levels|True
+OutputRaster|OUTDEPTH|Lake
+OutputRaster|OUTLEVEL|Surface
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LandSurfaceTemperature.txt b/python/plugins/processing/algs/saga/description/2.3.0/LandSurfaceTemperature.txt
new file mode 100644
index 0000000..7983389
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LandSurfaceTemperature.txt
@@ -0,0 +1,10 @@
+Land Surface Temperature
+ta_morphometry
+ParameterRaster|DEM|Elevation [m]|False
+ParameterRaster|SWR|Short Wave Radiation [kW/m2]|False
+ParameterRaster|LAI|Leaf Area Index|False
+ParameterNumber|Z_REFERENCE|Elevation at Reference Station [m]|None|None|0.0
+ParameterNumber|T_REFERENCE|Temperature at Reference Station [Deg.Celsius]|None|None|0.0
+ParameterNumber|T_GRADIENT|Temperature Gradient [Deg.Celsius/km]|None|None|6.5
+ParameterNumber|C_FACTOR|C Factor|None|None|1.0
+OutputRaster|LST|Land Surface Temperature [Deg.Celsius]
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LaplacianFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/LaplacianFilter.txt
new file mode 100644
index 0000000..e828148
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LaplacianFilter.txt
@@ -0,0 +1,8 @@
+Laplacian Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|METHOD|Method|[0] standard kernel 1;[1] standard kernel 2;[2] Standard kernel 3;[3] user defined kernel
+ParameterNumber|SIGMA|Standard Deviation (Percent of Radius)|None|None|0
+ParameterNumber|RADIUS|Radius|None|None|1
+ParameterSelection|MODE|Search Mode|[0] square;[1] circle
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Layerofextremevalue.txt b/python/plugins/processing/algs/saga/description/2.3.0/Layerofextremevalue.txt
new file mode 100644
index 0000000..15c6ed2
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Layerofextremevalue.txt
@@ -0,0 +1,5 @@
+Layer of extreme value
+grid_analysis
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|CRITERIA|Method|[0] Maximum;[1] Minimum
+OutputRaster|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LeastCostPaths.txt b/python/plugins/processing/algs/saga/description/2.3.0/LeastCostPaths.txt
new file mode 100644
index 0000000..ecbdd48
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LeastCostPaths.txt
@@ -0,0 +1,7 @@
+Least Cost Paths
+grid_analysis
+ParameterVector|SOURCE|Source Point(s)|0|False
+ParameterRaster|DEM|Accumulated cost|False
+ParameterMultipleInput|VALUES|Values|3|True
+OutputVector|POINTS|Profile (points)
+OutputVector|LINE|Profile (lines)
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Line-PolygonIntersection.txt b/python/plugins/processing/algs/saga/description/2.3.0/Line-PolygonIntersection.txt
new file mode 100644
index 0000000..9c15f25
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Line-PolygonIntersection.txt
@@ -0,0 +1,6 @@
+Line-Polygon Intersection
+shapes_lines
+ParameterVector|LINES|Lines|1|False
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterSelection|METHOD|Output|[0] one multi-line per polygon;[1] keep original line attributes
+OutputVector|INTERSECT|Intersection
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LineDissolve.txt b/python/plugins/processing/algs/saga/description/2.3.0/LineDissolve.txt
new file mode 100644
index 0000000..fb704cc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LineDissolve.txt
@@ -0,0 +1,8 @@
+Line Dissolve
+shapes_lines
+ParameterVector|LINES|Lines|-1|False
+ParameterTableField|FIELD_1|1. Attribute|LINES|-1|False
+ParameterTableField|FIELD_2|2. Attribute|LINES|-1|False
+ParameterTableField|FIELD_3|3. Attribute|LINES|-1|False
+ParameterSelection|ALL|Dissolve...|[0] lines with same attribute value(s);[1] all lines
+OutputVector|DISSOLVED|Dissolved Lines
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LineProperties.txt b/python/plugins/processing/algs/saga/description/2.3.0/LineProperties.txt
new file mode 100644
index 0000000..d23d392
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LineProperties.txt
@@ -0,0 +1,7 @@
+Line Properties
+shapes_lines
+ParameterVector|LINES|Lines|1|False
+ParameterBoolean|BPARTS      |Number of Parts|True
+ParameterBoolean|BPOINTS     |Number of Vertices|True
+ParameterBoolean|BLENGTH     |Length|True
+OutputVector|OUTPUT|Lines with Property Attributes
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LineSimplification.txt b/python/plugins/processing/algs/saga/description/2.3.0/LineSimplification.txt
new file mode 100644
index 0000000..b1bb26e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LineSimplification.txt
@@ -0,0 +1,5 @@
+Line Simplification
+shapes_lines
+ParameterVector|LINES|Lines|1|False
+ParameterNumber|TOLERANCE|Tolerance|None|None|1.0
+OutputVector|OUTPUT|Simplified Lines
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/LocalMinimaandMaxima.txt b/python/plugins/processing/algs/saga/description/2.3.0/LocalMinimaandMaxima.txt
new file mode 100644
index 0000000..4b415bc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/LocalMinimaandMaxima.txt
@@ -0,0 +1,5 @@
+Local Minima and Maxima
+shapes_grid
+ParameterRaster|GRID|Grid|False
+OutputVector|MINIMA|Minima
+OutputVector|MAXIMA|Maxima
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MajorityFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/MajorityFilter.txt
new file mode 100644
index 0000000..7cd6fe5
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MajorityFilter.txt
@@ -0,0 +1,7 @@
+Majority Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|MODE|Search Mode|[0] Square;[1] Circle
+ParameterNumber|RADIUS|Radius|1.0|None|1
+ParameterNumber|THRESHOLD|Threshold [Percent]|0.0|None|0
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MassBalanceIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/MassBalanceIndex.txt
new file mode 100644
index 0000000..e65fabf
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MassBalanceIndex.txt
@@ -0,0 +1,8 @@
+Mass Balance Index
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|HREL|Vertical Distance to Channel Network|True
+ParameterNumber|TSLOPE|T Slope|None|None|15.0
+ParameterNumber|TCURVE|T Curvature|None|None|0.01
+ParameterNumber|THREL|T Vertical Distance to Channel Network|None|None|15.0
+OutputRaster|MBI|Mass Balance Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MergeLayers.txt b/python/plugins/processing/algs/saga/description/2.3.0/MergeLayers.txt
new file mode 100644
index 0000000..f0d5bb7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MergeLayers.txt
@@ -0,0 +1,6 @@
+Merge Layers
+shapes_tools
+ParameterMultipleInput|INPUT|Input Layers|-1|True
+ParameterBoolean|SRCINFO|Add source information|True
+ParameterBoolean|MATCH|Match Fields by Name|True
+OutputVector|MERGED|Merged Layer
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MetricConversions.txt b/python/plugins/processing/algs/saga/description/2.3.0/MetricConversions.txt
new file mode 100644
index 0000000..3501a9e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MetricConversions.txt
@@ -0,0 +1,5 @@
+Metric Conversions
+grid_calculus
+ParameterRaster|GRID|Grid|False
+ParameterSelection|CONVERSION|Conversion|[0] radians to degree;[1] degree to radians;[2] Celsius to Fahrenheit;[3] Fahrenheit to Celsius
+OutputRaster|CONV|Converted Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MinimumDistanceAnalysis.txt b/python/plugins/processing/algs/saga/description/2.3.0/MinimumDistanceAnalysis.txt
new file mode 100644
index 0000000..200a565
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MinimumDistanceAnalysis.txt
@@ -0,0 +1,4 @@
+Minimum Distance Analysis
+statistics_points
+ParameterVector|POINTS|Points|0|False
+OutputTable|TABLE|Minimum Distance Analysis
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ModifiedQuadraticShepard.txt b/python/plugins/processing/algs/saga/description/2.3.0/ModifiedQuadraticShepard.txt
new file mode 100644
index 0000000..3c96918
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ModifiedQuadraticShepard.txt
@@ -0,0 +1,11 @@
+Modified Quadratic Shepard
+grid_gridding
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterNumber|QUADRATIC_NEIGHBORS|Quadratic Neighbors|5.0|None|13
+ParameterNumber|WEIGHTING_NEIGHBORS|Weighting Neighbors|3.0|None|19
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MorphologicalFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/MorphologicalFilter.txt
new file mode 100644
index 0000000..28dfd3f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MorphologicalFilter.txt
@@ -0,0 +1,7 @@
+Morphological Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|MODE|Search Mode|[0] Square;[1] Circle
+ParameterNumber|RADIUS|Radius|None|None|1
+ParameterSelection|METHOD|Method|[0] Dilation;[1] Erosion;[2] Opening;[3] Closing
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MorphometricProtectionIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/MorphometricProtectionIndex.txt
new file mode 100644
index 0000000..48d4443
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MorphometricProtectionIndex.txt
@@ -0,0 +1,5 @@
+Morphometric Protection Index
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|RADIUS|Radius|None|None|2000.0
+OutputRaster|PROTECTION|Protection Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Mosaicking.txt b/python/plugins/processing/algs/saga/description/2.3.0/Mosaicking.txt
new file mode 100644
index 0000000..16f0439
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Mosaicking.txt
@@ -0,0 +1,15 @@
+Mosaick raster layers|Mosaicking
+grid_tools
+AllowUnmatching
+ParameterMultipleInput|GRIDS|Input Grids|3|False
+ParameterString|NAME|Name|Mosaic
+ParameterSelection|TYPE|Preferred data storage type|[0] 1 bit;[1] 1 byte unsigned integer;[2] 1 byte signed integer;[3] 2 byte unsigned integer;[4] 2 byte signed integer;[5] 4 byte unsigned integer;[6] 4 byte signed integer;[7] 4 byte floating point;[8] 8 byte floating point|7
+ParameterSelection|INTERPOL|Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation|0
+ParameterSelection|OVERLAP|Overlapping Areas|[0] first;[1] last;[2] minimum;[3] maximum;[4] mean;[5] blend boundary;[6] feathering|1
+ParameterNumber|BLEND_DIST|Blending Distance|0.0|None|10.0
+ParameterSelection|MATCH|Match|[0] none;[1] regression|0
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Multi-BandVariation.txt b/python/plugins/processing/algs/saga/description/2.3.0/Multi-BandVariation.txt
new file mode 100644
index 0000000..8dcf7d0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Multi-BandVariation.txt
@@ -0,0 +1,11 @@
+Multi-Band Variation
+statistics_grid
+ParameterMultipleInput|BANDS|Grids|3|False
+ParameterNumber|RADIUS|Radius [Cells]|None|None|1
+ParameterSelection|DISTANCE_WEIGHTING_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DISTANCE_WEIGHTING_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DISTANCE_WEIGHTING_IDW_OFFSET     |Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|1.0
+OutputRaster|MEAN|Mean Distance
+OutputRaster|STDDEV|Standard Deviation
+OutputRaster|DIFF|Distance
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultiDirectionLeeFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/MultiDirectionLeeFilter.txt
new file mode 100644
index 0000000..a768db9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultiDirectionLeeFilter.txt
@@ -0,0 +1,10 @@
+Multi Direction Lee Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|NOISE_ABS|Estimated Noise (absolute)|None|None|1.0
+ParameterNumber|NOISE_REL|Estimated Noise (relative)|None|None|1.0
+ParameterBoolean|WEIGHTED       |Weighted|True
+ParameterSelection|METHOD|Method|[0] noise variance given as absolute value;[1] noise variance given relative to mean standard deviation;[2] original calculation (Ringeler)
+OutputRaster|RESULT|Filtered Grid
+OutputRaster|STDDEV|Minimum Standard Deviation
+OutputRaster|DIR|Direction of Minimum Standard Deviation
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation(fromGrid).txt b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation(fromGrid).txt
new file mode 100644
index 0000000..9c59c92
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation(fromGrid).txt
@@ -0,0 +1,13 @@
+Multilevel B-Spline Interpolation (from Grid)
+grid_spline
+ParameterRaster|GRID|Grid|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterSelection|METHOD|Method|[0] without B-spline refinement;[1] with B-spline refinement
+ParameterNumber|EPSILON|Threshold Error|0|None|0.0001
+ParameterNumber|LEVEL_MAX|Maximum Level|1|14|11.0
+ParameterBoolean|UPDATE|Update View|False
+ParameterSelection|DATATYPE|Data Type|[0] same as input grid;[1] floating point
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation.txt b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation.txt
new file mode 100644
index 0000000..118904a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolation.txt
@@ -0,0 +1,11 @@
+Multilevel B-Spline Interpolation
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+ParameterSelection|METHOD|Method|[0] without B-spline refinement;[1] with B-spline refinement
+ParameterNumber|EPSILON|Threshold Error|0|None|0.0001
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolationforCategories.txt b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolationforCategories.txt
new file mode 100644
index 0000000..e4104cb
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultilevelB-SplineInterpolationforCategories.txt
@@ -0,0 +1,11 @@
+Multilevel B-Spline Interpolation for Categories
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+ParameterRaster|TARGET_TEMPLATE|Target system|True
+OutputRaster|TARGET_CATEGORIES|Categories
+OutputRaster|TARGET_PROPABILITY|Propability
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(GridGrids).txt b/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(GridGrids).txt
new file mode 100644
index 0000000..3263553
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(GridGrids).txt
@@ -0,0 +1,15 @@
+Multiple Regression Analysis (Grid/Grids)
+statistics_regression
+ParameterRaster|DEPENDENT|Dependent|False
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterSelection|INTERPOL|Grid Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+ParameterBoolean|COORD_X         |Include X Coordinate|True
+ParameterBoolean|COORD_Y         |Include Y Coordinate|True
+ParameterSelection|METHOD|Method|[0] include all;[1] forward;[2] backward;[3] stepwise
+ParameterNumber|P_IN|P in|None|None|5
+ParameterNumber|P_OUT|P out|None|None|5
+OutputRaster|REGRESSION|Regression
+OutputRaster|RESIDUALS|Residuals
+OutputTable|INFO_COEFF|Details: Coefficients
+OutputTable|INFO_MODEL|Details: Model
+OutputTable|INFO_STEPS|Details: Steps
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(PointsGrids).txt b/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(PointsGrids).txt
new file mode 100644
index 0000000..c5dbbf0
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultipleRegressionAnalysis(PointsGrids).txt
@@ -0,0 +1,16 @@
+Multiple Regression Analysis (Points/Grids)
+statistics_regression
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterTableField|ATTRIBUTE|Attribute|SHAPES|-1|False
+ParameterSelection|INTERPOL|Grid Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+ParameterBoolean|COORD_X         |Include X Coordinate|True
+ParameterBoolean|COORD_Y         |Include Y Coordinate|True
+ParameterSelection|METHOD|Method|[0] include all;[1] forward;[2] backward;[3] stepwise
+ParameterNumber|P_IN|P in|None|None|5
+ParameterNumber|P_OUT|P out|None|None|5
+OutputTable|INFO_COEFF|Details: Coefficients
+OutputTable|INFO_MODEL|Details: Model
+OutputTable|INFO_STEPS|Details: Steps
+OutputVector|RESIDUALS|Residuals
+OutputRaster|REGRESSION|Regression
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/MultiresolutionIndexofValleyBottomFlatness(MRVBF).txt b/python/plugins/processing/algs/saga/description/2.3.0/MultiresolutionIndexofValleyBottomFlatness(MRVBF).txt
new file mode 100644
index 0000000..04d4c3c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/MultiresolutionIndexofValleyBottomFlatness(MRVBF).txt
@@ -0,0 +1,13 @@
+Multiresolution Index of Valley Bottom Flatness (MRVBF)
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|T_SLOPE|Initial Threshold for Slope|None|None|16
+ParameterNumber|T_PCTL_V|Threshold for Elevation Percentile (Lowness)|None|None|0.4
+ParameterNumber|T_PCTL_R|Threshold for Elevation Percentile (Upness)|None|None|0.35
+ParameterNumber|P_SLOPE|Shape Parameter for Slope|None|None|4.0
+ParameterNumber|P_PCTL|Shape Parameter for Elevation Percentile|None|None|3.0
+ParameterBoolean|UPDATE        |Update Views|True
+ParameterBoolean|CLASSIFY      |Classify|True
+ParameterNumber|MAX_RES|Maximum Resolution (Percentage)|None|None|100
+OutputRaster|MRVBF|MRVBF
+OutputRaster|MRRTF|MRRTF
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/NaturalNeighbour.txt b/python/plugins/processing/algs/saga/description/2.3.0/NaturalNeighbour.txt
new file mode 100644
index 0000000..70353b1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/NaturalNeighbour.txt
@@ -0,0 +1,10 @@
+Natural Neighbour
+grid_gridding
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterBoolean|SIBSON|Sibson|True
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/NearestNeighbour.txt b/python/plugins/processing/algs/saga/description/2.3.0/NearestNeighbour.txt
new file mode 100644
index 0000000..50a99a9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/NearestNeighbour.txt
@@ -0,0 +1,9 @@
+Nearest Neighbour
+grid_gridding
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/OrderedWeightedAveraging(OWA).txt b/python/plugins/processing/algs/saga/description/2.3.0/OrderedWeightedAveraging(OWA).txt
new file mode 100644
index 0000000..19b0b85
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/OrderedWeightedAveraging(OWA).txt
@@ -0,0 +1,5 @@
+Ordered Weighted Averaging|Ordered Weighted Averaging (OWA)
+grid_analysis
+ParameterMultipleInput|GRIDS|Input Grids|3|False
+ParameterFixedTable|WEIGHTS|Weights|3|Weight|False
+OutputRaster|OUTPUT|Output Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging(Global).txt b/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging(Global).txt
new file mode 100644
index 0000000..9d3c71e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging(Global).txt
@@ -0,0 +1,27 @@
+Universal Kriging
+statistics_kriging
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterSelection|TQUALITY|Type of Quality Measure|[0] standard deviation;[1] variance
+ParameterBoolean|LOG|Logarithmic Transformation|True
+ParameterBoolean|BLOCK|Block Kriging|True
+ParameterNumber|DBLOCK|Block Size|0|None|100
+ParameterNumber|VAR_MAXDIST|Maximum Distance|None|None|-1.0
+ParameterNumber|VAR_NCLASSES|Lag Distance Classes|1|None|100
+ParameterNumber|VAR_NSKIP|Skip|1|None|1
+ParameterString|VAR_MODEL|Variogram Model|a + b * x
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|PREDICTION|Prediction
+OutputRaster|VARIANCE|Quality Measure
+ParameterSelection|SEARCH_RANGE|Search Range|[0] local;[1] global
+ParameterNumber|SEARCH_RADIUS|Maximum Search Distance|0|None|1000
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+ParameterMultipleInput|PREDICTORS|Predictors|3|True
+ParameterSelection|INTERPOL|Grid Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+ParameterBoolean|COORDS|Coordinates|False
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging.txt b/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging.txt
new file mode 100644
index 0000000..ec7a858
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/OrdinaryKriging.txt
@@ -0,0 +1,24 @@
+Ordinary Kriging
+statistics_kriging
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterSelection|TQUALITY|Type of Quality Measure|[0] standard deviation;[1] variance
+ParameterBoolean|LOG|Logarithmic Transformation|True
+ParameterBoolean|BLOCK|Block Kriging|True
+ParameterNumber|DBLOCK|Block Size|0|None|100
+ParameterNumber|VAR_MAXDIST|Maximum Distance|None|None|-1.0
+ParameterNumber|VAR_NCLASSES|Lag Distance Classes|1|None|100
+ParameterNumber|VAR_NSKIP|Skip|1|None|1
+ParameterString|VAR_MODEL|Variogram Model|a + b * x
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|PREDICTION|Prediction
+OutputRaster|VARIANCE|Quality Measure
+ParameterSelection|SEARCH_RANGE|Search Range|[0] local;[1] global
+ParameterNumber|SEARCH_RADIUS|Maximum Search Distance|0|None|1000
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlow-KinematicWaveD8.txt b/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlow-KinematicWaveD8.txt
new file mode 100644
index 0000000..0a28700
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlow-KinematicWaveD8.txt
@@ -0,0 +1,13 @@
+Overland Flow - Kinematic Wave D8
+sim_hydrology
+ParameterRaster|DEM|Elevation|False
+ParameterVector|GAUGES|Gauges|-1|True
+ParameterNumber|TIME_SPAN|Simulation Time [h]|None|None|24
+ParameterNumber|TIME_STEP|Simulation Time Step [h]|None|None|0.1
+ParameterNumber|ROUGHNESS|Manning's Roughness|None|None|0.03
+ParameterNumber|NEWTON_MAXITER|Max. Iterations|None|None|100
+ParameterNumber|NEWTON_EPSILON|Epsilon|None|None|0.0001
+ParameterSelection|PRECIP|Precipitation|[0] Homogenous;[1] Above Elevation;[2] Left Half
+ParameterNumber|THRESHOLD|Threshold Elevation|None|None|0.0
+OutputRaster|FLOW|Runoff
+OutputTable|GAUGES_FLOW|Flow at Gauges
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlowDistancetoChannelNetwork.txt b/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlowDistancetoChannelNetwork.txt
new file mode 100644
index 0000000..58f9044
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/OverlandFlowDistancetoChannelNetwork.txt
@@ -0,0 +1,8 @@
+Overland Flow Distance to Channel Network
+ta_channels
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|CHANNELS|Channel Network|False
+ParameterSelection|METHOD|Flow Algorithm|[0] D8;[1] MFD
+OutputRaster|DISTANCE|Overland Flow Distance
+OutputRaster|DISTVERT|Vertical Overland Flow Distance
+OutputRaster|DISTHORZ|Horizontal Overland Flow Distance
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Patching.txt b/python/plugins/processing/algs/saga/description/2.3.0/Patching.txt
new file mode 100644
index 0000000..6a70404
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Patching.txt
@@ -0,0 +1,6 @@
+Patching
+grid_tools
+ParameterRaster|ORIGINAL|Grid|False
+ParameterRaster|ADDITIONAL|Patch Grid|False
+ParameterSelection|INTERPOLATION|Interpolation Method|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+OutputRaster|COMPLETED|Completed Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PatternAnalysis.txt b/python/plugins/processing/algs/saga/description/2.3.0/PatternAnalysis.txt
new file mode 100644
index 0000000..5a6d4de
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PatternAnalysis.txt
@@ -0,0 +1,11 @@
+Pattern Analysis
+grid_analysis
+ParameterRaster|INPUT|Input Grid|False
+ParameterSelection|WINSIZE|Size of Analysis Window|[0] 3 X 3;[1] 5 X 5;[2] 7 X 7
+ParameterNumber|MAXNUMCLASS|Max. Number of Classes|None|None|0
+OutputRaster|RELATIVE|Relative Richness
+OutputRaster|DIVERSITY|Diversity
+OutputRaster|DOMINANCE|Dominance
+OutputRaster|FRAGMENTATION|Fragmentation
+OutputRaster|NDC|Number of Different Classes
+OutputRaster|CVN|Center Versus Neighbours
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PointStatisticsforPolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/PointStatisticsforPolygons.txt
new file mode 100644
index 0000000..02e0aba
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PointStatisticsforPolygons.txt
@@ -0,0 +1,14 @@
+Point Statistics for Polygons
+shapes_polygons
+ParameterVector|POINTS|Points|0|False
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|FIELDS|Attribute Table field|POINTS|-1|False
+ParameterSelection|FIELD_NAME|Field Naming Choice|[0] variable type + original name;[1] original name + variable type;[2] original name;[3] variable type
+ParameterBoolean|SUM             |Sum|True
+ParameterBoolean|AVG             |Mean|True
+ParameterBoolean|VAR             |Variance|True
+ParameterBoolean|DEV             |Deviation|True
+ParameterBoolean|MIN             |Minimum|True
+ParameterBoolean|MAX             |Maximum|True
+ParameterBoolean|NUM             |Count|True
+OutputVector|STATISTICS|Statistics
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PointsFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/PointsFilter.txt
new file mode 100644
index 0000000..afb3ee9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PointsFilter.txt
@@ -0,0 +1,12 @@
+Points Filter
+shapes_points
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterNumber|RADIUS|Radius|None|None|1
+ParameterNumber|MINNUM|Minimum Number of Points|None|None|0
+ParameterNumber|MAXNUM|Maximum Number of Points|None|None|0
+ParameterBoolean|QUADRANTS      |Quadrants|True
+ParameterSelection|METHOD|Filter Criterion|[0] keep maxima (with tolerance);[1] keep minima (with tolerance);[2] remove maxima (with tolerance);[3] remove minima (with tolerance);[4] remove below percentile;[5] remove above percentile
+ParameterNumber|TOLERANCE|Tolerance|None|None|0.0
+ParameterNumber|PERCENT|Percentile|None|None|50
+OutputVector|FILTER|Filtered Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PointsThinning.txt b/python/plugins/processing/algs/saga/description/2.3.0/PointsThinning.txt
new file mode 100644
index 0000000..00643e8
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PointsThinning.txt
@@ -0,0 +1,6 @@
+Points Thinning
+shapes_points
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterNumber|RESOLUTION|Resolution|0.0|None|1.0
+OutputVector|THINNED|Thinned Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolartoCartesianCoordinates.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolartoCartesianCoordinates.txt
new file mode 100644
index 0000000..8c9b76b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolartoCartesianCoordinates.txt
@@ -0,0 +1,8 @@
+Polar to Cartesian Coordinates
+shapes_tools
+ParameterVector|POLAR|Polar Coordinates|-1|False
+ParameterTableField|F_EXAGG|Exaggeration|POLAR|-1|False
+ParameterNumber|D_EXAGG|Exaggeration Factor|None|None|1
+ParameterNumber|RADIUS|Radius|None|None|6371000.0
+ParameterBoolean|DEGREE       |Degree|True
+OutputVector|CARTES|Cartesian Coordinates
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Polygon-LineIntersection.txt b/python/plugins/processing/algs/saga/description/2.3.0/Polygon-LineIntersection.txt
new file mode 100644
index 0000000..cb4dd7e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Polygon-LineIntersection.txt
@@ -0,0 +1,5 @@
+Polygon-Line Intersection
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterVector|LINES|Lines|1|False
+OutputVector|INTERSECT|Intersection
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonCentroids.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonCentroids.txt
new file mode 100644
index 0000000..5672b7e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonCentroids.txt
@@ -0,0 +1,5 @@
+Polygon Centroids
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterBoolean|METHOD         |Centroids for each part|True
+OutputVector|CENTROIDS|Centroids
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonClipping.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonClipping.txt
new file mode 100644
index 0000000..08c4993
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonClipping.txt
@@ -0,0 +1,6 @@
+Polygon Clipping
+shapes_polygons
+ParameterVector|CLIP|Clip features|2|False
+ParameterVector|S_INPUT|Input features|-1|False
+OutputVector|S_OUTPUT|Output features
+Hardcoded|-MULTIPLE 0
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonDifference.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDifference.txt
new file mode 100644
index 0000000..ffb1996
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDifference.txt
@@ -0,0 +1,6 @@
+Difference
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Difference
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolve.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolve.txt
new file mode 100644
index 0000000..3166386
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolve.txt
@@ -0,0 +1,8 @@
+Polygon dissolve (by attribute)|Polygon Dissolve
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|FIELD_1|1. Attribute|POLYGONS|-1|True
+ParameterTableField|FIELD_2|2. Attribute|POLYGONS|-1|True
+ParameterTableField|FIELD_3|3. Attribute|POLYGONS|-1|True
+ParameterBoolean|BND_KEEP|Keep inner boundaries|True
+OutputVector|DISSOLVED|Dissolved
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolveAllPolygs.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolveAllPolygs.txt
new file mode 100644
index 0000000..4913356
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonDissolveAllPolygs.txt
@@ -0,0 +1,5 @@
+Polygon dissolve (all polygons)|Polygon Dissolve
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterBoolean|BND_KEEP|Keep inner boundaries|True
+OutputVector|DISSOLVED|Dissolved
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonIdentity.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonIdentity.txt
new file mode 100644
index 0000000..6cb365f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonIdentity.txt
@@ -0,0 +1,6 @@
+Identity
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Identity
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonIntersect.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonIntersect.txt
new file mode 100644
index 0000000..2a822db
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonIntersect.txt
@@ -0,0 +1,6 @@
+Intersect
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Intersection
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonPartstoSeparatePolygons.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonPartstoSeparatePolygons.txt
new file mode 100644
index 0000000..1cab12c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonPartstoSeparatePolygons.txt
@@ -0,0 +1,5 @@
+Polygon Parts to Separate Polygons
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterBoolean|LAKES         |Ignore Lakes|True
+OutputVector|PARTS|Polygon Parts
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonProperties.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonProperties.txt
new file mode 100644
index 0000000..eeb063b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonProperties.txt
@@ -0,0 +1,8 @@
+Polygon Properties
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterBoolean|BPARTS        |Number of Parts|True
+ParameterBoolean|BPOINTS       |Number of Vertices|True
+ParameterBoolean|BLENGTH       |Perimeter|True
+ParameterBoolean|BAREA         |Area|True
+OutputVector|OUTPUT|Polygons with Property Attributes
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonSelfIntersection.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonSelfIntersection.txt
new file mode 100644
index 0000000..14359a4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonSelfIntersection.txt
@@ -0,0 +1,5 @@
+Polygon Self-Intersection
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|ID|Identifier|POLYGONS|-1|False
+OutputVector|INTERSECT|Intersection
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonShapeIndices.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonShapeIndices.txt
new file mode 100644
index 0000000..468ac18
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonShapeIndices.txt
@@ -0,0 +1,4 @@
+Polygon Shape Indices
+shapes_polygons
+ParameterVector|SHAPES|Shapes|2|False
+OutputVector|INDEX|Shape Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonSymmetricalDifference.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonSymmetricalDifference.txt
new file mode 100644
index 0000000..d1e4e3d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonSymmetricalDifference.txt
@@ -0,0 +1,6 @@
+Symmetrical Difference
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Symmetrical Difference
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonUnion.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonUnion.txt
new file mode 100644
index 0000000..8dd3e56
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonUnion.txt
@@ -0,0 +1,6 @@
+Union
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Union
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonUpdate.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonUpdate.txt
new file mode 100644
index 0000000..71914cd
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonUpdate.txt
@@ -0,0 +1,6 @@
+Update
+shapes_polygons
+ParameterVector|A|Layer A|2|False
+ParameterVector|B|Layer B|2|False
+ParameterBoolean|SPLIT|Split Parts|True
+OutputVector|RESULT|Updated polygons
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolygonstoEdgesandNodes.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolygonstoEdgesandNodes.txt
new file mode 100644
index 0000000..e9f4628
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolygonstoEdgesandNodes.txt
@@ -0,0 +1,5 @@
+Polygons to Edges and Nodes
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+OutputVector|EDGES|Edges
+OutputVector|NODES|Nodes
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PolynomialRegression.txt b/python/plugins/processing/algs/saga/description/2.3.0/PolynomialRegression.txt
new file mode 100644
index 0000000..e7ccd30
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PolynomialRegression.txt
@@ -0,0 +1,14 @@
+Polynomial Regression
+statistics_regression
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|ATTRIBUTE|Attribute|POINTS|-1|False
+ParameterSelection|POLYNOM|Polynom|[0] simple planar surface;[1] bi-linear saddle;[2] quadratic surface;[3] cubic surface;[4] user defined
+ParameterNumber|XORDER|Maximum X Order|1|None|4
+ParameterNumber|YORDER|Maximum Y Order|1|None|4
+ParameterNumber|TORDER|Maximum Total Order|0|None|4
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
+OutputVector|RESIDUALS|Residuals
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/PrincipleComponentsAnalysis.txt b/python/plugins/processing/algs/saga/description/2.3.0/PrincipleComponentsAnalysis.txt
new file mode 100644
index 0000000..f43035a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/PrincipleComponentsAnalysis.txt
@@ -0,0 +1,6 @@
+Principle Components Analysis
+table_calculus
+ParameterTable|TABLE|Table|False
+ParameterSelection|METHOD|Method|[0] correlation matrix;[1] variance-covariance matrix;[2] sums-of-squares-and-cross-products matrix
+ParameterNumber|NFIRST|Number of Components|None|None|3
+OutputTable|PCA|Principle Components
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Profilefrompoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/Profilefrompoints.txt
new file mode 100644
index 0000000..9e21d5f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Profilefrompoints.txt
@@ -0,0 +1,7 @@
+Profile from points table|Profile from points
+ta_profiles
+ParameterRaster|GRID|Grid|False
+ParameterTable|TABLE|Input|False
+ParameterTableField|X|X|TABLE|-1|False
+ParameterTableField|Y|Y|TABLE|-1|False
+OutputTable|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ProfilesfromLines.txt b/python/plugins/processing/algs/saga/description/2.3.0/ProfilesfromLines.txt
new file mode 100644
index 0000000..bc32a9f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ProfilesfromLines.txt
@@ -0,0 +1,9 @@
+Profiles from Lines
+ta_profiles
+ParameterRaster|DEM|DEM|False
+ParameterMultipleInput|VALUES|Values|3|True
+ParameterVector|LINES|Lines|1|False
+ParameterTableField|NAME|Name|LINES|-1|False
+ParameterBoolean|SPLIT         |Each Line as new Profile|True
+OutputVector|PROFILE|Profiles
+OutputVector|PROFILES|Profiles
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ProximityGrid.txt b/python/plugins/processing/algs/saga/description/2.3.0/ProximityGrid.txt
new file mode 100644
index 0000000..b78dad3
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ProximityGrid.txt
@@ -0,0 +1,6 @@
+Proximity Grid
+grid_tools
+ParameterRaster|FEATURES|Features|False
+OutputRaster|DISTANCE|Distance
+OutputRaster|DIRECTION|Direction
+OutputRaster|ALLOCATION|Allocation
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/QuadTreeStructuretoShapes.txt b/python/plugins/processing/algs/saga/description/2.3.0/QuadTreeStructuretoShapes.txt
new file mode 100644
index 0000000..14d3cb7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/QuadTreeStructuretoShapes.txt
@@ -0,0 +1,7 @@
+QuadTree Structure to Shapes
+shapes_tools
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterTableField|ATTRIBUTE|Attribute|SHAPES|-1|False
+OutputVector|POLYGONS|Polygons
+OutputVector|LINES|Lines
+OutputVector|POINTS|Duplicated Points
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RGBComposite.txt b/python/plugins/processing/algs/saga/description/2.3.0/RGBComposite.txt
new file mode 100644
index 0000000..271bbc6
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RGBComposite.txt
@@ -0,0 +1,24 @@
+RGB Composite
+grid_visualisation
+ParameterRaster|GRID_R|R|False
+ParameterRaster|GRID_G|G|False
+ParameterRaster|GRID_B|B|False
+ParameterSelection|R_METHOD|Method for R value|0 - 255;Rescale to 0 - 255;User defined rescale;Percentiles;Percentage of standard deviation
+ParameterSelection|G_METHOD|Method for G value|0 - 255;Rescale to 0 - 255;User defined rescale;Percentiles;Percentage of standard deviation
+ParameterSelection|B_METHOD|Method for B value|0 - 255;Rescale to 0 - 255;User defined rescale;Percentiles;Percentage of standard deviation
+ParameterNumber|R_RANGE_MIN|Rescale Range for RED min|0|255|0
+ParameterNumber|R_RANGE_MAX|Rescale Range for RED max|0|255|255
+ParameterNumber|R_PERCTL_MIN|Percentiles Range for RED min|1|99|1
+ParameterNumber|R_PERCTL_MAX|Percentiles Range for RED max|1|99|99
+ParameterNumber|R_PERCENT|Percentage of standard deviation for RED|0|None|150.0
+ParameterNumber|G_RANGE_MIN|Rescale Range for GREEN min|0|255|0
+ParameterNumber|G_RANGE_MAX|Rescale Range for GREEN max|0|255|255
+ParameterNumber|G_PERCTL_MIN|Percentiles Range for GREEN min|1|99|1
+ParameterNumber|G_PERCTL_MAX|Percentiles Range for GREEN max|1|99|99
+ParameterNumber|G_PERCENT|Percentage of standard deviation for GREEN|0|None|150.0
+ParameterNumber|B_RANGE_MIN|Rescale Range for BLUE min|0|255|0
+ParameterNumber|B_RANGE_MAX|Rescale Range for BLUE max|0|255|255
+ParameterNumber|B_PERCTL_MIN|Percentiles Range for BLUE min|1|99|1
+ParameterNumber|B_PERCTL_MAX|Percentiles Range for BLUE max|1|99|99
+ParameterNumber|B_PERCENT|Percentage of standard deviation for BLUE|0|None|150.0
+OutputRaster|GRID_RGB|Output RGB
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RadiusofVariance(Grid).txt b/python/plugins/processing/algs/saga/description/2.3.0/RadiusofVariance(Grid).txt
new file mode 100644
index 0000000..6e6d3ed
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RadiusofVariance(Grid).txt
@@ -0,0 +1,7 @@
+Radius of Variance (Grid)
+statistics_grid
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|VARIANCE|Standard Deviation|0.0|None|1.0
+ParameterNumber|RADIUS|Maximum Search Radius (cells)|0.0|None|20
+ParameterSelection|OUTPUT|Type of Output|[0] Cells;[1] Map Units
+OutputRaster|RESULT|Variance Radius
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RandomField.txt b/python/plugins/processing/algs/saga/description/2.3.0/RandomField.txt
new file mode 100644
index 0000000..abf32b4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RandomField.txt
@@ -0,0 +1,12 @@
+Random Field
+grid_calculus
+Hardcoded|-DEFINITION 0
+ParameterSelection|METHOD|Method|[0] Uniform;[1] Gaussian
+ParameterNumber|RANGE_MIN|Range Min|None|None|0.0
+ParameterNumber|RANGE_MAX|Range Max|None|None|1.0
+ParameterNumber|MEAN|Arithmetic Mean|None|None|0.0
+ParameterNumber|STDDEV|Standard Deviation|None|None|1.0
+Extent USER_XMIN USER_XMAX USER_YMIN USER_YMAX
+ParameterNumber|USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|USER_FITS|Method|[0] nodes;[1] cells
+OutputRaster|OUT_GRID|Random Field
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RandomTerrainGeneration.txt b/python/plugins/processing/algs/saga/description/2.3.0/RandomTerrainGeneration.txt
new file mode 100644
index 0000000..18043bf
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RandomTerrainGeneration.txt
@@ -0,0 +1,9 @@
+Random Terrain Generation
+grid_calculus
+ParameterNumber|RADIUS|Radius (cells)|None|None|10
+ParameterNumber|ITERATIONS|Iterations|None|None|10
+ParameterSelection|TARGET_TYPE|Target Dimensions|[0] User defined
+ParameterNumber|USER_CELL_SIZE|Grid Size|0.0|None|1.0
+ParameterNumber|USER_COLS|Cols|1.0|None|100
+ParameterNumber|USER_ROWS|Rows|1.0|None|100
+OutputRaster|TARGET_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RankFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/RankFilter.txt
new file mode 100644
index 0000000..b676b15
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RankFilter.txt
@@ -0,0 +1,7 @@
+Rank Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|MODE|Search Mode|[0] Square;[1] Circle
+ParameterNumber|RADIUS|Radius|1.0|None|1
+ParameterNumber|RANK|Rank [Percent]|None|None|50
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RealSurfaceArea.txt b/python/plugins/processing/algs/saga/description/2.3.0/RealSurfaceArea.txt
new file mode 100644
index 0000000..065c34e
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RealSurfaceArea.txt
@@ -0,0 +1,4 @@
+Real Surface Area
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+OutputRaster|AREA|Surface Area
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ReclassifyGridValues.txt b/python/plugins/processing/algs/saga/description/2.3.0/ReclassifyGridValues.txt
new file mode 100644
index 0000000..d76e5a5
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ReclassifyGridValues.txt
@@ -0,0 +1,18 @@
+Reclassify Grid Values
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|METHOD|Method|[0] single;[1] range;[2] simple table
+ParameterNumber|OLD|old value (for single value change)|None|None|0.0
+ParameterNumber|NEW|new value (for single value change)|None|None|1.0
+ParameterSelection|SOPERATOR|operator (for single value change)|[0] =;[1] <;[2] <=;[3] >=;[4] >
+ParameterNumber|MIN|minimum value (for range)|None|None|0.0
+ParameterNumber|MAX|maximum value (for range)|None|None|1.0
+ParameterNumber|RNEW|new value(for range)|None|None|2.0
+ParameterSelection|ROPERATOR|operator (for range)|[0] <=;[1] <
+ParameterFixedTable|RETAB|Lookup Table|3|minimum;maximum;new|False
+ParameterSelection|TOPERATOR|operator (for table)|[0] min <= value < max;[1] min <= value <= max;[2] min < value <= max;[3] min < value < max
+ParameterBoolean|NODATAOPT      |replace no data values|True
+ParameterNumber|NODATA|new value for no data values|None|None|0.0
+ParameterBoolean|OTHEROPT       |replace other values|True
+ParameterNumber|OTHERS|new value for other values|None|None|0.0
+OutputRaster|RESULT|Reclassified Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RegressionAnalysis(PointsGrid).txt b/python/plugins/processing/algs/saga/description/2.3.0/RegressionAnalysis(PointsGrid).txt
new file mode 100644
index 0000000..fd41340
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RegressionAnalysis(PointsGrid).txt
@@ -0,0 +1,9 @@
+Regression analysis|Regression Analysis (Points/Grid)
+statistics_regression
+ParameterRaster|GRID|Grid|False
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterTableField|ATTRIBUTE|Attribute|SHAPES|-1|False
+ParameterSelection|INTERPOL|Grid Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+ParameterSelection|METHOD|Regression Function|[0] Y = a + b * X (linear);[1] Y = a + b / X;[2] Y = a / (b - X);[3] Y = a * X^b (power);[4] Y = a e^(b * X) (exponential);[5] Y = a + b * ln(X) (logarithmic)
+OutputRaster|REGRESSION|Regression
+OutputVector|RESIDUAL|Residuals
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RelativeHeightsandSlopePositions.txt b/python/plugins/processing/algs/saga/description/2.3.0/RelativeHeightsandSlopePositions.txt
new file mode 100644
index 0000000..ab3c30b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RelativeHeightsandSlopePositions.txt
@@ -0,0 +1,11 @@
+Relative Heights and Slope Positions
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|W|w|None|None|0.5
+ParameterNumber|T|t|None|None|10.0
+ParameterNumber|E|e|None|None|2.0
+OutputRaster|HO|Slope Height
+OutputRaster|HU|Valley Depth
+OutputRaster|NH|Normalized Height
+OutputRaster|SH|Standardized Height
+OutputRaster|MS|Mid-Slope Position
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RemoveDuplicatePoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/RemoveDuplicatePoints.txt
new file mode 100644
index 0000000..5a4d4c1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RemoveDuplicatePoints.txt
@@ -0,0 +1,7 @@
+Remove Duplicate Points
+shapes_points
+ParameterVector|POINTS|Points|-1|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterSelection|METHOD|Point to Keep|[0] first point;[1] last point;[2] point with minimum attribute value;[3] point with maximum attribute value
+ParameterSelection|NUMERIC|Numeric Attribute Values|[0] take value from the point to be kept;[1] minimum value of all duplicates;[2] maximum value of all duplicates;[3] mean value of all duplicates
+OutputVector|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Representativeness(Grid).txt b/python/plugins/processing/algs/saga/description/2.3.0/Representativeness(Grid).txt
new file mode 100644
index 0000000..35a19a4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Representativeness(Grid).txt
@@ -0,0 +1,6 @@
+Representativeness|Representativeness (Grid)
+statistics_grid
+ParameterRaster|INPUT|Grid|False
+ParameterNumber|RADIUS|Radius (Cells)|1.0|None|10
+ParameterNumber|EXPONENT|Exponent|None|None|1
+OutputRaster|RESULT|Representativeness
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Resampling.txt b/python/plugins/processing/algs/saga/description/2.3.0/Resampling.txt
new file mode 100644
index 0000000..d07dffa
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Resampling.txt
@@ -0,0 +1,12 @@
+Resampling
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterBoolean|KEEP_TYPE|Preserve Data Type|True
+Hardcoded|-TARGET_DEFINITION 0
+ParameterSelection|SCALE_UP|Upscaling Method|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation;[5] Mean Value;[6] Mean Value (cell area weighted);[7] Minimum Value;[8] Maximum Value;[9] Majority
+ParameterSelection|SCALE_DOWN|Downscaling Method|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+ParameterRaster|TARGET_TEMPLATE|Target system|True
+OutputRaster|OUTPUT|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ResidualAnalysis(Grid).txt b/python/plugins/processing/algs/saga/description/2.3.0/ResidualAnalysis(Grid).txt
new file mode 100644
index 0000000..33471e7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ResidualAnalysis(Grid).txt
@@ -0,0 +1,18 @@
+Residual analysis|Residual Analysis (Grid)
+statistics_grid
+ParameterRaster|GRID|Grid|False
+ParameterSelection|MODE|Search Mode|[0] square;[1] circle|1
+ParameterNumber|RADIUS|Radius (Cells)|1.0|None|7
+ParameterBoolean|BCENTER|Include Center Cell|True
+ParameterSelection|DISTANCE_WEIGHTING_DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting|0
+ParameterNumber|DISTANCE_WEIGHTING_DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1
+ParameterBoolean|DISTANCE_WEIGHTING_DW_IDW_OFFSET|Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|0.0|None|1.0
+OutputRaster|MEAN|Mean Value
+OutputRaster|DIFF|Difference from Mean Value
+OutputRaster|STDDEV|Standard Deviation
+OutputRaster|RANGE|Value Range
+OutputRaster|MIN|Minimum Value
+OutputRaster|MAX|Maximum Value
+OutputRaster|DEVMEAN|Deviation from Mean Value
+OutputRaster|PERCENT|Percentile
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/RunningAverage.txt b/python/plugins/processing/algs/saga/description/2.3.0/RunningAverage.txt
new file mode 100644
index 0000000..c0e23d7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/RunningAverage.txt
@@ -0,0 +1,6 @@
+Running Average
+table_calculus
+ParameterTable|INPUT|Input|False
+ParameterTableField|FIELD|Attribute|INPUT|-1|False
+ParameterNumber|COUNT|Number of Records|0.0|None|10
+OutputTable|OUTPUT|Output
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SAGAWetnessIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/SAGAWetnessIndex.txt
new file mode 100644
index 0000000..564f34b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SAGAWetnessIndex.txt
@@ -0,0 +1,13 @@
+SAGA Wetness Index
+ta_hydrology
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|SUCTION|Suction|0.0|None|10.0
+ParameterSelection|AREA_TYPE|Type of Area|[0] absolute catchment area;[1] square root of catchment area;[2] specific catchment area
+ParameterSelection|SLOPE_TYPE|Type of Slope|[0] local slope;[1] catchment slope
+ParameterNumber|SLOPE_MIN|Minimum Slope|0.0|None|0.0
+ParameterNumber|SLOPE_OFF|Offset Slope|0.0|None|0.1
+ParameterNumber|SLOPE_WEIGHT|Slope Weighting|0.0|None|1.0
+OutputRaster|AREA|Catchment area
+OutputRaster|SLOPE|Catchment slope
+OutputRaster|AREA_MOD|Modified catchment area
+OutputRaster|TWI|Topographic Wetness Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SeedGeneration.txt b/python/plugins/processing/algs/saga/description/2.3.0/SeedGeneration.txt
new file mode 100644
index 0000000..be4f357
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SeedGeneration.txt
@@ -0,0 +1,11 @@
+Seed Generation
+imagery_segmentation
+ParameterMultipleInput|GRIDS|Features|3|False
+ParameterNumber|FACTOR|Bandwidth (Cells)|0.0|None|2
+ParameterSelection|TYPE_SURFACE|Type of Surface|[0] smoothed surface;[1] variance (a);[2] variance (b)
+ParameterSelection|TYPE_SEEDS|Extraction of...|[0] minima;[1] maxima;[2] minima and maxima
+ParameterSelection|TYPE_MERGE|Feature Aggregation|[0] additive;[1] multiplicative
+ParameterBoolean|NORMALIZE         |Normalized|True
+OutputRaster|SURFACE|Surface
+OutputRaster|SEEDS_GRID|Seeds Grid
+OutputVector|SEEDS|Seeds
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Separatepointsbydirection.txt b/python/plugins/processing/algs/saga/description/2.3.0/Separatepointsbydirection.txt
new file mode 100644
index 0000000..db1ec26
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Separatepointsbydirection.txt
@@ -0,0 +1,6 @@
+Separate points by direction
+shapes_points
+ParameterVector|POINTS|Points|0|False
+ParameterNumber|DIRECTIONS|Number of Directions|1.0|None|4
+ParameterNumber|TOLERANCE|Tolerance (Degree)|0.0|None|5
+OutputVector|OUTPUT|Point direction
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ShapesBuffer.txt b/python/plugins/processing/algs/saga/description/2.3.0/ShapesBuffer.txt
new file mode 100644
index 0000000..57e0051
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ShapesBuffer.txt
@@ -0,0 +1,10 @@
+Shapes Buffer (Attribute distance)|Shapes Buffer
+shapes_tools
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterTableField|DIST_FIELD|Buffer Distance|SHAPES|-1|False
+ParameterNumber|DIST_SCALE|Scaling Factor for Attribute Value|None|None|1.0
+ParameterNumber|NZONES|Number of Buffer Zones|1.0|None|1.0
+ParameterNumber|DARC|Arc Vertex Distance [Degree]|0.01|45.0|5.0
+ParameterBoolean|DISSOLVE       |Dissolve Buffers|True
+ParameterBoolean|POLY_INNER       |Inner Buffer|False
+OutputVector|BUFFER|Buffer
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ShapesBufferFixed.txt b/python/plugins/processing/algs/saga/description/2.3.0/ShapesBufferFixed.txt
new file mode 100644
index 0000000..c8c3878
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ShapesBufferFixed.txt
@@ -0,0 +1,9 @@
+Shapes Buffer (Fixed distance)|Shapes Buffer
+shapes_tools
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterNumber|DIST_FIELD_DEFAULT|Buffer distance|0.0|None|100.0
+ParameterNumber|NZONES|Number of Buffer Zones|1.0|None|1.0
+ParameterNumber|DARC|Arc Vertex Distance [Degree]|0.01|45.0|5.0
+ParameterBoolean|DISSOLVE       |Dissolve Buffers|True
+ParameterBoolean|POLY_INNER       |Inner Buffer|False
+OutputVector|BUFFER|Buffer
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ShapestoGrid.txt b/python/plugins/processing/algs/saga/description/2.3.0/ShapestoGrid.txt
new file mode 100644
index 0000000..7c8db2b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ShapestoGrid.txt
@@ -0,0 +1,14 @@
+Shapes to Grid
+grid_gridding
+Hardcoded|-TARGET_DEFINITION 0
+ParameterVector|INPUT|Shapes|-1|False
+ParameterTableField|FIELD|Attribute|INPUT|-1|False
+ParameterSelection|OUTPUT|Output Values|[0] data / no-data;[1] index number;[2] attribute|2
+ParameterSelection|MULTIPLE|Method for Multiple Values|[0] first;[1] last;[2] minimum;[3] maximum;[4] mean|4
+ParameterSelection|LINE_TYPE|Method for Lines|[0] thin;[1] thick
+ParameterSelection|POLY_TYPE|Method for Polygons|[0] node;[1] cell
+ParameterSelection|GRID_TYPE|Preferred Target Grid Type|[0] Integer (1 byte);[1] Integer (2 byte);[2] Integer (4 byte);[3] Floating Point (4 byte);[4] Floating Point (8 byte)|3
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|GRID|Rasterized
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SharedPolygonEdges.txt b/python/plugins/processing/algs/saga/description/2.3.0/SharedPolygonEdges.txt
new file mode 100644
index 0000000..c21fb21
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SharedPolygonEdges.txt
@@ -0,0 +1,8 @@
+Shared Polygon Edges
+shapes_polygons
+ParameterVector|POLYGONS|Polygons|2|False
+ParameterTableField|ATTRIBUTE|Attribute|POLYGONS|-1|False
+ParameterNumber|EPSILON|Tolerance|0|None|0.000
+ParameterBoolean|VERTICES|Check vertices|False
+ParameterBoolean|DOUBLE|Double edges|False
+OutputVector|EDGES|Edges
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ShrinkAndExpand.txt b/python/plugins/processing/algs/saga/description/2.3.0/ShrinkAndExpand.txt
new file mode 100644
index 0000000..8967545
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ShrinkAndExpand.txt
@@ -0,0 +1,8 @@
+Shrink and Expand
+grid_tools
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|OPERATION|Operation|[0] Shrink;[1] Expand;[2] shrink and expand;[3] expand and shrink|3
+ParameterSelection|CIRCLE|Search Mode|[0] Square;[1] Circle
+ParameterNumber|RADIUS|Radius|1|None|1
+ParameterSelection|EXPAND|Method|[0] min;[1] max;[2] mean;[3] majority|3
+OutputRaster|RESULT|Result Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SimpleFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/SimpleFilter.txt
new file mode 100644
index 0000000..580d42a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SimpleFilter.txt
@@ -0,0 +1,7 @@
+Simple Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterSelection|MODE|Search Mode|[0] Square;[1] Circle
+ParameterSelection|METHOD|Filter|[0] Smooth;[1] Sharpen;[2] Edge
+ParameterNumber|RADIUS|Radius|None|None|2
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SimpleRegionGrowing.txt b/python/plugins/processing/algs/saga/description/2.3.0/SimpleRegionGrowing.txt
new file mode 100644
index 0000000..9fd3d0a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SimpleRegionGrowing.txt
@@ -0,0 +1,14 @@
+Simple Region Growing
+imagery_segmentation
+ParameterRaster|SEEDS|Seeds|False
+ParameterMultipleInput|FEATURES|Features|3|False
+ParameterSelection|METHOD|Method|[0] feature space and position;[1] feature space
+ParameterSelection|NEIGHBOUR|Neighbourhood|[0] 4 (von Neumann);[1] 8 (Moore)
+ParameterNumber|SIG_1|Variance in Feature Space|None|None|1.0
+ParameterNumber|SIG_2|Variance in Position Space|None|None|1.0
+ParameterNumber|THRESHOLD|Threshold - Similarity|None|None|0.0
+ParameterBoolean|REFRESH         |Refresh|True
+ParameterNumber|LEAFSIZE|Leaf Size (for Speed Optimisation)|None|None|256
+OutputRaster|SEGMENTS|Segments
+OutputRaster|SIMILARITY|Similarity
+OutputTable|TABLE|Seeds
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Simulation.txt b/python/plugins/processing/algs/saga/description/2.3.0/Simulation.txt
new file mode 100644
index 0000000..61e9234
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Simulation.txt
@@ -0,0 +1,16 @@
+Simulation
+sim_fire_spreading
+ParameterRaster|DEM|DEM|False
+ParameterRaster|FUEL|Fuel Model|False
+ParameterRaster|WINDSPD|Wind Speed|False
+ParameterRaster|WINDDIR|Wind Direction|False
+ParameterRaster|M1H|Dead Fuel Moisture 1H|False
+ParameterRaster|M10H|Dead Fuel Moisture 10H|False
+ParameterRaster|M100H|Dead Fuel Moisture 100H|False
+ParameterRaster|MHERB|Herbaceous Fuel Moisture|False
+ParameterRaster|MWOOD|Wood Fuel Moisture|False
+ParameterRaster|IGNITION|Ignition Points|False
+ParameterBoolean|UPDATEVIEW     |Update View|True
+OutputRaster|TIME|Time
+OutputRaster|FLAME|Flame Length
+OutputRaster|INTENSITY|Intensity
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SinkDrainageRouteDetection.txt b/python/plugins/processing/algs/saga/description/2.3.0/SinkDrainageRouteDetection.txt
new file mode 100644
index 0000000..c9fc911
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SinkDrainageRouteDetection.txt
@@ -0,0 +1,6 @@
+Sink Drainage Route Detection
+ta_preprocessor
+ParameterRaster|ELEVATION|Elevation|False
+ParameterBoolean|THRESHOLD       |Threshold|True
+ParameterNumber|THRSHEIGHT|Threshold Height|None|None|100.0
+OutputRaster|SINKROUTE|Sink Route
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SinkRemoval.txt b/python/plugins/processing/algs/saga/description/2.3.0/SinkRemoval.txt
new file mode 100644
index 0000000..16ccd4d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SinkRemoval.txt
@@ -0,0 +1,8 @@
+Sink Removal
+ta_preprocessor
+ParameterRaster|DEM|DEM|False
+ParameterRaster|SINKROUTE|Sink Route|True
+ParameterSelection|METHOD|Method|[0] Deepen Drainage Routes;[1] Fill Sinks
+ParameterBoolean|THRESHOLD        |Threshold|True
+ParameterNumber|THRSHEIGHT|Threshold Height|None|None|100.0
+OutputRaster|DEM_PREPROC|Preprocessed DEM
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SkyViewFactor.txt b/python/plugins/processing/algs/saga/description/2.3.0/SkyViewFactor.txt
new file mode 100644
index 0000000..2114c28
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SkyViewFactor.txt
@@ -0,0 +1,12 @@
+Sky View Factor
+ta_lighting
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|RADIUS|Maximum Search Radius|0.0|None|10000
+ParameterSelection|METHOD|Method|[0] multi scale;[1] sectors
+ParameterNumber|DLEVEL|Multi Scale Factor|1.25|None|3.00
+ParameterNumber|NDIRS|Number of Sectors|3|None|8
+OutputRaster|VISIBLE|Visible Sky
+OutputRaster|SVF|Sky View Factor
+OutputRaster|SIMPLE|Sky View Factor (Simplified)
+OutputRaster|TERRAIN|Terrain View Factor
+OutputRaster|DISTANCE|View Distance
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Slope,Aspect,Curvature.txt b/python/plugins/processing/algs/saga/description/2.3.0/Slope,Aspect,Curvature.txt
new file mode 100644
index 0000000..e9d707c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Slope,Aspect,Curvature.txt
@@ -0,0 +1,18 @@
+Slope, Aspect, Curvature
+ta_morphometry
+ParameterRaster|ELEVATION|Elevation|False
+ParameterSelection|METHOD|Method|[0] Maximum Slope (Travis et al. 1975);[1] Maximum Triangle Slope (Tarboton 1997);[2] Least Squares Fitted Plane (Horn 1981, Costa-Cabral & Burgess 1996);[3] 6 parameter 2nd order polynom (Evans 1979);[4] 6 parameter 2nd order polynom (Heerdegen & Beran 1982);[5] 6 parameter 2nd order polynom (Bauer, Rohdenburg, Bork 1985);[6] 9 parameter 2nd order polynom (Zevenbergen & Thorne 1987);[7] 10 parameter 3rd order polynom (Haralick 1983)|6
+ParameterSelection|UNIT_SLOPE|Slope Units|[0] radians;[1] degree;[2] percent|1
+ParameterSelection|UNIT_ASPECT|Aspect Units|[0] radians;[1] degree|1
+OutputRaster|SLOPE|Slope
+OutputRaster|ASPECT|Aspect
+OutputRaster|C_GENE|General Curvature
+OutputRaster|C_PLAN|Plan Curvature
+OutputRaster|C_PROF|Profile Curvature
+OutputRaster|C_TANG|Tangential Curvature
+OutputRaster|C_LONG|Longitudinal Curvature
+OutputRaster|C_CROS|Cross-Sectional Curvature
+OutputRaster|C_MINI|Minimal Curvature
+OutputRaster|C_MAXI|Maximal Curvature
+OutputRaster|C_TOTA|Total Curvature
+OutputRaster|C_ROTO|Flow-Line Curvature
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SlopeLength.txt b/python/plugins/processing/algs/saga/description/2.3.0/SlopeLength.txt
new file mode 100644
index 0000000..f0e8d33
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SlopeLength.txt
@@ -0,0 +1,4 @@
+Slope Length
+ta_hydrology
+ParameterRaster|DEM|Elevation|False
+OutputRaster|LENGTH|Slope Length
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SoilTextureClassification.txt b/python/plugins/processing/algs/saga/description/2.3.0/SoilTextureClassification.txt
new file mode 100644
index 0000000..9752948
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SoilTextureClassification.txt
@@ -0,0 +1,7 @@
+Soil Texture Classification
+grid_analysis
+ParameterRaster|SAND|Sand|True
+ParameterRaster|SILT|Silt|True
+ParameterRaster|CLAY|Clay|True
+OutputRaster|TEXTURE|Soil Texture
+OutputRaster|SUM|Sum
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SpatialPointPatternAnalysis.txt b/python/plugins/processing/algs/saga/description/2.3.0/SpatialPointPatternAnalysis.txt
new file mode 100644
index 0000000..4a90797
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SpatialPointPatternAnalysis.txt
@@ -0,0 +1,7 @@
+Spatial Point Pattern Analysis
+statistics_points
+ParameterVector|POINTS|Points|0|False
+ParameterNumber|STEP|Vertex Distance [Degree]|None|None|5
+OutputVector|CENTRE|Mean Centre
+OutputVector|STDDIST|Standard Distance
+OutputVector|BBOX|Bounding Box
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SplitShapesLayerRandomly.txt b/python/plugins/processing/algs/saga/description/2.3.0/SplitShapesLayerRandomly.txt
new file mode 100644
index 0000000..8d2648b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SplitShapesLayerRandomly.txt
@@ -0,0 +1,7 @@
+Split Shapes Layer Randomly
+shapes_tools
+ParameterVector|SHAPES|Shapes|-1|False
+ParameterNumber|PERCENT|Split ratio (%)|0|100|50
+ParameterBoolean|EXACT       |Split exactly|True
+OutputVector|A|Group A
+OutputVector|B|Group B
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/StatisticsforGrids.txt b/python/plugins/processing/algs/saga/description/2.3.0/StatisticsforGrids.txt
new file mode 100644
index 0000000..e21300c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/StatisticsforGrids.txt
@@ -0,0 +1,14 @@
+Statistics for Grids
+statistics_grid
+ParameterMultipleInput|GRIDS|Grids|3|False
+ParameterNumber|PCTL_VAL|Percentile|0.0|100.0|50.0
+OutputRaster|MEAN|Arithmetic Mean
+OutputRaster|MIN|Minimum
+OutputRaster|MAX|Maximum
+OutputRaster|VAR|Variance
+OutputRaster|SUM|Sum
+OutputRaster|RANGE|Range
+OutputRaster|PCTL|Percentile
+OutputRaster|STDDEV|Standard Deviation
+OutputRaster|STDDEVLO|Mean less Standard Deviation
+OutputRaster|STDDEVHI|Mean plus Standard Deviation
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/StrahlerOrder.txt b/python/plugins/processing/algs/saga/description/2.3.0/StrahlerOrder.txt
new file mode 100644
index 0000000..87d9147
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/StrahlerOrder.txt
@@ -0,0 +1,4 @@
+Strahler Order
+ta_channels
+ParameterRaster|DEM|Elevation|False
+OutputRaster|STRAHLER|Strahler Order
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/StreamPowerIndex.txt b/python/plugins/processing/algs/saga/description/2.3.0/StreamPowerIndex.txt
new file mode 100644
index 0000000..e33da9b
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/StreamPowerIndex.txt
@@ -0,0 +1,6 @@
+Stream Power Index
+ta_hydrology
+ParameterRaster|SLOPE|Slope|False
+ParameterRaster|AREA|Catchment Area|False
+ParameterSelection|CONV|Area Conversion|[0] no conversion (areas already given as specific catchment area);[1] 1 / cell size (pseudo specific catchment area)
+OutputRaster|SPI|Stream Power Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SuccessiveFlowRouting.txt b/python/plugins/processing/algs/saga/description/2.3.0/SuccessiveFlowRouting.txt
new file mode 100644
index 0000000..93aea05
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SuccessiveFlowRouting.txt
@@ -0,0 +1,7 @@
+Successive Flow Routing
+sim_qm_of_esp
+ParameterRaster|DEM|DEM|False
+ParameterNumber|ITERATIONS|Iterations|1|None|100
+ParameterNumber|RUNOFF|Runoff|None|None|1.00
+ParameterNumber|MANNING|Manning's Roughness|None|None|0.20
+OutputRaster|FLOW|Flow
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SupervisedClassification.txt b/python/plugins/processing/algs/saga/description/2.3.0/SupervisedClassification.txt
new file mode 100644
index 0000000..96afab5
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SupervisedClassification.txt
@@ -0,0 +1,22 @@
+Supervised Classification
+imagery_classification
+ParameterMultipleInput|GRIDS|Grids|3.0|False
+ParameterVector|ROI|Training Areas|2|False
+ParameterTableField|ROI_ID|Class Identifier|ROI|-1|False
+ParameterTable|STATS|Class Statistics|False
+ParameterSelection|STATS_SRC|Get Class Statistics from...|[0] training areas;[1] table
+ParameterSelection|METHOD|Method|[0] Binary Encoding;[1] Parallelepiped;[2] Minimum Distance;[3] Mahalanobis Distance;[4] Maximum Likelihood;[5] Spectral Angle Mapping;[6] Winner Takes All
+ParameterBoolean|NORMALISE|Normalise|False
+ParameterNumber|THRESHOLD_DIST|Distance Threshold|0.0|None|0.0
+ParameterNumber|THRESHOLD_PROB|Probability Threshold (Percent)|0.0|100.0|0.0
+ParameterSelection|RELATIVE_PROB|Probability Reference|[0] absolute;[1] relative
+ParameterNumber|THRESHOLD_ANGLE|Spectral Angle Threshold (Degree)|0.0|90.0|0.0
+ParameterBoolean|WTA_0|Binary Encoding|False
+ParameterBoolean|WTA_1|Parallelepiped|False
+ParameterBoolean|WTA_2|Minimum Distance|False
+ParameterBoolean|WTA_3|Mahalanobis Distance|False
+ParameterBoolean|WTA_4|Maximum Likelihood|False
+ParameterBoolean|WTA_5|Spectral Angle Mapping|False
+OutputTable|CLASS_INFO|Class Information
+OutputRaster|CLASSES|Classification
+OutputRaster|QUALITY|Quality
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/SurfaceSpecificPoints.txt b/python/plugins/processing/algs/saga/description/2.3.0/SurfaceSpecificPoints.txt
new file mode 100644
index 0000000..b0762ce
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/SurfaceSpecificPoints.txt
@@ -0,0 +1,6 @@
+Surface Specific Points
+ta_morphometry
+ParameterRaster|ELEVATION|Elevation|False
+ParameterSelection|METHOD|Method|[0] Mark Highest Neighbour;[1] Opposite Neighbours;[2] Flow Direction;[3] Flow Direction (up and down);[4] Peucker & Douglas
+ParameterNumber|THRESHOLD|Threshold|None|None|2.0
+OutputRaster|RESULT|Result
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TPIBasedLandformClassification.txt b/python/plugins/processing/algs/saga/description/2.3.0/TPIBasedLandformClassification.txt
new file mode 100644
index 0000000..bfed438
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TPIBasedLandformClassification.txt
@@ -0,0 +1,12 @@
+TPI Based Landform Classification
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|RADIUS_A_MIN|Min Radius A|None|None|0
+ParameterNumber|RADIUS_A_MAX|Max Radius A|None|None|100
+ParameterNumber|RADIUS_B_MIN|Min Radius B|None|None|0
+ParameterNumber|RADIUS_B_MAX|Max Radius B|None|None|1000
+ParameterSelection|DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1
+ParameterBoolean|DW_IDW_OFFSET     |Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|75.0
+OutputRaster|LANDFORMS|Landforms
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TerrainRuggednessIndex(TRI).txt b/python/plugins/processing/algs/saga/description/2.3.0/TerrainRuggednessIndex(TRI).txt
new file mode 100644
index 0000000..bec50e9
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TerrainRuggednessIndex(TRI).txt
@@ -0,0 +1,9 @@
+Terrain Ruggedness Index (TRI)
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|RADIUS|Radius (Cells)|1.0|None|1
+ParameterSelection|DISTANCE_WEIGHTING_DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DISTANCE_WEIGHTING_DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1
+ParameterBoolean|DISTANCE_WEIGHTING_DW_IDW_OFFSET     |Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|0.0|None|1.0
+OutputRaster|TRI|Terrain Ruggedness Index (TRI)
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline(TIN).txt b/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline(TIN).txt
new file mode 100644
index 0000000..0ded5a7
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline(TIN).txt
@@ -0,0 +1,12 @@
+Thin Plate Spline (TIN)
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterNumber|REGULARISATION|Regularisation|0.0000|None|0.0001
+ParameterSelection|LEVEL|Neighbourhood|[0] immediate;[1] level 1;[2] level 2
+ParameterBoolean|FRAME|Add Frame|True
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline.txt b/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline.txt
new file mode 100644
index 0000000..f55c020
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ThinPlateSpline.txt
@@ -0,0 +1,17 @@
+Thin Plate Spline
+grid_spline
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+ParameterNumber|REGULARISATION|Regularisation|0.0000|None|0.0001
+ParameterSelection|SEARCH_RANGE|Search Range|[0] local;[1] global
+ParameterNumber|SEARCH_RADIUS|Search Radius|0.0|None|1000.0
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum Number of Points|1|None|16
+ParameterNumber|SEARCH_POINTS_MAX|Maximum Number of Points|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+ParameterRaster|TARGET_TEMPLATE|Target system|True
+OutputRaster|TARGET_OUT_GRID|Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ThresholdBuffer.txt b/python/plugins/processing/algs/saga/description/2.3.0/ThresholdBuffer.txt
new file mode 100644
index 0000000..ef26b40
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ThresholdBuffer.txt
@@ -0,0 +1,8 @@
+Threshold Buffer
+grid_tools
+ParameterRaster|FEATURES|Features Grid|False
+ParameterRaster|VALUE|Value Grid|False
+ParameterRaster|THRESHOLDGRID|Threshold Grid|True
+ParameterNumber|THRESHOLD|Threshold|None|None|0.0
+ParameterSelection|THRESHOLDTYPE|Threshold Type|[0] Absolute;[1] Relative from cell value
+OutputRaster|BUFFER|Buffer Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TopographicCorrection.txt b/python/plugins/processing/algs/saga/description/2.3.0/TopographicCorrection.txt
new file mode 100644
index 0000000..7f80c24
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TopographicCorrection.txt
@@ -0,0 +1,11 @@
+Topographic Correction
+ta_lighting
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|ORIGINAL|Original Image|False
+ParameterNumber|AZI|Azimuth|None|None|180.0
+ParameterNumber|HGT|Height|None|None|45.0
+ParameterSelection|METHOD|Method|[0] Cosine Correction (Teillet et al. 1982);[1] Cosine Correction (Civco 1989);[2] Minnaert Correction;[3] Minnaert Correction with Slope (Riano et al. 2003);[4] Minnaert Correction with Slope (Law & Nichol 2004);[5] C Correction;[6] Normalization (after Civco, modified by Law & Nichol)
+ParameterNumber|MINNAERT|Minnaert Correction|None|None|0.5
+ParameterNumber|MAXCELLS|Maximum Cells (C Correction Analysis)|None|None|1000
+ParameterSelection|MAXVALUE|Value Range|[0] 1 byte (0-255);[1] 2 byte (0-65535)
+OutputRaster|CORRECTED|Corrected Image
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TopographicPositionIndex(TPI).txt b/python/plugins/processing/algs/saga/description/2.3.0/TopographicPositionIndex(TPI).txt
new file mode 100644
index 0000000..13a397f
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TopographicPositionIndex(TPI).txt
@@ -0,0 +1,11 @@
+Topographic Position Index (TPI)
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterBoolean|STANDARD                          |Standardize|True
+ParameterNumber|RADIUS_MIN|Min Radius|0.0|None|0.0
+ParameterNumber|RADIUS_MAX|Max Radius|None|None|100.0
+ParameterSelection|DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DW_IDW_POWER|Inverse Distance Weighting Power|None|None|1
+ParameterBoolean|DW_IDW_OFFSET     |Inverse Distance Offset|True
+ParameterNumber|DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|None|None|75.0
+OutputRaster|TPI|Topographic Position Index
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TopographicWetnessIndex(TWI).txt b/python/plugins/processing/algs/saga/description/2.3.0/TopographicWetnessIndex(TWI).txt
new file mode 100644
index 0000000..b2d63cc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TopographicWetnessIndex(TWI).txt
@@ -0,0 +1,8 @@
+Topographic Wetness Index (TWI)
+ta_hydrology
+ParameterRaster|SLOPE|Slope|False
+ParameterRaster|AREA|Catchment Area|False
+ParameterRaster|TRANS|Transmissivity|True
+ParameterSelection|CONV|Area Conversion|[0] no conversion (areas already given as specific catchment area);[1] 1 / cell size (pseudo specific catchment area)|1
+ParameterSelection|METHOD|Method (TWI)|[0] Standard;[1] TOPMODEL
+OutputRaster|TWI|Topographic Wetness Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Transectthroughpolygonshapefile.txt b/python/plugins/processing/algs/saga/description/2.3.0/Transectthroughpolygonshapefile.txt
new file mode 100644
index 0000000..99eb665
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Transectthroughpolygonshapefile.txt
@@ -0,0 +1,6 @@
+Transect through polygon shapefile
+shapes_transect
+ParameterVector|TRANSECT|Line Transect(s)|1|False
+ParameterVector|THEME|Theme|-1|False
+ParameterTableField|THEME_FIELD|Theme Field|THEME|-1|False
+OutputTable|TRANSECT_RESULT|Result table
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TransformShapes.txt b/python/plugins/processing/algs/saga/description/2.3.0/TransformShapes.txt
new file mode 100644
index 0000000..40e5862
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TransformShapes.txt
@@ -0,0 +1,11 @@
+Transform Shapes
+shapes_tools
+ParameterVector|IN|Shapes|-1|False
+ParameterNumber|DX|dX|None|None|0.0
+ParameterNumber|DY|dY|None|None|0.0
+ParameterNumber|ANGLE|Angle|None|None|0.0
+ParameterNumber|SCALEX|Scale Factor X|None|None|1.0
+ParameterNumber|SCALEY|Scale Factor Y|None|None|1.0
+ParameterNumber|ANCHORX|X|None|None|0.0
+ParameterNumber|ANCHORY|Y|None|None|0.0
+OutputVector|OUT|Transformed
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/TransposeGrids.txt b/python/plugins/processing/algs/saga/description/2.3.0/TransposeGrids.txt
new file mode 100644
index 0000000..8ff6bff
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/TransposeGrids.txt
@@ -0,0 +1,6 @@
+Transpose Grids
+grid_tools
+ParameterRaster|GRIDS|Input Grid|False
+ParameterBoolean|MIRROR_X|Mirror Horizontally|False
+ParameterBoolean|MIRROR_Y|Mirror Vertically|False
+OutputRaster|TRANSPOSED|Transposed Grid
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/Triangulation.txt b/python/plugins/processing/algs/saga/description/2.3.0/Triangulation.txt
new file mode 100644
index 0000000..39a2dcb
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/Triangulation.txt
@@ -0,0 +1,9 @@
+Triangulation
+grid_gridding
+ParameterVector|SHAPES|Points|0|False
+ParameterTableField|FIELD|Attribute|SHAPES|-1|False
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|TARGET_OUT_GRID|Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging(Global).txt b/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging(Global).txt
new file mode 100644
index 0000000..c27e4e4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging(Global).txt
@@ -0,0 +1,32 @@
+Regression Kriging
+statistics_kriging
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterMultipleInput|PREDICTORS|Predictors|3|False
+OutputRaster|REGRESSION|Regression
+OutputRaster|PREDICTION|Prediction
+OutputRaster|RESIDUALS|Residuals
+OutputRaster|VARIANCE|Quality Measure
+ParameterSelection|TQUALITY|Type of Quality Measure|[0] standard deviation;[1] variance
+ParameterBoolean|LOG|Logarithmic Transformation|True
+ParameterBoolean|BLOCK|Block Kriging|True
+ParameterNumber|DBLOCK|Block Size|0|None|100
+ParameterNumber|VAR_MAXDIST|Maximum Distance|None|None|-1.0
+ParameterNumber|VAR_NCLASSES|Lag Distance Classes|1|None|100
+ParameterNumber|VAR_NSKIP|Skip|1|None|1
+ParameterString|VAR_MODEL|Variogram Model|a + b * x
+OutputTable|INFO_COEFF|Regression: Coefficients
+OutputTable|INFO_MODEL|Regression: Model
+OutputTable|INFO_STEPS|Regression: Steps
+ParameterBoolean|COORD_X|Include X Coordinate|False
+ParameterBoolean|COORD_Y|Include Y Coordinate|False
+ParameterBoolean|INTERCEPT|Intercept|True
+ParameterSelection|METHOD|Method|[0] include all;[1] forward;[2] backward;[3] stepwise|3
+ParameterNumber|P_VALUE|Significance Level|0|100.0|5.0
+ParameterSelection|INTERPOL|Grid Interpolation|[0] Nearest Neighbor;[1] Bilinear Interpolation;[2] Inverse Distance Interpolation;[3] Bicubic Spline Interpolation;[4] B-Spline Interpolation
+ParameterSelection|SEARCH_RANGE|Search Range|[0] local;[1] global
+ParameterNumber|SEARCH_RADIUS|Maximum Search Distance|0|None|1000
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging.txt b/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging.txt
new file mode 100644
index 0000000..45d4d35
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/UniversalKriging.txt
@@ -0,0 +1,24 @@
+Simple Kriging
+statistics_kriging
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterSelection|TQUALITY|Type of Quality Measure|[0] standard deviation;[1] variance
+ParameterBoolean|LOG|Logarithmic Transformation|True
+ParameterBoolean|BLOCK|Block Kriging|True
+ParameterNumber|DBLOCK|Block Size|0|None|100
+ParameterNumber|VAR_MAXDIST|Maximum Distance|None|None|-1.0
+ParameterNumber|VAR_NCLASSES|Lag Distance Classes|1|None|100
+ParameterNumber|VAR_NSKIP|Skip|1|None|1
+ParameterString|VAR_MODEL|Variogram Model|a + b * x
+Hardcoded|-TARGET_DEFINITION 0
+Extent TARGET_USER_XMIN TARGET_USER_XMAX TARGET_USER_YMIN TARGET_USER_YMAX
+ParameterNumber|TARGET_USER_SIZE|Cellsize|None|None|100.0
+ParameterSelection|TARGET_USER_FITS|Fit|[0] nodes;[1] cells
+OutputRaster|PREDICTION|Prediction
+OutputRaster|VARIANCE|Quality Measure
+ParameterSelection|SEARCH_RANGE|Search Range|[0] local;[1] global
+ParameterNumber|SEARCH_RADIUS|Maximum Search Distance|0|None|1000
+ParameterSelection|SEARCH_POINTS_ALL|Number of Points|[0] maximum number of nearest points;[1] all points within search distance
+ParameterNumber|SEARCH_POINTS_MIN|Minimum|1|None|4
+ParameterNumber|SEARCH_POINTS_MAX|Maximum|1|None|20
+ParameterSelection|SEARCH_DIRECTION|Search Direction|[0] all directions;[1] quadrants
\ No newline at end of file
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/UpslopeArea.txt b/python/plugins/processing/algs/saga/description/2.3.0/UpslopeArea.txt
new file mode 100644
index 0000000..903e4ba
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/UpslopeArea.txt
@@ -0,0 +1,10 @@
+Upslope Area|4
+ta_hydrology
+ParameterRaster|TARGET|Target Area|True
+ParameterNumber|TARGET_PT_X|Target X coordinate|None|None|0.0
+ParameterNumber|TARGET_PT_Y|Target Y coordinate|None|None|0.0
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|SINKROUTE|Sink Routes|True
+ParameterSelection|METHOD|Method|[0] Deterministic 8;[1] Deterministic Infinity;[2] Multiple Flow Direction
+ParameterNumber|CONVERGE|Convergence|None|None|1.1
+OutputRaster|AREA|Upslope Area
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/UserDefinedFilter.txt b/python/plugins/processing/algs/saga/description/2.3.0/UserDefinedFilter.txt
new file mode 100644
index 0000000..e28c9e4
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/UserDefinedFilter.txt
@@ -0,0 +1,6 @@
+User Defined Filter
+grid_filter
+ParameterRaster|INPUT|Grid|False
+ParameterTable|FILTER|Filter Matrix|True
+ParameterFixedTable|FILTER_3X3|Default Filter Matrix (3x3)|3|1;2;3|True
+OutputRaster|RESULT|Filtered Grid
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VariogramCloud.txt b/python/plugins/processing/algs/saga/description/2.3.0/VariogramCloud.txt
new file mode 100644
index 0000000..74d1463
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VariogramCloud.txt
@@ -0,0 +1,7 @@
+Variogram Cloud
+statistics_points
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterNumber|DISTMAX|Maximum Distance|None|None|0.0
+ParameterNumber|NSKIP|Skip Number|None|None|1
+OutputTable|RESULT|Variogram Cloud
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VariogramSurface.txt b/python/plugins/processing/algs/saga/description/2.3.0/VariogramSurface.txt
new file mode 100644
index 0000000..4f09961
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VariogramSurface.txt
@@ -0,0 +1,9 @@
+Variogram Surface
+statistics_points
+ParameterVector|POINTS|Points|0|False
+ParameterTableField|FIELD|Attribute|POINTS|-1|False
+ParameterNumber|DISTCOUNT|Number of Distance Classes|1.0|None|10
+ParameterNumber|NSKIP|Skip Number|None|None|1
+OutputRaster|COUNT|Number of Pairs
+OutputRaster|VARIANCE|Variogram Surface
+OutputRaster|COVARIANCE|Covariance Surface
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VectorRuggednessMeasure(VRM).txt b/python/plugins/processing/algs/saga/description/2.3.0/VectorRuggednessMeasure(VRM).txt
new file mode 100644
index 0000000..072311d
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VectorRuggednessMeasure(VRM).txt
@@ -0,0 +1,9 @@
+Vector Ruggedness Measure (VRM)
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterNumber|RADIUS|Radius (Cells)|None|None|1
+ParameterSelection|DISTANCE_WEIGHTING_DW_WEIGHTING|Distance Weighting|[0] no distance weighting;[1] inverse distance to a power;[2] exponential;[3] gaussian weighting
+ParameterNumber|DISTANCE_WEIGHTING_DW_IDW_POWER|Inverse Distance Weighting Power|0.0|None|1
+ParameterBoolean|DISTANCE_WEIGHTING_DW_IDW_OFFSET     |Inverse Distance Offset|True
+ParameterNumber|DISTANCE_WEIGHTING_DW_BANDWIDTH|Gaussian and Exponential Weighting Bandwidth|0.0|None|1
+OutputRaster|VRM|Vector Terrain Ruggedness (VRM)
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VectorisingGridClasses.txt b/python/plugins/processing/algs/saga/description/2.3.0/VectorisingGridClasses.txt
new file mode 100644
index 0000000..d3e37cd
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VectorisingGridClasses.txt
@@ -0,0 +1,7 @@
+Vectorising Raster Classes
+shapes_grid
+ParameterRaster|GRID|Grid|False
+ParameterSelection|CLASS_ALL|Class Selection|[0] one single class specified by class identifier;[1] all classes|1
+ParameterNumber|CLASS_ID|Class Identifier|None|None|0
+ParameterSelection|SPLIT|Vectorised class as...|[0] one single (multi-)polygon object;[1] each island as separated polygon|1
+OutputVector|POLYGONS|Vectorized
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VegetationIndex(SlopeBased).txt b/python/plugins/processing/algs/saga/description/2.3.0/VegetationIndex(SlopeBased).txt
new file mode 100644
index 0000000..fd89e6a
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VegetationIndex(SlopeBased).txt
@@ -0,0 +1,13 @@
+Vegetation Index (Slope Based)
+imagery_tools
+ParameterRaster|NIR|Near Infrared Reflectance|False
+ParameterRaster|RED|Red Reflectance|False
+ParameterNumber|SOIL|Soil Adjustment Factor|0.0|1.0|0.5
+OutputRaster|DVI|Difference Vegetation Index
+OutputRaster|NDVI|Normalized Difference Vegetation Index
+OutputRaster|RVI|Ratio Vegetation Index
+OutputRaster|NRVI|Normalized Ratio Vegetation Index
+OutputRaster|TVI|Transformed Vegetation Index
+OutputRaster|CTVI|Corrected Transformed Vegetation Index
+OutputRaster|TTVI|Thiam's Transformed Vegetation Index
+OutputRaster|SAVI|Soil Adjusted Vegetation Index
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/VerticalDistancetoChannelNetwork.txt b/python/plugins/processing/algs/saga/description/2.3.0/VerticalDistancetoChannelNetwork.txt
new file mode 100644
index 0000000..5e1c7b2
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/VerticalDistancetoChannelNetwork.txt
@@ -0,0 +1,8 @@
+Vertical Distance to Channel Network
+ta_channels
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|CHANNELS|Channel Network|False
+ParameterNumber|THRESHOLD|Tension Threshold [Percentage of Cell Size]|None|None|1
+ParameterBoolean|NOUNDERGROUND  |Keep Base Level below Surface|True
+OutputRaster|DISTANCE|Vertical Distance to Channel Network
+OutputRaster|BASELEVEL|Channel Network Base Level
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/WaterRetentionCapacity.txt b/python/plugins/processing/algs/saga/description/2.3.0/WaterRetentionCapacity.txt
new file mode 100644
index 0000000..ccc8c6c
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/WaterRetentionCapacity.txt
@@ -0,0 +1,6 @@
+Water Retention Capacity
+sim_hydrology
+ParameterVector|SHAPES|Plot Holes|-1|False
+ParameterRaster|DEM|DEM|False
+OutputVector|OUTPUT|Final Parameters
+OutputRaster|RETENTION|Water Retention Capacity
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/WatershedBasins.txt b/python/plugins/processing/algs/saga/description/2.3.0/WatershedBasins.txt
new file mode 100644
index 0000000..d79f537
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/WatershedBasins.txt
@@ -0,0 +1,7 @@
+Watershed Basins
+ta_channels
+ParameterRaster|ELEVATION|Elevation|False
+ParameterRaster|CHANNELS|Channel Network|False
+ParameterRaster|SINKROUTE|Sink Route|True
+ParameterNumber|MINSIZE|Min. Size|None|None|0
+OutputRaster|BASINS|Watershed Basins
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/WatershedSegmentation.txt b/python/plugins/processing/algs/saga/description/2.3.0/WatershedSegmentation.txt
new file mode 100644
index 0000000..02edfa1
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/WatershedSegmentation.txt
@@ -0,0 +1,12 @@
+Watershed Segmentation
+imagery_segmentation
+ParameterRaster|GRID|Grid|False
+ParameterSelection|OUTPUT|Output|[0] Seed Value;[1] Segment ID
+ParameterSelection|DOWN|Method|[0] Minima;[1] Maxima
+ParameterSelection|JOIN|Join Segments based on Threshold Value|[0] do not join;[1] seed to saddle difference;[2] seeds difference
+ParameterNumber|THRESHOLD|Threshold|None|None|0
+ParameterBoolean|EDGE           |Allow Edge Pixels to be Seeds|True
+ParameterBoolean|BBORDERS       |Borders|True
+OutputRaster|SEGMENTS|Segments
+OutputVector|SEEDS|Seed Points
+OutputRaster|BORDERS|Borders
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/WindEffect(WindwardLeewardIndex).txt b/python/plugins/processing/algs/saga/description/2.3.0/WindEffect(WindwardLeewardIndex).txt
new file mode 100644
index 0000000..57495dc
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/WindEffect(WindwardLeewardIndex).txt
@@ -0,0 +1,15 @@
+Wind effect|Wind Effect (Windward / Leeward Index)
+ta_morphometry
+ParameterRaster|DEM|Elevation|False
+ParameterRaster|DIR|Wind Direction|True
+ParameterRaster|LEN|Wind Speed|True
+ParameterNumber|DIR_CONST|Constant Wind Direction [Degree]|None|None|135
+ParameterBoolean|OLDVER         |Old Version|True
+ParameterNumber|MAXDIST|Search Distance [km]|0.0|None|300.0
+ParameterNumber|ACCEL|Acceleration|0.0|None|1.5
+ParameterBoolean|PYRAMIDS       |Use Pyramids|True
+ParameterSelection|DIR_UNITS|Wind Direction Units|[0] radians;[1] degree
+ParameterNumber|LEN_SCALE|Wind Speed Scale Factor|0.0|None|1.0
+OutputRaster|EFFECT|Wind Effect
+OutputRaster|LUV|Windward Effect
+OutputRaster|LEE|Leeward Effect
diff --git a/python/plugins/processing/algs/saga/description/2.3.0/ZonalGridStatistics.txt b/python/plugins/processing/algs/saga/description/2.3.0/ZonalGridStatistics.txt
new file mode 100644
index 0000000..10d91c8
--- /dev/null
+++ b/python/plugins/processing/algs/saga/description/2.3.0/ZonalGridStatistics.txt
@@ -0,0 +1,8 @@
+Zonal Grid Statistics
+statistics_grid
+ParameterRaster|ZONES|Zone Grid|False
+ParameterMultipleInput|CATLIST|Categorical Grids|3|True
+ParameterMultipleInput|STATLIST|Grids to analyse|3|True
+ParameterRaster|ASPECT|Aspect|True
+ParameterBoolean|SHORTNAMES|Short Field Names|True
+OutputTable|OUTTAB|Zonal Statistics
diff --git a/python/plugins/processing/gui/ProcessingToolbox.py b/python/plugins/processing/gui/ProcessingToolbox.py
index 503c736..aa6097d 100644
--- a/python/plugins/processing/gui/ProcessingToolbox.py
+++ b/python/plugins/processing/gui/ProcessingToolbox.py
@@ -180,9 +180,9 @@ class ProcessingToolbox(BASE, WIDGET):
 
     def showPopupMenu(self, point):
         item = self.algorithmTree.itemAt(point)
+        popupmenu = QMenu()
         if isinstance(item, TreeAlgorithmItem):
             alg = item.alg
-            popupmenu = QMenu()
             executeAction = QAction(self.tr('Execute'), self.algorithmTree)
             executeAction.triggered.connect(self.executeAlgorithm)
             popupmenu.addAction(executeAction)
diff --git a/python/plugins/processing/tools/vector.py b/python/plugins/processing/tools/vector.py
index 1ae8dc4..32e1102 100644
--- a/python/plugins/processing/tools/vector.py
+++ b/python/plugins/processing/tools/vector.py
@@ -113,12 +113,20 @@ def features(layer, request=QgsFeatureRequest()):
         def __iter__(self):
             return self.iter
 
+        def __next__(self):
+            '''Iterator next method in python 3'''
+            return next(self.iter)
+
         def __len__(self):
             if self.selection:
                 return int(self.layer.selectedFeatureCount())
             else:
                 return int(self.layer.featureCount())
 
+        def next(self):
+            '''Iterator next method in python 2'''
+            return self.__next__()
+
     return Features(layer, request)
 
 
diff --git a/rpm/qgis.spec.template b/rpm/qgis.spec.template
index 07a9f7d..0258b09 100644
--- a/rpm/qgis.spec.template
+++ b/rpm/qgis.spec.template
@@ -214,11 +214,8 @@ gzip ChangeLog
       -D BINDINGS_GLOBAL_INSTALL:BOOL=TRUE \
       -D ENABLE_TESTS:BOOL=FALSE \
       -D WITH_INTERNAL_DATEUTIL:BOOL=FALSE \
-      -D WITH_INTERNAL_HTTPLIB2:BOOL=FALSE \
-      -D WITH_INTERNAL_JINJA2:BOOL=FALSE \
       -D WITH_INTERNAL_MARKUPSAFE:BOOL=FALSE \
       -D WITH_INTERNAL_OWSLIB:BOOL=FALSE \
-      -D WITH_INTERNAL_PYGMENTS:BOOL=FALSE \
       -D WITH_INTERNAL_PYTZ:BOOL=FALSE \
       -D WITH_INTERNAL_QEXTSERIALPORT:BOOL=FALSE \
       -D WITH_INTERNAL_QWTPOLAR:BOOL=FALSE \
diff --git a/src/app/composer/qgscomposer.cpp b/src/app/composer/qgscomposer.cpp
index ce4ffa0..6a2fd65 100644
--- a/src/app/composer/qgscomposer.cpp
+++ b/src/app/composer/qgscomposer.cpp
@@ -1753,7 +1753,7 @@ void QgsComposer::exportCompositionAsPDF( QgsComposer::OutputMode mode )
       if ( !printReady )
       {
         QMessageBox::warning( this, tr( "Atlas processing error" ),
-                              QString( tr( "Error creating %1." ) ).arg( outputFileName ),
+                              QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( outputFileName ),
                               QMessageBox::Ok,
                               QMessageBox::Ok );
         mView->setPaintingEnabled( true );
@@ -1799,7 +1799,7 @@ void QgsComposer::exportCompositionAsPDF( QgsComposer::OutputMode mode )
         if ( !printReady )
         {
           QMessageBox::warning( this, tr( "Atlas processing error" ),
-                                QString( tr( "Error creating %1." ) ).arg( outputFileName ),
+                                QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( outputFileName ),
                                 QMessageBox::Ok,
                                 QMessageBox::Ok );
           mView->setPaintingEnabled( true );
@@ -1830,7 +1830,7 @@ void QgsComposer::exportCompositionAsPDF( QgsComposer::OutputMode mode )
     if ( !exportOk )
     {
       QMessageBox::warning( this, tr( "Atlas processing error" ),
-                            QString( tr( "Error creating %1." ) ).arg( outputFileName ),
+                            QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( outputFileName ),
                             QMessageBox::Ok,
                             QMessageBox::Ok );
       mView->setPaintingEnabled( true );
@@ -2146,7 +2146,7 @@ void QgsComposer::exportCompositionAsImage( QgsComposer::OutputMode mode )
       if ( !saveOk )
       {
         QMessageBox::warning( this, tr( "Image export error" ),
-                              QString( tr( "Error creating %1." ) ).arg( fileNExt.first ),
+                              QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( fileNExt.first ),
                               QMessageBox::Ok,
                               QMessageBox::Ok );
         mView->setPaintingEnabled( true );
@@ -2371,7 +2371,7 @@ void QgsComposer::exportCompositionAsImage( QgsComposer::OutputMode mode )
         if ( !saveOk )
         {
           QMessageBox::warning( this, tr( "Atlas processing error" ),
-                                QString( tr( "Error creating %1." ) ).arg( imageFilename ),
+                                QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( imageFilename ),
                                 QMessageBox::Ok,
                                 QMessageBox::Ok );
           mView->setPaintingEnabled( true );
@@ -2721,7 +2721,7 @@ void QgsComposer::exportCompositionAsSVG( QgsComposer::OutputMode mode )
         if ( !createOk )
         {
           QMessageBox::warning( this, tr( "SVG export error" ),
-                                QString( tr( "Error creating %1." ) ).arg( currentFileName ),
+                                QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( currentFileName ),
                                 QMessageBox::Ok,
                                 QMessageBox::Ok );
           mView->setPaintingEnabled( true );
@@ -2879,7 +2879,7 @@ void QgsComposer::exportCompositionAsSVG( QgsComposer::OutputMode mode )
         if ( !openOk )
         {
           QMessageBox::warning( this, tr( "SVG export error" ),
-                                QString( tr( "Error creating %1." ) ).arg( currentFileName ),
+                                QString( tr( "Cannot write to %1.\n\nThis file may be open in another application." ) ).arg( currentFileName ),
                                 QMessageBox::Ok,
                                 QMessageBox::Ok );
           mView->setPaintingEnabled( true );
diff --git a/src/app/main.cpp b/src/app/main.cpp
index 2ba0afa..3294cbff 100644
--- a/src/app/main.cpp
+++ b/src/app/main.cpp
@@ -806,7 +806,12 @@ int main( int argc, char *argv[] )
 
   QgsApplication myApp( argc, argv, myUseGuiFlag, configpath );
 
+#ifdef Q_OS_MAC
+  // Set 1024x1024 icon for dock, app switcher, etc., rendering
+  myApp.setWindowIcon( QIcon( QgsApplication::iconsPath() + QLatin1String( "qgis-icon-macos.png" ) ) );
+#else
   myApp.setWindowIcon( QIcon( QgsApplication::appIconPath() ) );
+#endif
 
   //
   // Set up the QSettings environment must be done after qapp is created
diff --git a/src/app/qgisapp.cpp b/src/app/qgisapp.cpp
index bcd7a46..55cffc7 100644
--- a/src/app/qgisapp.cpp
+++ b/src/app/qgisapp.cpp
@@ -1426,14 +1426,9 @@ QgisAppStyleSheet* QgisApp::styleSheetBuilder()
   return mStyleSheetBuilder;
 }
 
-// restore any application settings stored in QSettings
-void QgisApp::readSettings()
+void QgisApp::readRecentProjects()
 {
   QSettings settings;
-  QString themename = settings.value( "UI/UITheme", "default" ).toString();
-  setTheme( themename );
-
-  // Read legacy settings
   mRecentProjects.clear();
 
   settings.beginGroup( "/UI" );
@@ -1477,6 +1472,16 @@ void QgisApp::readSettings()
     mRecentProjects.append( data );
   }
   settings.endGroup();
+}
+
+void QgisApp::readSettings()
+{
+  QSettings settings;
+  QString themename = settings.value( "UI/UITheme", "default" ).toString();
+  setTheme( themename );
+
+  // Read legacy settings
+  readRecentProjects();
 
   // this is a new session! reset enable macros value to "ask"
   // whether set to "just for this session"
@@ -3258,6 +3263,10 @@ void QgisApp::updateRecentProjectPaths()
 // add this file to the recently opened/saved projects list
 void QgisApp::saveRecentProjectPath( const QString& projectPath, bool savePreviewImage )
 {
+  // first, re-read the recent project paths. This prevents loss of recent
+  // projects when multiple QGIS sessions are open
+  readRecentProjects();
+
   QSettings settings;
 
   // Get canonical absolute path
diff --git a/src/app/qgisapp.h b/src/app/qgisapp.h
index 9bc115c..cf2d3fd 100644
--- a/src/app/qgisapp.h
+++ b/src/app/qgisapp.h
@@ -1472,6 +1472,9 @@ class APP_EXPORT QgisApp : public QMainWindow, private Ui::MainWindow
     /** Copy a vector style from a layer to another one, if they have the same geometry type */
     void duplicateVectorStyle( QgsVectorLayer* srcLayer, QgsVectorLayer* destLayer );
 
+    //! Loads the list of recent projects from settings
+    void readRecentProjects();
+
     QgisAppStyleSheet *mStyleSheetBuilder;
 
     // actions for menus and toolbars -----------------
diff --git a/src/app/qgsalignrasterdialog.cpp b/src/app/qgsalignrasterdialog.cpp
index dcb08b2..8d56f4d 100644
--- a/src/app/qgsalignrasterdialog.cpp
+++ b/src/app/qgsalignrasterdialog.cpp
@@ -380,6 +380,7 @@ void QgsAlignRasterDialog::runAlign()
 
 QgsAlignRasterLayerConfigDialog::QgsAlignRasterLayerConfigDialog()
 {
+  setWindowTitle( tr( "Configure Layer Resampling" ) );
   QVBoxLayout* layout = new QVBoxLayout();
 
   cboLayers = new QgsMapLayerComboBox( this );
diff --git a/src/app/qgsattributetabledialog.cpp b/src/app/qgsattributetabledialog.cpp
index 28f683f..ff0fc4f 100644
--- a/src/app/qgsattributetabledialog.cpp
+++ b/src/app/qgsattributetabledialog.cpp
@@ -131,8 +131,10 @@ QgsAttributeTableDialog::QgsAttributeTableDialog( QgsVectorLayer *theLayer, QWid
   mEditorContext.setVectorLayerTools( QgisApp::instance()->vectorLayerTools() );
 
   QgsFeatureRequest r;
+  bool needsGeom = false;
+  QgsAttributeTableFilterModel::FilterMode initialMode = static_cast< QgsAttributeTableFilterModel::FilterMode>( settings.value( QString( "/qgis/attributeTableBehaviour" ), QgsAttributeTableFilterModel::ShowAll ).toInt() );
   if ( mLayer->geometryType() != QGis::NoGeometry &&
-       settings.value( "/qgis/attributeTableBehaviour", QgsAttributeTableFilterModel::ShowAll ).toInt() == QgsAttributeTableFilterModel::ShowVisible )
+       initialMode == QgsAttributeTableFilterModel::ShowVisible )
   {
     QgsMapCanvas *mc = QgisApp::instance()->mapCanvas();
     QgsRectangle extent( mc->mapSettings().mapToLayerCoordinates( theLayer, mc->extent() ) );
@@ -144,10 +146,18 @@ QgsAttributeTableDialog::QgsAttributeTableDialog( QgsVectorLayer *theLayer, QWid
     delete g;
 
     mActionShowAllFilter->setText( tr( "Show All Features In Initial Canvas Extent" ) );
+    needsGeom = true;
   }
+  else if ( initialMode == QgsAttributeTableFilterModel::ShowSelected )
+  {
+    if ( theLayer->selectedFeatureCount() > 0 )
+      r.setFilterFids( theLayer->selectedFeaturesIds() );
+  }
+  if ( !needsGeom )
+    r.setFlags( QgsFeatureRequest::NoGeometry );
 
   // Initialize dual view
-  mMainView->init( mLayer, QgisApp::instance()->mapCanvas(), r, mEditorContext );
+  mMainView->init( mLayer, QgisApp::instance()->mapCanvas(), r, mEditorContext, false );
 
   QgsAttributeTableConfig config = mLayer->attributeTableConfig();
   mMainView->setAttributeTableConfig( config );
@@ -322,7 +332,7 @@ void QgsAttributeTableDialog::updateTitle()
   QWidget *w = mDock ? qobject_cast<QWidget*>( mDock ) : qobject_cast<QWidget*>( this );
   w->setWindowTitle( tr( " %1 :: Features total: %2, filtered: %3, selected: %4%5" )
                      .arg( mLayer->name() )
-                     .arg( mMainView->featureCount() )
+                     .arg( qMax( static_cast< long >( mMainView->featureCount() ), mLayer->featureCount() ) ) // layer count may be estimated, so use larger of the two
                      .arg( mMainView->filteredFeatureCount() )
                      .arg( mLayer->selectedFeatureCount() )
                      .arg( mRubberBand ? tr( ", spatially limited" ) : "" )
diff --git a/src/app/qgsbookmarks.cpp b/src/app/qgsbookmarks.cpp
index 20a3eed..a5dbc69 100644
--- a/src/app/qgsbookmarks.cpp
+++ b/src/app/qgsbookmarks.cpp
@@ -337,7 +337,7 @@ void QgsBookmarks::exportToXML()
     return;
   }
 
-  // ensure the user never ommited the extension from the file name
+  // ensure the user never omitted the extension from the file name
   if ( !fileName.endsWith( ".xml", Qt::CaseInsensitive ) )
   {
     fileName += ".xml";
@@ -351,8 +351,7 @@ void QgsBookmarks::exportToXML()
   int colCount = mModel->columnCount();
 
   QList<QString> headerList;
-  headerList << "id" << "name" << "project" << "xmin"
-  << "ymin" << "xmax" << "ymax" << "sr_id";
+  headerList << "id" << "name" << "project" << "xmin" << "ymin" << "xmax" << "ymax" << "sr_id";
 
   for ( int i = 0; i < rowCount; ++i )
   {
@@ -406,8 +405,7 @@ int QgsProjectBookmarksTableModel::rowCount( const QModelIndex& parent ) const
 int QgsProjectBookmarksTableModel::columnCount( const QModelIndex& parent ) const
 {
   Q_UNUSED( parent );
-
-  return 8;
+  return 7;
 }
 
 QVariant QgsProjectBookmarksTableModel::data( const QModelIndex& index, int role ) const
diff --git a/src/app/qgsfieldsproperties.cpp b/src/app/qgsfieldsproperties.cpp
index 24440da..c737fbf 100644
--- a/src/app/qgsfieldsproperties.cpp
+++ b/src/app/qgsfieldsproperties.cpp
@@ -1300,7 +1300,7 @@ void DesignerTree::onItemDoubleClicked( QTreeWidgetItem* item, int column )
   baseData->setLayout( baseLayout );
   QCheckBox* showLabelCheckbox = new QCheckBox( "Show label" );
   showLabelCheckbox->setChecked( itemData.showLabel() );
-  baseLayout->addWidget( showLabelCheckbox );
+  baseLayout->addRow( showLabelCheckbox );
   QWidget* baseWidget = new QWidget();
   baseWidget->setLayout( baseLayout );
 
@@ -1310,12 +1310,12 @@ void DesignerTree::onItemDoubleClicked( QTreeWidgetItem* item, int column )
     dlg.setWindowTitle( tr( "Configure container" ) );
     QFormLayout* layout = new QFormLayout() ;
     dlg.setLayout( layout );
-    layout->addWidget( baseWidget );
+    layout->addRow( baseWidget );
 
     QCheckBox* showAsGroupBox = nullptr;
     QLineEdit* title = new QLineEdit( itemData.name() );
     QSpinBox* columnCount = new QSpinBox();
-    QGroupBox* visibilityExpressionGroupBox = new QGroupBox( tr( "Control visibility by expression " ) );
+    QGroupBox* visibilityExpressionGroupBox = new QGroupBox( tr( "Control visibility by expression" ) );
     visibilityExpressionGroupBox->setCheckable( true );
     visibilityExpressionGroupBox->setChecked( itemData.visibilityExpression().enabled() );
     visibilityExpressionGroupBox->setLayout( new QGridLayout );
@@ -1330,13 +1330,13 @@ void DesignerTree::onItemDoubleClicked( QTreeWidgetItem* item, int column )
 
     layout->addRow( tr( "Title" ), title );
     layout->addRow( tr( "Column count" ), columnCount );
-    layout->addWidget( visibilityExpressionGroupBox );
+    layout->addRow( visibilityExpressionGroupBox );
 
     if ( !item->parent() )
     {
       showAsGroupBox = new QCheckBox( tr( "Show as group box" ) );
       showAsGroupBox->setChecked( itemData.showAsGroupBox() );
-      layout->addRow( tr( "Show as group box" ), showAsGroupBox );
+      layout->addRow( showAsGroupBox );
     }
 
     QDialogButtonBox* buttonBox = new QDialogButtonBox( QDialogButtonBox::Ok
diff --git a/src/app/qgsvisibilitypresets.cpp b/src/app/qgsvisibilitypresets.cpp
index 76451ea..a9201d8 100644
--- a/src/app/qgsvisibilitypresets.cpp
+++ b/src/app/qgsvisibilitypresets.cpp
@@ -30,7 +30,7 @@
 #include "qgsnewnamedialog.h"
 
 #include <QInputDialog>
-
+#include <QMessageBox>
 
 QgsVisibilityPresets* QgsVisibilityPresets::sInstance;
 
@@ -181,6 +181,12 @@ void QgsVisibilityPresets::replaceTriggered()
   if ( !actionPreset )
     return;
 
+  int res = QMessageBox::question( mMenu, tr( "Replace preset" ),
+                                   tr( "Are you sure you want to replace the existing preset %1?" ).arg( actionPreset->text() ),
+                                   QMessageBox::Yes | QMessageBox::No, QMessageBox::No );
+  if ( res != QMessageBox::Yes )
+    return;
+
   //adding preset with same name is effectively a replace
   addPreset( actionPreset->text() );
 }
diff --git a/src/core/composer/qgscomposerlegend.cpp b/src/core/composer/qgscomposerlegend.cpp
index eedcae8..1b5b66d 100644
--- a/src/core/composer/qgscomposerlegend.cpp
+++ b/src/core/composer/qgscomposerlegend.cpp
@@ -535,7 +535,7 @@ bool QgsComposerLegend::readXML( const QDomElement& itemElem, const QDomDocument
   {
     // QGIS >= 2.6
     QDomElement layerTreeElem = itemElem.firstChildElement( "layer-tree-group" );
-    setCustomLayerTree( QgsLayerTreeGroup::readXML( layerTreeElem ) );
+    setCustomLayerTree( QgsLayerTreeGroup::readXML( layerTreeElem, true ) );
   }
 
   //restore general composer item properties
diff --git a/src/core/layertree/qgslayertreegroup.cpp b/src/core/layertree/qgslayertreegroup.cpp
index 6a0aa46..ab729a0 100644
--- a/src/core/layertree/qgslayertreegroup.cpp
+++ b/src/core/layertree/qgslayertreegroup.cpp
@@ -254,7 +254,7 @@ QgsLayerTreeGroup* QgsLayerTreeGroup::findGroup( const QString& name )
   return nullptr;
 }
 
-QgsLayerTreeGroup* QgsLayerTreeGroup::readXML( QDomElement& element )
+QgsLayerTreeGroup* QgsLayerTreeGroup::readXML( QDomElement& element, bool looseMatch )
 {
   if ( element.tagName() != "layer-tree-group" )
     return nullptr;
@@ -270,7 +270,7 @@ QgsLayerTreeGroup* QgsLayerTreeGroup::readXML( QDomElement& element )
 
   groupNode->readCommonXML( element );
 
-  groupNode->readChildrenFromXML( element );
+  groupNode->readChildrenFromXML( element, looseMatch );
 
   groupNode->setIsMutuallyExclusive( isMutuallyExclusive, mutuallyExclusiveChildIndex );
 
@@ -298,13 +298,13 @@ void QgsLayerTreeGroup::writeXML( QDomElement& parentElement )
   parentElement.appendChild( elem );
 }
 
-void QgsLayerTreeGroup::readChildrenFromXML( QDomElement& element )
+void QgsLayerTreeGroup::readChildrenFromXML( QDomElement& element, bool looseMatch )
 {
   QList<QgsLayerTreeNode*> nodes;
   QDomElement childElem = element.firstChildElement();
   while ( !childElem.isNull() )
   {
-    QgsLayerTreeNode* newNode = QgsLayerTreeNode::readXML( childElem );
+    QgsLayerTreeNode* newNode = QgsLayerTreeNode::readXML( childElem, looseMatch );
     if ( newNode )
       nodes << newNode;
 
diff --git a/src/core/layertree/qgslayertreegroup.h b/src/core/layertree/qgslayertreegroup.h
index 5f8fd62..943dfcf 100644
--- a/src/core/layertree/qgslayertreegroup.h
+++ b/src/core/layertree/qgslayertreegroup.h
@@ -76,12 +76,22 @@ class CORE_EXPORT QgsLayerTreeGroup : public QgsLayerTreeNode
     //! Find group node with specified name. Searches recursively the whole sub-tree.
     QgsLayerTreeGroup* findGroup( const QString& name );
 
-    //! Read group (tree) from XML element <layer-tree-group> and return the newly created group (or null on error)
-    static QgsLayerTreeGroup* readXML( QDomElement& element );
+    /**
+     * Read group (tree) from XML element <layer-tree-group> and return the newly
+     * created group (or null on error). If the looseMatch
+     * parameter is true then child legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeGroup* readXML( QDomElement& element, bool looseMatch = false );
+
     //! Write group (tree) as XML element <layer-tree-group> and add it to the given parent element
     virtual void writeXML( QDomElement& parentElement ) override;
-    //! Read children from XML and append them to the group.
-    void readChildrenFromXML( QDomElement& element );
+    /**
+     * Read children from XML and append them to the group. If the looseMatch
+     * parameter is true then legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    void readChildrenFromXML( QDomElement& element, bool looseMatch = false );
 
     //! Return text representation of the tree. For debugging purposes only.
     virtual QString dump() const override;
diff --git a/src/core/layertree/qgslayertreelayer.cpp b/src/core/layertree/qgslayertreelayer.cpp
index e1a9466..dd80e69 100644
--- a/src/core/layertree/qgslayertreelayer.cpp
+++ b/src/core/layertree/qgslayertreelayer.cpp
@@ -18,7 +18,10 @@
 #include "qgslayertreeutils.h"
 #include "qgsmaplayer.h"
 #include "qgsmaplayerregistry.h"
-
+#include "qgsvectorlayer.h"
+#include "qgsrasterlayer.h"
+#include "qgsvectordataprovider.h"
+#include "qgsrasterdataprovider.h"
 
 QgsLayerTreeLayer::QgsLayerTreeLayer( QgsMapLayer *layer )
     : QgsLayerTreeNode( NodeLayer )
@@ -50,6 +53,13 @@ QgsLayerTreeLayer::QgsLayerTreeLayer( const QgsLayerTreeLayer& other )
   attachToLayer();
 }
 
+QgsLayerTreeLayer *QgsLayerTreeLayer::createLayerFromParams( const LayerMatchParams &source )
+{
+  QgsLayerTreeLayer* l = new QgsLayerTreeLayer( QString() );
+  l->attachToSource( source );
+  return l;
+}
+
 void QgsLayerTreeLayer::attachToLayer()
 {
   // layer is not necessarily already loaded
@@ -71,6 +81,35 @@ void QgsLayerTreeLayer::attachToLayer()
   }
 }
 
+bool QgsLayerTreeLayer::layerMatchesSource( QgsMapLayer* layer, const QgsLayerTreeLayer::LayerMatchParams &params ) const
+{
+  if ( layer->publicSource() != params.source ||
+       layer->name() != params.name )
+    return false;
+
+  switch ( layer->type() )
+  {
+    case QgsMapLayer::VectorLayer:
+    {
+      QgsVectorLayer* vl = qobject_cast< QgsVectorLayer* >( layer );
+      if ( vl->dataProvider()->name() != params.providerKey )
+        return false;
+      break;
+    }
+    case QgsMapLayer::RasterLayer:
+    {
+      QgsRasterLayer* rl = qobject_cast< QgsRasterLayer* >( layer );
+      if ( rl->dataProvider()->name() != params.providerKey )
+        return false;
+      break;
+    }
+    case QgsMapLayer::PluginLayer:
+      break;
+
+  }
+  return true;
+}
+
 QString QgsLayerTreeLayer::name() const
 {
   return layerName();
@@ -81,6 +120,29 @@ void QgsLayerTreeLayer::setName( const QString& n )
   setLayerName( n );
 }
 
+void QgsLayerTreeLayer::attachToSource( const LayerMatchParams &source )
+{
+  // check if matching source already open
+  bool foundMatch = false;
+  Q_FOREACH ( QgsMapLayer* layer, QgsMapLayerRegistry::instance()->mapLayers() )
+  {
+    if ( layerMatchesSource( layer, source ) )
+    {
+      // found a source! need to disconnect from layersAdded signal as original attachToLayer call
+      // will have set this up
+      disconnect( QgsMapLayerRegistry::instance(), SIGNAL( layersAdded( QList<QgsMapLayer*> ) ), this, SLOT( registryLayersAdded( QList<QgsMapLayer*> ) ) );
+      mLayerId = layer->id();
+      attachToLayer();
+      emit layerLoaded();
+      foundMatch = true;
+      break;
+    }
+  }
+
+  if ( !foundMatch )
+    mLooseMatchParams = source; // no need to store source if match already made
+}
+
 QString QgsLayerTreeLayer::layerName() const
 {
   return mLayer ? mLayer->name() : mLayerName;
@@ -113,13 +175,17 @@ void QgsLayerTreeLayer::setVisible( Qt::CheckState state )
   emit visibilityChanged( this, state );
 }
 
-QgsLayerTreeLayer* QgsLayerTreeLayer::readXML( QDomElement& element )
+QgsLayerTreeLayer* QgsLayerTreeLayer::readXML( QDomElement& element , bool looseMatch )
 {
   if ( element.tagName() != "layer-tree-layer" )
     return nullptr;
 
   QString layerID = element.attribute( "id" );
   QString layerName = element.attribute( "name" );
+
+  QString source = element.attribute( "source" );
+  QString providerKey = element.attribute( "providerKey" );
+
   Qt::CheckState checked = QgsLayerTreeUtils::checkStateFromXml( element.attribute( "checked" ) );
   bool isExpanded = ( element.attribute( "expanded", "1" ) == "1" );
 
@@ -129,6 +195,14 @@ QgsLayerTreeLayer* QgsLayerTreeLayer::readXML( QDomElement& element )
 
   if ( layer )
     nodeLayer = new QgsLayerTreeLayer( layer );
+  else if ( looseMatch && !source.isEmpty() )
+  {
+    LayerMatchParams params;
+    params.name = layerName;
+    params.source = source;
+    params.providerKey = providerKey;
+    nodeLayer = QgsLayerTreeLayer::createLayerFromParams( params );
+  }
   else
     nodeLayer = new QgsLayerTreeLayer( layerID, layerName );
 
@@ -144,6 +218,31 @@ void QgsLayerTreeLayer::writeXML( QDomElement& parentElement )
   QDomDocument doc = parentElement.ownerDocument();
   QDomElement elem = doc.createElement( "layer-tree-layer" );
   elem.setAttribute( "id", mLayerId );
+  if ( mLayer )
+  {
+    elem.setAttribute( "source", mLayer->publicSource() );
+
+    QString providerKey;
+    switch ( mLayer->type() )
+    {
+      case QgsMapLayer::VectorLayer:
+      {
+        QgsVectorLayer* vl = qobject_cast< QgsVectorLayer* >( mLayer );
+        providerKey = vl->dataProvider()->name();
+        break;
+      }
+      case QgsMapLayer::RasterLayer:
+      {
+        QgsRasterLayer* rl = qobject_cast< QgsRasterLayer* >( mLayer );
+        providerKey = rl->dataProvider()->name();
+        break;
+      }
+      case QgsMapLayer::PluginLayer:
+        break;
+    }
+    elem.setAttribute( "providerKey", providerKey );
+  }
+
   elem.setAttribute( "name", layerName() );
   elem.setAttribute( "checked", QgsLayerTreeUtils::checkStateToXml( mVisible ) );
   elem.setAttribute( "expanded", mExpanded ? "1" : "0" );
@@ -167,6 +266,12 @@ void QgsLayerTreeLayer::registryLayersAdded( const QList<QgsMapLayer*>& layers )
 {
   Q_FOREACH ( QgsMapLayer* l, layers )
   {
+    if ( !mLooseMatchParams.source.isEmpty() && layerMatchesSource( l, mLooseMatchParams ) )
+    {
+      // we are loosely matching, and found a layer with a matching source.
+      // Attach to this!
+      mLayerId = l->id();
+    }
     if ( l->id() == mLayerId )
     {
       disconnect( QgsMapLayerRegistry::instance(), SIGNAL( layersAdded( QList<QgsMapLayer*> ) ), this, SLOT( registryLayersAdded( QList<QgsMapLayer*> ) ) );
diff --git a/src/core/layertree/qgslayertreelayer.h b/src/core/layertree/qgslayertreelayer.h
index 2fb3a05..5cc6910 100644
--- a/src/core/layertree/qgslayertreelayer.h
+++ b/src/core/layertree/qgslayertreelayer.h
@@ -42,9 +42,29 @@ class CORE_EXPORT QgsLayerTreeLayer : public QgsLayerTreeNode
 {
     Q_OBJECT
   public:
+
+    //! Parameters for loose layer matching
+    struct LayerMatchParams
+    {
+      //! Layer public source
+      QString source;
+      //! Layer name
+      QString name;
+      //! Provider
+      QString providerKey;
+    };
+
     explicit QgsLayerTreeLayer( QgsMapLayer* layer );
     QgsLayerTreeLayer( const QgsLayerTreeLayer& other );
 
+    /**
+     * Creates a layer node which will attach to a layer with matching
+     * parameters. This can be used for "looser" layer matching,
+     * avoiding the usual layer id check in favour of attaching to any layer
+     * with an equal source/name/provider.
+     */
+    static QgsLayerTreeLayer* createLayerFromParams( const LayerMatchParams& source );
+
     explicit QgsLayerTreeLayer( const QString& layerId, const QString& name = QString() );
 
     QString layerId() const { return mLayerId; }
@@ -58,13 +78,27 @@ class CORE_EXPORT QgsLayerTreeLayer : public QgsLayerTreeNode
     //! @note added in 2.18.1
     void setName( const QString& n ) override;
 
+    /**
+     * Attempts to attach this layer node to a layer with a matching
+     * QgsMapLayer::publicSource(). This can be used for "looser" layer matching,
+     * avoiding the usual layer id check in favour of attaching to any layer
+     * with an equal source.
+     */
+    void attachToSource( const LayerMatchParams &source );
+
     QString layerName() const;
     void setLayerName( const QString& n );
 
     Qt::CheckState isVisible() const { return mVisible; }
     void setVisible( Qt::CheckState visible );
 
-    static QgsLayerTreeLayer* readXML( QDomElement& element );
+    /**
+     * Creates a new layer from an XML definition. If the looseMatch
+     * parameter is true then legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeLayer* readXML( QDomElement& element, bool looseMatch = false );
+
     virtual void writeXML( QDomElement& parentElement ) override;
 
     virtual QString dump() const override;
@@ -90,8 +124,17 @@ class CORE_EXPORT QgsLayerTreeLayer : public QgsLayerTreeNode
 
     QString mLayerId;
     QString mLayerName; // only used if layer does not exist
+
+    //! Only used when loosely matching to layers - eg when creating a composer legend from template
+    //! If set this will attach to the first matching layer with equal parameters
+    LayerMatchParams mLooseMatchParams;
+
     QgsMapLayer* mLayer; // not owned! may be null
     Qt::CheckState mVisible;
+
+  private:
+
+    bool layerMatchesSource( QgsMapLayer *layer, const LayerMatchParams& params ) const;
 };
 
 
diff --git a/src/core/layertree/qgslayertreenode.cpp b/src/core/layertree/qgslayertreenode.cpp
index a3513fe..4d2b4a3 100644
--- a/src/core/layertree/qgslayertreenode.cpp
+++ b/src/core/layertree/qgslayertreenode.cpp
@@ -47,13 +47,13 @@ QgsLayerTreeNode::~QgsLayerTreeNode()
   qDeleteAll( mChildren );
 }
 
-QgsLayerTreeNode* QgsLayerTreeNode::readXML( QDomElement& element )
+QgsLayerTreeNode* QgsLayerTreeNode::readXML( QDomElement& element, bool looseMatch )
 {
   QgsLayerTreeNode* node = nullptr;
   if ( element.tagName() == "layer-tree-group" )
-    node = QgsLayerTreeGroup::readXML( element );
+    node = QgsLayerTreeGroup::readXML( element, looseMatch );
   else if ( element.tagName() == "layer-tree-layer" )
-    node = QgsLayerTreeLayer::readXML( element );
+    node = QgsLayerTreeLayer::readXML( element, looseMatch );
 
   return node;
 }
diff --git a/src/core/layertree/qgslayertreenode.h b/src/core/layertree/qgslayertreenode.h
index 9f5eb62..3388f30 100644
--- a/src/core/layertree/qgslayertreenode.h
+++ b/src/core/layertree/qgslayertreenode.h
@@ -90,8 +90,13 @@ class CORE_EXPORT QgsLayerTreeNode : public QObject
     //! @note added in 2.18.1
     virtual void setName( const QString& name ) = 0;
 
-    //! Read layer tree from XML. Returns new instance
-    static QgsLayerTreeNode *readXML( QDomElement &element );
+    /**
+     * Read layer tree from XML. Returns new instance. If the looseMatch
+     * parameter is true then child legend layers will use looser matching criteria,
+     * eg testing layer source instead of layer IDs.
+     */
+    static QgsLayerTreeNode *readXML( QDomElement &element, bool looseMatch = false );
+
     //! Write layer tree to XML
     virtual void writeXML( QDomElement &parentElement ) = 0;
 
diff --git a/src/core/qgsmaprenderercustompainterjob.cpp b/src/core/qgsmaprenderercustompainterjob.cpp
index 8385188..afefadc 100644
--- a/src/core/qgsmaprenderercustompainterjob.cpp
+++ b/src/core/qgsmaprenderercustompainterjob.cpp
@@ -126,14 +126,7 @@ void QgsMapRendererCustomPainterJob::cancel()
 
   QgsDebugMsg( "QPAINTER cancelling" );
   disconnect( &mFutureWatcher, SIGNAL( finished() ), this, SLOT( futureFinished() ) );
-
-  mLabelingRenderContext.setRenderingStopped( true );
-  for ( LayerRenderJobs::iterator it = mLayerJobs.begin(); it != mLayerJobs.end(); ++it )
-  {
-    it->context.setRenderingStopped( true );
-    if ( it->renderer && it->renderer->feedback() )
-      it->renderer->feedback()->cancel();
-  }
+  cancelWithoutBlocking();
 
   QTime t;
   t.start();
@@ -144,7 +137,24 @@ void QgsMapRendererCustomPainterJob::cancel()
 
   futureFinished();
 
-  QgsDebugMsg( "QPAINTER cancelled" );
+  QgsDebugMsg( "QPAINTER canceled" );
+}
+
+void QgsMapRendererCustomPainterJob::cancelWithoutBlocking()
+{
+  if ( !isActive() )
+  {
+    QgsDebugMsg( "QPAINTER not running!" );
+    return;
+  }
+
+  mLabelingRenderContext.setRenderingStopped( true );
+  for ( LayerRenderJobs::iterator it = mLayerJobs.begin(); it != mLayerJobs.end(); ++it )
+  {
+    it->context.setRenderingStopped( true );
+    if ( it->renderer && it->renderer->feedback() )
+      it->renderer->feedback()->cancel();
+  }
 }
 
 void QgsMapRendererCustomPainterJob::waitForFinished()
diff --git a/src/core/qgsmaprenderercustompainterjob.h b/src/core/qgsmaprenderercustompainterjob.h
index f8dcd79..b6b7a8a 100644
--- a/src/core/qgsmaprenderercustompainterjob.h
+++ b/src/core/qgsmaprenderercustompainterjob.h
@@ -38,6 +38,7 @@ class CORE_EXPORT QgsMapRendererCustomPainterJob : public QgsMapRendererJob
 
     virtual void start() override;
     virtual void cancel() override;
+    virtual void cancelWithoutBlocking() override;
     virtual void waitForFinished() override;
     virtual bool isActive() const override;
     virtual QgsLabelingResults* takeLabelingResults() override;
diff --git a/src/core/qgsmaprendererjob.h b/src/core/qgsmaprendererjob.h
index acaacfc..62c844a 100644
--- a/src/core/qgsmaprendererjob.h
+++ b/src/core/qgsmaprendererjob.h
@@ -96,6 +96,13 @@ class CORE_EXPORT QgsMapRendererJob : public QObject
     //! Does nothing if the rendering is not active.
     virtual void cancel() = 0;
 
+    /**
 +     * Triggers cancellation of the rendering job without blocking. The render job will continue
+     * to operate until it is able to cancel, at which stage the finished() signal will be emitted.
+     * Does nothing if the rendering is not active.
+     */
+    virtual void cancelWithoutBlocking() = 0;
+
     //! Block until the job has finished.
     virtual void waitForFinished() = 0;
 
@@ -206,6 +213,7 @@ class CORE_EXPORT QgsMapRendererQImageJob : public QgsMapRendererJob
 
     //! Get a preview/resulting image
     virtual QImage renderedImage() = 0;
+
 };
 
 
diff --git a/src/core/qgsmaprendererparalleljob.cpp b/src/core/qgsmaprendererparalleljob.cpp
index 5441df0..7c25233 100644
--- a/src/core/qgsmaprendererparalleljob.cpp
+++ b/src/core/qgsmaprendererparalleljob.cpp
@@ -123,6 +123,28 @@ void QgsMapRendererParallelJob::cancel()
   Q_ASSERT( mStatus == Idle );
 }
 
+void QgsMapRendererParallelJob::cancelWithoutBlocking()
+{
+  if ( !isActive() )
+    return;
+
+  QgsDebugMsg( QString( "PARALLEL cancel at status %1" ).arg( mStatus ) );
+
+  mLabelingRenderContext.setRenderingStopped( true );
+  for ( LayerRenderJobs::iterator it = mLayerJobs.begin(); it != mLayerJobs.end(); ++it )
+  {
+    it->context.setRenderingStopped( true );
+    if ( it->renderer && it->renderer->feedback() )
+      it->renderer->feedback()->cancel();
+  }
+
+  if ( mStatus == RenderingLayers )
+  {
+    disconnect( &mFutureWatcher, SIGNAL( finished() ), this, SLOT( renderLayersFinished() ) );
+    connect( &mFutureWatcher, SIGNAL( finished() ), this, SLOT( renderingFinished() ) );
+  }
+}
+
 void QgsMapRendererParallelJob::waitForFinished()
 {
   if ( !isActive() )
diff --git a/src/core/qgsmaprendererparalleljob.h b/src/core/qgsmaprendererparalleljob.h
index 21c6083..dc418ac 100644
--- a/src/core/qgsmaprendererparalleljob.h
+++ b/src/core/qgsmaprendererparalleljob.h
@@ -35,6 +35,7 @@ class CORE_EXPORT QgsMapRendererParallelJob : public QgsMapRendererQImageJob
 
     virtual void start() override;
     virtual void cancel() override;
+    virtual void cancelWithoutBlocking() override;
     virtual void waitForFinished() override;
     virtual bool isActive() const override;
 
diff --git a/src/core/qgsmaprenderersequentialjob.cpp b/src/core/qgsmaprenderersequentialjob.cpp
index 7b37d17..fcdbf5a 100644
--- a/src/core/qgsmaprenderersequentialjob.cpp
+++ b/src/core/qgsmaprenderersequentialjob.cpp
@@ -86,6 +86,15 @@ void QgsMapRendererSequentialJob::cancel()
   Q_ASSERT( !mInternalJob && !mPainter );
 }
 
+void QgsMapRendererSequentialJob::cancelWithoutBlocking()
+{
+  if ( !isActive() )
+    return;
+
+  QgsDebugMsg( "sequential - cancel internal" );
+  mInternalJob->cancelWithoutBlocking();
+}
+
 void QgsMapRendererSequentialJob::waitForFinished()
 {
   if ( !isActive() )
diff --git a/src/core/qgsmaprenderersequentialjob.h b/src/core/qgsmaprenderersequentialjob.h
index 7cbd11d..f1252df 100644
--- a/src/core/qgsmaprenderersequentialjob.h
+++ b/src/core/qgsmaprenderersequentialjob.h
@@ -37,6 +37,7 @@ class CORE_EXPORT QgsMapRendererSequentialJob : public QgsMapRendererQImageJob
 
     virtual void start() override;
     virtual void cancel() override;
+    virtual void cancelWithoutBlocking() override;
     virtual void waitForFinished() override;
     virtual bool isActive() const override;
 
diff --git a/src/core/qgsofflineediting.cpp b/src/core/qgsofflineediting.cpp
index 3bfc7d6..73e76d5 100644
--- a/src/core/qgsofflineediting.cpp
+++ b/src/core/qgsofflineediting.cpp
@@ -30,6 +30,7 @@
 #include "qgsvectorlayereditbuffer.h"
 #include "qgsvectorlayerjoinbuffer.h"
 #include "qgsslconnect.h"
+#include "qgsvisibilitypresetcollection.h"
 
 #include <QDir>
 #include <QDomDocument>
@@ -257,6 +258,7 @@ void QgsOfflineEditing::synchronize()
       // copy style
       copySymbology( offlineLayer, remoteLayer );
       updateRelations( offlineLayer, remoteLayer );
+      updateVisibilityPresets( offlineLayer, remoteLayer );
 
       // apply layer edit log
       QString qgisLayerId = layer->id();
@@ -632,6 +634,7 @@ QgsVectorLayer* QgsOfflineEditing::copyVectorLayer( QgsVectorLayer* layer, sqlit
       }
 
       updateRelations( layer, newLayer );
+      updateVisibilityPresets( layer, newLayer );
       // copy features
       newLayer->startEditing();
       QgsFeature f;
@@ -970,6 +973,36 @@ void QgsOfflineEditing::updateRelations( QgsVectorLayer* sourceLayer, QgsVectorL
   }
 }
 
+void QgsOfflineEditing::updateVisibilityPresets( QgsVectorLayer* sourceLayer, QgsVectorLayer* targetLayer )
+{
+  QgsVisibilityPresetCollection* presetCollection = QgsProject::instance()->visibilityPresetCollection();
+  QStringList visibilityPresets = presetCollection->presets();
+
+  Q_FOREACH ( const QString& preset, visibilityPresets )
+  {
+    QgsVisibilityPresetCollection::PresetRecord record = presetCollection->presetState( preset );
+
+    if ( record.mVisibleLayerIDs.removeOne( sourceLayer->id() ) )
+      record.mVisibleLayerIDs.append( targetLayer->id() );
+
+    QString style = record.mPerLayerCurrentStyle.value( sourceLayer->id() );
+    if ( !style.isNull() )
+    {
+      record.mPerLayerCurrentStyle.remove( sourceLayer->id() );
+      record.mPerLayerCurrentStyle.insert( targetLayer->id(), style );
+    }
+
+    if ( !record.mPerLayerCheckedLegendSymbols.contains( sourceLayer->id() ) )
+    {
+      QSet<QString> checkedSymbols = record.mPerLayerCheckedLegendSymbols.value( sourceLayer->id() );
+      record.mPerLayerCheckedLegendSymbols.remove( sourceLayer->id() );
+      record.mPerLayerCheckedLegendSymbols.insert( targetLayer->id(), checkedSymbols );
+    }
+
+    QgsProject::instance()->visibilityPresetCollection()->update( preset, record );
+  }
+}
+
 // NOTE: use this to map column indices in case the remote geometry column is not last
 QMap<int, int> QgsOfflineEditing::attributeLookup( QgsVectorLayer* offlineLayer, QgsVectorLayer* remoteLayer )
 {
diff --git a/src/core/qgsofflineediting.h b/src/core/qgsofflineediting.h
index 0923156..ac1568a 100644
--- a/src/core/qgsofflineediting.h
+++ b/src/core/qgsofflineediting.h
@@ -112,6 +112,11 @@ class CORE_EXPORT QgsOfflineEditing : public QObject
      * Updates all relations that reference or are referenced by the source layer to the targetLayer.
      */
     void updateRelations( QgsVectorLayer* sourceLayer, QgsVectorLayer* targetLayer );
+    /**
+     * Update all visibility presets that affect the source layer.
+     */
+    void updateVisibilityPresets( QgsVectorLayer* sourceLayer, QgsVectorLayer* targetLayer );
+
     QMap<int, int> attributeLookup( QgsVectorLayer* offlineLayer, QgsVectorLayer* remoteLayer );
 
     void showWarning( const QString& message );
diff --git a/src/core/qgsogrutils.cpp b/src/core/qgsogrutils.cpp
index 3df9d8a..859661e 100644
--- a/src/core/qgsogrutils.cpp
+++ b/src/core/qgsogrutils.cpp
@@ -30,6 +30,13 @@
 #define FROM8(x) QString::fromLocal8Bit(x)
 #endif
 
+// Starting with GDAL 2.2, there are 2 concepts: unset fields and null fields
+// whereas previously there was only unset fields. For QGIS purposes, both
+// states (unset/null) are equivalent.
+#ifndef OGRNullMarker
+#define OGR_F_IsFieldSetAndNotNull OGR_F_IsFieldSet
+#endif
+
 QgsFeature QgsOgrUtils::readOgrFeature( OGRFeatureH ogrFet, const QgsFields& fields, QTextCodec* encoding )
 {
   QgsFeature feature;
@@ -132,7 +139,7 @@ QVariant QgsOgrUtils::getOgrFeatureAttribute( OGRFeatureH ogrFet, const QgsField
   if ( ok )
     *ok = true;
 
-  if ( OGR_F_IsFieldSet( ogrFet, attIndex ) )
+  if ( OGR_F_IsFieldSetAndNotNull( ogrFet, attIndex ) )
   {
     switch ( fields.at( attIndex ).type() )
     {
diff --git a/src/core/qgsvectorlayer.cpp b/src/core/qgsvectorlayer.cpp
index 68282fd..70f66d6 100644
--- a/src/core/qgsvectorlayer.cpp
+++ b/src/core/qgsvectorlayer.cpp
@@ -1655,18 +1655,6 @@ bool QgsVectorLayer::readXml( const QDomNode& layer_node )
   updateFields();
   connect( QgsMapLayerRegistry::instance(), SIGNAL( layerWillBeRemoved( QString ) ), this, SLOT( checkJoinLayerRemove( QString ) ) );
 
-  QDomNode prevExpNode = layer_node.namedItem( "previewExpression" );
-
-  if ( prevExpNode.isNull() )
-  {
-    mDisplayExpression = "";
-  }
-  else
-  {
-    QDomElement prevExpElem = prevExpNode.toElement();
-    mDisplayExpression = prevExpElem.text();
-  }
-
   QString errorMsg;
   if ( !readSymbology( layer_node, errorMsg ) )
   {
@@ -1852,12 +1840,6 @@ bool QgsVectorLayer::writeXml( QDomNode & layer_node,
     layer_node.appendChild( provider );
   }
 
-  // save preview expression
-  QDomElement prevExpElem = document.createElement( "previewExpression" );
-  QDomText prevExpText = document.createTextNode( mDisplayExpression );
-  prevExpElem.appendChild( prevExpText );
-  layer_node.appendChild( prevExpElem );
-
   //save joins
   mJoinBuffer->writeXml( layer_node, document );
 
@@ -1976,13 +1958,25 @@ bool QgsVectorLayer::readSymbology( const QDomNode& node, QString& errorMessage
 
   mConditionalStyles->readXml( node );
 
+  QDomNode prevExpNode = node.namedItem( "previewExpression" );
+
+  if ( prevExpNode.isNull() )
+  {
+    mDisplayExpression = "";
+  }
+  else
+  {
+    QDomElement prevExpElem = prevExpNode.toElement();
+    mDisplayExpression = prevExpElem.text();
+  }
+
+
   readCustomProperties( node, "variable" );
 
   QDomElement mapLayerNode = node.toElement();
   if ( mapLayerNode.attribute( "readOnly", "0" ).toInt() == 1 )
     mReadOnly = true;
 
-
   return true;
 }
 
@@ -2193,6 +2187,12 @@ bool QgsVectorLayer::writeSymbology( QDomNode& node, QDomDocument& doc, QString&
   }
   node.appendChild( defaultsElem );
 
+  // preview expression
+  QDomElement prevExpElem = doc.createElement( "previewExpression" );
+  QDomText prevExpText = doc.createTextNode( mDisplayExpression );
+  prevExpElem.appendChild( prevExpText );
+  node.appendChild( prevExpElem );
+
   return true;
 }
 
diff --git a/src/core/qgsvectorlayercache.cpp b/src/core/qgsvectorlayercache.cpp
index 69b28f9..3eea570 100644
--- a/src/core/qgsvectorlayercache.cpp
+++ b/src/core/qgsvectorlayercache.cpp
@@ -58,7 +58,9 @@ int QgsVectorLayerCache::cacheSize()
 
 void QgsVectorLayerCache::setCacheGeometry( bool cacheGeometry )
 {
-  mCacheGeometry = cacheGeometry && mLayer->hasGeometryType();
+  bool shouldCacheGeometry = cacheGeometry && mLayer->hasGeometryType();
+  bool mustInvalidate = shouldCacheGeometry && !mCacheGeometry; // going from no geometry -> geometry, so have to clear existing cache entries
+  mCacheGeometry = shouldCacheGeometry;
   if ( cacheGeometry )
   {
     connect( mLayer, SIGNAL( geometryChanged( QgsFeatureId, QgsGeometry& ) ), SLOT( geometryChanged( QgsFeatureId, QgsGeometry& ) ) );
@@ -67,6 +69,10 @@ void QgsVectorLayerCache::setCacheGeometry( bool cacheGeometry )
   {
     disconnect( mLayer, SIGNAL( geometryChanged( QgsFeatureId, QgsGeometry& ) ), this, SLOT( geometryChanged( QgsFeatureId, QgsGeometry& ) ) );
   }
+  if ( mustInvalidate )
+  {
+    invalidate();
+  }
 }
 
 void QgsVectorLayerCache::setCacheSubsetOfAttributes( const QgsAttributeList& attributes )
@@ -231,7 +237,7 @@ void QgsVectorLayerCache::attributeAdded( int field )
 {
   Q_UNUSED( field )
   mCachedAttributes.append( field );
-  mCache.clear();
+  invalidate();
 }
 
 void QgsVectorLayerCache::attributeDeleted( int field )
@@ -267,6 +273,7 @@ void QgsVectorLayerCache::layerDeleted()
 void QgsVectorLayerCache::invalidate()
 {
   mCache.clear();
+  mFullCache = false;
   emit invalidated();
 }
 
diff --git a/src/core/qgsvectorlayercache.h b/src/core/qgsvectorlayercache.h
index 69ede43..9056764 100644
--- a/src/core/qgsvectorlayercache.h
+++ b/src/core/qgsvectorlayercache.h
@@ -103,9 +103,16 @@ class CORE_EXPORT QgsVectorLayerCache : public QObject
      * Enable or disable the caching of geometries
      *
      * @param cacheGeometry    Enable or disable the caching of geometries
+     * @see cacheGeometry()
      */
     void setCacheGeometry( bool cacheGeometry );
 
+    /**
+     * Returns true if the cache will fetch and cache feature geometries.
+     * @note added in QGIS 3.0
+     * @see setCacheGeometry()
+     */
+    bool cacheGeometry() const { return mCacheGeometry; }
 
     /**
      * Set the subset of attributes to be cached
@@ -131,6 +138,8 @@ class CORE_EXPORT QgsVectorLayerCache : public QObject
      * be used for slow data sources, be aware, that the call to this method might take a long time.
      *
      * @param fullCache   True: enable full caching, False: disable full caching
+     * @note when a cache is invalidated() (e.g. by adding an attribute to a layer) this setting
+     * is reset. A full cache rebuild must be performed by calling setFullCache( true ) again.
      * @see hasFullCache()
      */
     void setFullCache( bool fullCache );
@@ -273,7 +282,9 @@ class CORE_EXPORT QgsVectorLayerCache : public QObject
     void featureAdded( QgsFeatureId fid );
 
     /**
-     * The cache has been invalidated and cleared.
+     * The cache has been invalidated and cleared. Note that when a cache is invalidated
+     * the fullCache() setting will be cleared, and a full cache rebuild via setFullCache( true )
+     * will need to be performed.
      */
     void invalidated();
 
diff --git a/src/core/qgsvectorlayerfeatureiterator.cpp b/src/core/qgsvectorlayerfeatureiterator.cpp
index cd8492c..64055e8 100644
--- a/src/core/qgsvectorlayerfeatureiterator.cpp
+++ b/src/core/qgsvectorlayerfeatureiterator.cpp
@@ -112,6 +112,11 @@ QgsVectorLayerFeatureIterator::QgsVectorLayerFeatureIterator( QgsVectorLayerFeat
           mRequest.setSubsetOfAttributes( mRequest.subsetOfAttributes() << attrIdx );
       }
     }
+
+    // Required for local filtering
 +    // Also required with compiler enabled for updateAttributeValues() on fetched features.
+    if ( mRequest.filterExpression()->needsGeometry() )
+      mRequest.setFlags( mRequest.flags() & ~QgsFeatureRequest::NoGeometry );
   }
 
   prepareFields();
diff --git a/src/core/raster/qgsrasterlayerrenderer.cpp b/src/core/raster/qgsrasterlayerrenderer.cpp
index a9dc5de..6921b30 100644
--- a/src/core/raster/qgsrasterlayerrenderer.cpp
+++ b/src/core/raster/qgsrasterlayerrenderer.cpp
@@ -206,7 +206,7 @@ bool QgsRasterLayerRenderer::render()
   // params in QgsRasterProjector
   if ( projector )
   {
-    projector->setCRS( mRasterViewPort->mSrcCRS, mRasterViewPort->mDestCRS );
+    projector->setCRS( mRasterViewPort->mSrcCRS, mRasterViewPort->mDestCRS, mRasterViewPort->mSrcDatumTransform, mRasterViewPort->mDestDatumTransform );
   }
 
   // Drawer to pipe?
diff --git a/src/gui/CMakeLists.txt b/src/gui/CMakeLists.txt
index a526b37..226d6e3 100644
--- a/src/gui/CMakeLists.txt
+++ b/src/gui/CMakeLists.txt
@@ -715,6 +715,25 @@ IF (WITH_TOUCH)
 ENDIF (WITH_TOUCH)
 
 SET(QGIS_GUI_UI_HDRS
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthauthoritieseditor.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthcertificateinfo.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthcertificatemanager.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthconfigedit.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthconfigeditor.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthconfigidedit.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthconfigselect.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthconfiguriedit.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsautheditorwidgets.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthidentitieseditor.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthimportcertdialog.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthimportidentitydialog.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthmethodplugins.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthserverseditor.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthsslconfigwidget.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthsslerrorsdialog.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthsslimportdialog.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthsslimporterrors.h
+  ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsauthtrustedcasdialog.h
   ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgscredentialdialog.h
   ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsdetaileditemwidgetbase.h
   ${CMAKE_CURRENT_BINARY_DIR}/../ui/ui_qgsexpressionbuilderdialogbase.h
diff --git a/src/gui/attributetable/qgsattributetablemodel.cpp b/src/gui/attributetable/qgsattributetablemodel.cpp
index 795ca86..f57b4d9 100644
--- a/src/gui/attributetable/qgsattributetablemodel.cpp
+++ b/src/gui/attributetable/qgsattributetablemodel.cpp
@@ -606,7 +606,7 @@ QVariant QgsAttributeTableModel::data( const QModelIndex &index, int role ) cons
 
   if ( role == Qt::TextAlignmentRole )
   {
-    return mWidgetFactories.at( index.column() )->alignmentFlag( layer(), fieldId, mWidgetConfigs.at( index.column() ) );
+    return QVariant( mWidgetFactories.at( index.column() )->alignmentFlag( layer(), fieldId, mWidgetConfigs.at( index.column() ) ) | Qt::AlignVCenter );
   }
 
   if ( mFeat.id() != rowId || !mFeat.isValid() )
diff --git a/src/gui/attributetable/qgsdualview.cpp b/src/gui/attributetable/qgsdualview.cpp
index 97f788d..3454a03 100644
--- a/src/gui/attributetable/qgsdualview.cpp
+++ b/src/gui/attributetable/qgsdualview.cpp
@@ -48,6 +48,7 @@ QgsDualView::QgsDualView( QWidget* parent )
     , mProgressDlg( nullptr )
     , mFeatureSelectionManager( nullptr )
     , mAttributeEditorScrollArea( nullptr )
+    , mMapCanvas( nullptr )
 {
   setupUi( this );
 
@@ -67,11 +68,15 @@ QgsDualView::QgsDualView( QWidget* parent )
   connect( mFeatureList, SIGNAL( displayExpressionChanged( QString ) ), this, SLOT( previewExpressionChanged( QString ) ) );
 }
 
-void QgsDualView::init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const QgsFeatureRequest &request, const QgsAttributeEditorContext &context )
+void QgsDualView::init( QgsVectorLayer *layer, QgsMapCanvas *mapCanvas, const QgsFeatureRequest &request, const QgsAttributeEditorContext &context, bool loadFeatures )
 {
+  mMapCanvas = mapCanvas;
+
   if ( !layer )
     return;
 
+  mLayer = layer;
+
   mEditorContext = context;
 
   connect( mTableView, SIGNAL( willShowContextMenu( QMenu*, QModelIndex ) ), this, SLOT( viewWillShowContextMenu( QMenu*, QModelIndex ) ) );
@@ -79,15 +84,15 @@ void QgsDualView::init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const Qg
   connect( mTableView->horizontalHeader(), SIGNAL( customContextMenuRequested( QPoint ) ), this, SLOT( showViewHeaderMenu( QPoint ) ) );
   connect( mTableView, SIGNAL( columnResized( int, int ) ), this, SLOT( tableColumnResized( int, int ) ) );
 
-  initLayerCache( layer, !request.filterRect().isNull() );
-  initModels( mapCanvas, request );
+  initLayerCache( !( request.flags() & QgsFeatureRequest::NoGeometry ) || !request.filterRect().isNull() );
+  initModels( mapCanvas, request, loadFeatures );
 
-  mConditionalFormatWidget->setLayer( layer );
+  mConditionalFormatWidget->setLayer( mLayer );
 
   mTableView->setModel( mFilterModel );
   mFeatureList->setModel( mFeatureListModel );
   delete mAttributeForm;
-  mAttributeForm = new QgsAttributeForm( layer, QgsFeature(), mEditorContext );
+  mAttributeForm = new QgsAttributeForm( mLayer, QgsFeature(), mEditorContext );
   if ( !context.parentContext() )
   {
     mAttributeEditorScrollArea = new QScrollArea();
@@ -119,19 +124,19 @@ void QgsDualView::init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const Qg
 void QgsDualView::columnBoxInit()
 {
   // load fields
-  QList<QgsField> fields = mLayerCache->layer()->fields().toList();
+  QList<QgsField> fields = mLayer->fields().toList();
 
   QString defaultField;
 
   // default expression: saved value
-  QString displayExpression = mLayerCache->layer()->displayExpression();
+  QString displayExpression = mLayer->displayExpression();
 
   // if no display expression is saved: use display field instead
   if ( displayExpression.isEmpty() )
   {
-    if ( !mLayerCache->layer()->displayField().isEmpty() )
+    if ( !mLayer->displayField().isEmpty() )
     {
-      defaultField = mLayerCache->layer()->displayField();
+      defaultField = mLayer->displayField();
       displayExpression = QString( "COALESCE(\"%1\", '<NULL>')" ).arg( defaultField );
     }
   }
@@ -139,7 +144,7 @@ void QgsDualView::columnBoxInit()
   // if neither display expression nor display field is saved...
   if ( displayExpression.isEmpty() )
   {
-    QgsAttributeList pkAttrs = mLayerCache->layer()->pkAttributeList();
+    QgsAttributeList pkAttrs = mLayer->pkAttributeList();
 
     if ( !pkAttrs.isEmpty() )
     {
@@ -182,13 +187,13 @@ void QgsDualView::columnBoxInit()
 
   Q_FOREACH ( const QgsField& field, fields )
   {
-    int fieldIndex = mLayerCache->layer()->fieldNameIndex( field.name() );
+    int fieldIndex = mLayer->fieldNameIndex( field.name() );
     if ( fieldIndex == -1 )
       continue;
 
-    if ( mLayerCache->layer()->editFormConfig()->widgetType( fieldIndex ) != "Hidden" )
+    if ( mLayer->editFormConfig()->widgetType( fieldIndex ) != "Hidden" )
     {
-      QIcon icon = mLayerCache->layer()->fields().iconForField( fieldIndex );
+      QIcon icon = mLayer->fields().iconForField( fieldIndex );
       QString text = field.name();
 
       // Generate action for the preview popup button of the feature list
@@ -233,6 +238,71 @@ QgsDualView::ViewMode QgsDualView::view() const
 
 void QgsDualView::setFilterMode( QgsAttributeTableFilterModel::FilterMode filterMode )
 {
+  // cleanup any existing connections
+  switch ( mFilterModel->filterMode() )
+  {
+    case QgsAttributeTableFilterModel::ShowVisible:
+      disconnect( mMapCanvas, SIGNAL( extentsChanged() ), this, SLOT( extentChanged() ) );
+      break;
+
+    case QgsAttributeTableFilterModel::ShowAll:
+    case QgsAttributeTableFilterModel::ShowEdited:
+    case QgsAttributeTableFilterModel::ShowFilteredList:
+      break;
+
+    case QgsAttributeTableFilterModel::ShowSelected:
+      disconnect( masterModel()->layer(), SIGNAL( selectionChanged() ), this,
+                  SLOT( updateSelectedFeatures() ) );
+      break;
+  }
+
+  QgsFeatureRequest r = mMasterModel->request();
+  bool needsGeometry = filterMode == QgsAttributeTableFilterModel::ShowVisible;
+
+  bool requiresTableReload = ( r.filterType() != QgsFeatureRequest::FilterNone || !r.filterRect().isNull() ) // previous request was subset
+                             || ( needsGeometry && r.flags() & QgsFeatureRequest::NoGeometry ) // no geometry for last request
+                             || ( mMasterModel->rowCount() == 0 ); // no features
+
+  if ( !needsGeometry )
+    r.setFlags( r.flags() | QgsFeatureRequest::NoGeometry );
+  else
+    r.setFlags( r.flags() & ~( QgsFeatureRequest::NoGeometry ) );
+  r.setFilterFids( QgsFeatureIds() );
+  r.setFilterRect( QgsRectangle() );
+  r.disableFilter();
+
+  // setup new connections and filter request parameters
+  switch ( filterMode )
+  {
+    case QgsAttributeTableFilterModel::ShowVisible:
+      connect( mMapCanvas, SIGNAL( extentsChanged() ), this, SLOT( extentChanged() ) );
+      if ( mMapCanvas )
+      {
+        QgsRectangle rect = mMapCanvas->mapSettings().mapToLayerCoordinates( mLayer, mMapCanvas->extent() );
+        r.setFilterRect( rect );
+      }
+      break;
+
+    case QgsAttributeTableFilterModel::ShowAll:
+    case QgsAttributeTableFilterModel::ShowEdited:
+    case QgsAttributeTableFilterModel::ShowFilteredList:
+      break;
+
+    case QgsAttributeTableFilterModel::ShowSelected:
+      connect( masterModel()->layer(), SIGNAL( selectionChanged() ), this, SLOT( updateSelectedFeatures() ) );
+      if ( masterModel()->layer()->selectedFeatureCount() > 0 )
+        r.setFilterFids( masterModel()->layer()->selectedFeaturesIds() );
+      break;
+  }
+
+  if ( requiresTableReload )
+  {
+    mMasterModel->setRequest( r );
+    whileBlocking( mLayerCache )->setCacheGeometry( needsGeometry );
+    mMasterModel->loadLayer();
+  }
+
+  //update filter model
   mFilterModel->setFilterMode( filterMode );
   emit filterChanged();
 }
@@ -242,23 +312,21 @@ void QgsDualView::setSelectedOnTop( bool selectedOnTop )
   mFilterModel->setSelectedOnTop( selectedOnTop );
 }
 
-void QgsDualView::initLayerCache( QgsVectorLayer* layer, bool cacheGeometry )
+void QgsDualView::initLayerCache( bool cacheGeometry )
 {
   // Initialize the cache
   QSettings settings;
   int cacheSize = settings.value( "/qgis/attributeTableRowCache", "10000" ).toInt();
-  mLayerCache = new QgsVectorLayerCache( layer, cacheSize, this );
+  mLayerCache = new QgsVectorLayerCache( mLayer, cacheSize, this );
   mLayerCache->setCacheGeometry( cacheGeometry );
-  if ( 0 == cacheSize || 0 == ( QgsVectorDataProvider::SelectAtId & mLayerCache->layer()->dataProvider()->capabilities() ) )
+  if ( 0 == cacheSize || 0 == ( QgsVectorDataProvider::SelectAtId & mLayer->dataProvider()->capabilities() ) )
   {
-    connect( mLayerCache, SIGNAL( progress( int, bool & ) ), this, SLOT( progress( int, bool & ) ) );
-    connect( mLayerCache, SIGNAL( finished() ), this, SLOT( finished() ) );
-
-    mLayerCache->setFullCache( true );
+    connect( mLayerCache, SIGNAL( invalidated() ), this, SLOT( rebuildFullLayerCache() ) );
+    rebuildFullLayerCache();
   }
 }
 
-void QgsDualView::initModels( QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request )
+void QgsDualView::initModels( QgsMapCanvas *mapCanvas, const QgsFeatureRequest &request, bool loadFeatures )
 {
   delete mFeatureListModel;
   delete mFilterModel;
@@ -274,7 +342,8 @@ void QgsDualView::initModels( QgsMapCanvas* mapCanvas, const QgsFeatureRequest&
 
   connect( mConditionalFormatWidget, SIGNAL( rulesUpdated( QString ) ), mMasterModel, SLOT( fieldConditionalStyleChanged( QString ) ) );
 
-  mMasterModel->loadLayer();
+  if ( loadFeatures )
+    mMasterModel->loadLayer();
 
   mFilterModel = new QgsAttributeTableFilterModel( mapCanvas, mMasterModel, mMasterModel );
 
@@ -285,13 +354,13 @@ void QgsDualView::initModels( QgsMapCanvas* mapCanvas, const QgsFeatureRequest&
 
 void QgsDualView::on_mFeatureList_aboutToChangeEditSelection( bool& ok )
 {
-  if ( mLayerCache->layer()->isEditable() && !mAttributeForm->save() )
+  if ( mLayer->isEditable() && !mAttributeForm->save() )
     ok = false;
 }
 
 void QgsDualView::on_mFeatureList_currentEditSelectionChanged( const QgsFeature &feat )
 {
-  if ( !mLayerCache->layer()->isEditable() || mAttributeForm->save() )
+  if ( !mLayer->isEditable() || mAttributeForm->save() )
   {
     mAttributeForm->setFeature( feat );
     setCurrentEditSelection( QgsFeatureIds() << feat.id() );
@@ -346,9 +415,9 @@ void QgsDualView::previewExpressionBuilder()
   QgsExpressionContext context;
   context << QgsExpressionContextUtils::globalScope()
   << QgsExpressionContextUtils::projectScope()
-  << QgsExpressionContextUtils::layerScope( mLayerCache->layer() );
+  << QgsExpressionContextUtils::layerScope( mLayer );
 
-  QgsExpressionBuilderDialog dlg( mLayerCache->layer(), mFeatureList->displayExpression(), this, "generic", context );
+  QgsExpressionBuilderDialog dlg( mLayer, mFeatureList->displayExpression(), this, "generic", context );
   dlg.setWindowTitle( tr( "Expression based preview" ) );
   dlg.setExpressionText( mFeatureList->displayExpression() );
 
@@ -433,15 +502,15 @@ void QgsDualView::viewWillShowContextMenu( QMenu* menu, const QModelIndex& atInd
   }
 
   //add user-defined actions to context menu
-  if ( mLayerCache->layer()->actions()->size() != 0 )
+  if ( mLayer->actions()->size() != 0 )
   {
 
     QAction *a = menu->addAction( tr( "Run layer action" ) );
     a->setEnabled( false );
 
-    for ( int i = 0; i < mLayerCache->layer()->actions()->size(); i++ )
+    for ( int i = 0; i < mLayer->actions()->size(); i++ )
     {
-      const QgsAction &action = mLayerCache->layer()->actions()->at( i );
+      const QgsAction &action = mLayer->actions()->at( i );
 
       if ( !action.runable() )
         continue;
@@ -452,7 +521,7 @@ void QgsDualView::viewWillShowContextMenu( QMenu* menu, const QModelIndex& atInd
   }
 
   //add actions from QgsMapLayerActionRegistry to context menu
-  QList<QgsMapLayerAction *> registeredActions = QgsMapLayerActionRegistry::instance()->mapLayerActions( mLayerCache->layer() );
+  QList<QgsMapLayerAction *> registeredActions = QgsMapLayerActionRegistry::instance()->mapLayerActions( mLayer );
   if ( !registeredActions.isEmpty() )
   {
     //add a separator between user defined and standard actions
@@ -504,12 +573,12 @@ void QgsDualView::showViewHeaderMenu( QPoint point )
 
 void QgsDualView::organizeColumns()
 {
-  if ( !mLayerCache->layer() )
+  if ( !mLayer )
   {
     return;
   }
 
-  QgsOrganizeTableColumnsDialog dialog( mLayerCache->layer(), this );
+  QgsOrganizeTableColumnsDialog dialog( mLayer, this );
   if ( dialog.exec() == QDialog::Accepted )
   {
     QgsAttributeTableConfig config = dialog.config();
@@ -573,8 +642,7 @@ void QgsDualView::autosizeColumn()
 
 void QgsDualView::modifySort()
 {
-  QgsVectorLayer* layer = mLayerCache->layer();
-  if ( !layer )
+  if ( !mLayer )
     return;
 
   QgsAttributeTableConfig config = mConfig;
@@ -598,12 +666,12 @@ void QgsDualView::modifySort()
   QgsExpressionContext context;
   context << QgsExpressionContextUtils::globalScope()
   << QgsExpressionContextUtils::projectScope()
-  << QgsExpressionContextUtils::layerScope( layer );
+  << QgsExpressionContextUtils::layerScope( mLayer );
   expressionBuilder->setExpressionContext( context );
-  expressionBuilder->setLayer( layer );
+  expressionBuilder->setLayer( mLayer );
   expressionBuilder->loadFieldNames();
   expressionBuilder->loadRecent( "generic" );
-  expressionBuilder->setExpressionText( sortExpression().isEmpty() ? layer->displayExpression() : sortExpression() );
+  expressionBuilder->setExpressionText( sortExpression().isEmpty() ? mLayer->displayExpression() : sortExpression() );
 
   sortingGroupBox->layout()->addWidget( expressionBuilder );
 
@@ -644,18 +712,26 @@ void QgsDualView::zoomToCurrentFeature()
   QgsMapCanvas* canvas = mFilterModel->mapCanvas();
   if ( canvas )
   {
-    canvas->zoomToFeatureIds( mLayerCache->layer(), ids );
+    canvas->zoomToFeatureIds( mLayer, ids );
   }
 }
 
+void QgsDualView::rebuildFullLayerCache()
+{
+  connect( mLayerCache, SIGNAL( progress( int, bool& ) ), this, SLOT( progress( int, bool& ) ), Qt::UniqueConnection );
+  connect( mLayerCache, SIGNAL( finished() ), this, SLOT( finished() ), Qt::UniqueConnection );
+
+  mLayerCache->setFullCache( true );
+}
+
 void QgsDualView::previewExpressionChanged( const QString& expression )
 {
-  mLayerCache->layer()->setDisplayExpression( expression );
+  mLayer->setDisplayExpression( expression );
 }
 
 void QgsDualView::onSortColumnChanged()
 {
-  QgsAttributeTableConfig cfg = mLayerCache->layer()->attributeTableConfig();
+  QgsAttributeTableConfig cfg = mLayer->attributeTableConfig();
   cfg.setSortExpression( mFilterModel->sortExpression() );
   cfg.setSortOrder( mFilterModel->sortOrder() );
   setAttributeTableConfig( cfg );
@@ -671,6 +747,34 @@ void QgsDualView::sortByPreviewExpression()
   setSortExpression( mFeatureList->displayExpression(), sortOrder );
 }
 
+void QgsDualView::updateSelectedFeatures()
+{
+  QgsFeatureRequest r = mMasterModel->request();
+  if ( r.filterType() == QgsFeatureRequest::FilterNone && r.filterRect().isNull() )
+    return; // already requested all features
+
+  if ( masterModel()->layer()->selectedFeatureCount() > 0 )
+    r.setFilterFids( masterModel()->layer()->selectedFeaturesIds() );
+  else
+    r.disableFilter();
+  mMasterModel->setRequest( r );
+  mMasterModel->loadLayer();
+  emit filterChanged();
+}
+
+void QgsDualView::extentChanged()
+{
+  QgsFeatureRequest r = mMasterModel->request();
+  if ( mMapCanvas && ( r.filterType() != QgsFeatureRequest::FilterNone || !r.filterRect().isNull() ) )
+  {
+    QgsRectangle rect = mMapCanvas->mapSettings().mapToLayerCoordinates( mLayer, mMapCanvas->extent() );
+    r.setFilterRect( rect );
+    mMasterModel->setRequest( r );
+    mMasterModel->loadLayer();
+  }
+  emit filterChanged();
+}
+
 void QgsDualView::featureFormAttributeChanged()
 {
   mFeatureList->setCurrentFeatureEdited( true );
@@ -699,7 +803,7 @@ void QgsDualView::setFeatureSelectionManager( QgsIFeatureSelectionManager* featu
 
 void QgsDualView::setAttributeTableConfig( const QgsAttributeTableConfig& config )
 {
-  mLayerCache->layer()->setAttributeTableConfig( config );
+  mLayer->setAttributeTableConfig( config );
   mFilterModel->setAttributeTableConfig( config );
   mTableView->setAttributeTableConfig( config );
   mConfig = config;
diff --git a/src/gui/attributetable/qgsdualview.h b/src/gui/attributetable/qgsdualview.h
index 4e6a96e..e1fde12 100644
--- a/src/gui/attributetable/qgsdualview.h
+++ b/src/gui/attributetable/qgsdualview.h
@@ -80,8 +80,11 @@ class GUI_EXPORT QgsDualView : public QStackedWidget, private Ui::QgsDualViewBas
      *                   {@link QgsAttributeTableFilterModel::ShowVisible}
      * @param request    Use a modified request to limit the shown features
      * @param context    The context in which this view is shown
+     * @param loadFeatures whether to initially load all features into the view. If set to
+     * false, limited features can later be loaded using setFilterMode()
      */
-    void init( QgsVectorLayer* layer, QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request = QgsFeatureRequest(), const QgsAttributeEditorContext& context = QgsAttributeEditorContext() );
+    void init( QgsVectorLayer *layer, QgsMapCanvas *mapCanvas, const QgsFeatureRequest &request = QgsFeatureRequest(), const QgsAttributeEditorContext &context = QgsAttributeEditorContext(),
+               bool loadFeatures = true );
 
     /**
      * Change the current view mode.
@@ -294,6 +297,10 @@ class GUI_EXPORT QgsDualView : public QStackedWidget, private Ui::QgsDualViewBas
 
     void sortByPreviewExpression();
 
+    void updateSelectedFeatures();
+
+    void extentChanged();
+
     /**
      * Will be called whenever the currently shown feature form changes.
      * Will forward this signal to the feature list to visually represent
@@ -318,9 +325,11 @@ class GUI_EXPORT QgsDualView : public QStackedWidget, private Ui::QgsDualViewBas
     /** Zooms to the active feature*/
     void zoomToCurrentFeature();
 
+    void rebuildFullLayerCache();
+
   private:
-    void initLayerCache( QgsVectorLayer *layer, bool cacheGeometry );
-    void initModels( QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request );
+    void initLayerCache( bool cacheGeometry );
+    void initModels( QgsMapCanvas* mapCanvas, const QgsFeatureRequest& request, bool loadFeatures );
 
     QgsAttributeEditorContext mEditorContext;
     QgsAttributeTableModel* mMasterModel;
@@ -331,12 +340,14 @@ class GUI_EXPORT QgsDualView : public QStackedWidget, private Ui::QgsDualViewBas
     QMenu* mPreviewColumnsMenu;
     QMenu* mHorizontalHeaderMenu;
     QgsVectorLayerCache* mLayerCache;
+    QgsVectorLayer *mLayer;
     QProgressDialog* mProgressDlg;
     QgsIFeatureSelectionManager* mFeatureSelectionManager;
     QgsDistanceArea mDistanceArea;
     QString mDisplayExpression;
     QgsAttributeTableConfig mConfig;
     QScrollArea* mAttributeEditorScrollArea;
+    QgsMapCanvas *mMapCanvas;
 
     friend class TestQgsDualView;
 };
diff --git a/src/gui/editorwidgets/qgsdefaultsearchwidgetwrapper.cpp b/src/gui/editorwidgets/qgsdefaultsearchwidgetwrapper.cpp
index 860114c..0de00ee 100644
--- a/src/gui/editorwidgets/qgsdefaultsearchwidgetwrapper.cpp
+++ b/src/gui/editorwidgets/qgsdefaultsearchwidgetwrapper.cpp
@@ -100,10 +100,6 @@ QgsSearchWidgetWrapper::FilterFlags QgsDefaultSearchWidgetWrapper::supportedFlag
     case QVariant::Double:
     case QVariant::LongLong:
     case QVariant::ULongLong:
-      //numeric
-      flags |= GreaterThan | LessThan | GreaterThanOrEqualTo | LessThanOrEqualTo;
-      break;
-
     case QVariant::Date:
     case QVariant::DateTime:
     case QVariant::Time:
diff --git a/src/gui/qgsfiledownloader.h b/src/gui/qgsfiledownloader.h
index c9276f7..841e4b6 100644
--- a/src/gui/qgsfiledownloader.h
+++ b/src/gui/qgsfiledownloader.h
@@ -90,8 +90,10 @@ class GUI_EXPORT QgsFileDownloader : public QObject
     void onSslErrors( QNetworkReply *reply, const QList<QSslError> &errors );
 #endif
 
-  private:
+  protected:
     ~QgsFileDownloader();
+
+  private:
     /**
      * Abort current request and show an error if the instance has GUI
      * notifications enabled.
diff --git a/src/gui/qgsmapcanvas.cpp b/src/gui/qgsmapcanvas.cpp
index f1009a4..827bbbb 100644
--- a/src/gui/qgsmapcanvas.cpp
+++ b/src/gui/qgsmapcanvas.cpp
@@ -825,8 +825,11 @@ QgsRectangle QgsMapCanvas::imageRect( const QImage& img, const QgsMapSettings& m
 
 void QgsMapCanvas::mapUpdateTimeout()
 {
-  const QImage& img = mJob->renderedImage();
-  mMap->setContent( img, imageRect( img, mSettings ) );
+  if ( mJob )
+  {
+    const QImage& img = mJob->renderedImage();
+    mMap->setContent( img, imageRect( img, mSettings ) );
+  }
 }
 
 void QgsMapCanvas::stopRendering()
@@ -835,8 +838,10 @@ void QgsMapCanvas::stopRendering()
   {
     QgsDebugMsg( "CANVAS stop rendering!" );
     mJobCancelled = true;
-    mJob->cancel();
-    Q_ASSERT( !mJob ); // no need to delete here: already deleted in finished()
+    disconnect( mJob, SIGNAL( finished() ), this, SLOT( rendererJobFinished() ) );
+    connect( mJob, SIGNAL( finished() ), mJob, SLOT( deleteLater() ) );
+    mJob->cancelWithoutBlocking();
+    mJob = nullptr;
   }
 }
 
diff --git a/src/gui/qgsmessagelogviewer.cpp b/src/gui/qgsmessagelogviewer.cpp
index b2110c3..f2a5553 100644
--- a/src/gui/qgsmessagelogviewer.cpp
+++ b/src/gui/qgsmessagelogviewer.cpp
@@ -46,6 +46,15 @@ QgsMessageLogViewer::~QgsMessageLogViewer()
 {
 }
 
+void QgsMessageLogViewer::closeEvent( QCloseEvent *e )
+{
+  e->ignore();
+}
+
+void QgsMessageLogViewer::reject()
+{
+}
+
 void QgsMessageLogViewer::logMessage( QString message, QString tag, QgsMessageLog::MessageLevel level )
 {
   if ( tag.isNull() )
@@ -55,7 +64,7 @@ void QgsMessageLogViewer::logMessage( QString message, QString tag, QgsMessageLo
   for ( i = 0; i < tabWidget->count() && tabWidget->tabText( i ) != tag; i++ )
     ;
 
-  QPlainTextEdit *w;
+  QPlainTextEdit *w = nullptr;
   if ( i < tabWidget->count() )
   {
     w = qobject_cast<QPlainTextEdit *>( tabWidget->widget( i ) );
@@ -67,6 +76,7 @@ void QgsMessageLogViewer::logMessage( QString message, QString tag, QgsMessageLo
     w->setReadOnly( true );
     tabWidget->addTab( w, tag );
     tabWidget->setCurrentIndex( tabWidget->count() - 1 );
+    tabWidget->setTabsClosable( true );
   }
 
   QString prefix = QString( "%1\t%2\t" )
@@ -78,6 +88,6 @@ void QgsMessageLogViewer::logMessage( QString message, QString tag, QgsMessageLo
 
 void QgsMessageLogViewer::closeTab( int index )
 {
-  if ( tabWidget->count() > 1 )
-    tabWidget->removeTab( index );
+  tabWidget->removeTab( index );
+  tabWidget->setTabsClosable( tabWidget->count() > 1 );
 }
diff --git a/src/gui/qgsmessagelogviewer.h b/src/gui/qgsmessagelogviewer.h
index 3a780eb..ebfb249 100644
--- a/src/gui/qgsmessagelogviewer.h
+++ b/src/gui/qgsmessagelogviewer.h
@@ -24,9 +24,7 @@
 #include <QString>
 
 class QStatusBar;
-class QToolButton;
-class QShowEvent;
-class QHideEvent;
+class QCloseEvent;
 
 /** \ingroup gui
  * A generic dialog widget for displaying QGIS log messages.
@@ -41,6 +39,10 @@ class GUI_EXPORT QgsMessageLogViewer: public QDialog, private Ui::QgsMessageLogV
   public slots:
     void logMessage( QString message, QString tag, QgsMessageLog::MessageLevel level );
 
+  protected:
+    void closeEvent( QCloseEvent *e ) override;
+    void reject() override;
+
   private slots:
     void closeTab( int index );
 };
diff --git a/src/providers/arcgisrest/qgsafsdataitems.cpp b/src/providers/arcgisrest/qgsafsdataitems.cpp
index 52e1c81..73ec8bc 100644
--- a/src/providers/arcgisrest/qgsafsdataitems.cpp
+++ b/src/providers/arcgisrest/qgsafsdataitems.cpp
@@ -37,9 +37,9 @@ QVector<QgsDataItem*> QgsAfsRootItem::createChildren()
 {
   QVector<QgsDataItem*> connections;
 
-  foreach ( QString connName, QgsOWSConnection::connectionList( "ArcGisFeatureServer" ) )
+  foreach ( QString connName, QgsOWSConnection::connectionList( "arcgisfeatureserver" ) )
   {
-    QgsOWSConnection connection( "ArcGisFeatureServer", connName );
+    QgsOWSConnection connection( "arcgisfeatureserver", connName );
     QString path = "afs:/" + connName;
     connections.append( new QgsAfsConnectionItem( this, connName, path, connection.uri().param( "url" ) ) );
   }
@@ -67,8 +67,8 @@ void QgsAfsRootItem::connectionsChanged()
 
 void QgsAfsRootItem::newConnection()
 {
-  QgsNewHttpConnection nc( 0, "/Qgis/connections-afs/" );
-  nc.setWindowTitle( tr( "Create a new AFS connection" ) );
+  QgsNewHttpConnection nc( 0, "/Qgis/connections-arcgisfeatureserver/" );
+  nc.setWindowTitle( tr( "Create a new ArcGISFeatureServer connection" ) );
 
   if ( nc.exec() )
   {
@@ -82,7 +82,7 @@ QgsAfsConnectionItem::QgsAfsConnectionItem( QgsDataItem* parent, const QString &
     : QgsDataCollectionItem( parent, name, path )
     , mUrl( url )
 {
-  mIconName = "mIconAfs.svg";
+  mIconName = "mIconConnect.png";
 }
 
 QVector<QgsDataItem*> QgsAfsConnectionItem::createChildren()
@@ -130,8 +130,8 @@ QList<QAction*> QgsAfsConnectionItem::actions()
 
 void QgsAfsConnectionItem::editConnection()
 {
-  QgsNewHttpConnection nc( 0, "/Qgis/connections-afs/", mName );
-  nc.setWindowTitle( tr( "Modify AFS connection" ) );
+  QgsNewHttpConnection nc( 0, "/Qgis/connections-arcgisfeatureserver/", mName );
+  nc.setWindowTitle( tr( "Modify ArcGISFeatureServer connection" ) );
 
   if ( nc.exec() )
   {
@@ -141,7 +141,7 @@ void QgsAfsConnectionItem::editConnection()
 
 void QgsAfsConnectionItem::deleteConnection()
 {
-  QgsOWSConnection::deleteConnection( "ArcGisFeatureServer", mName );
+  QgsOWSConnection::deleteConnection( "arcgisfeatureserver", mName );
   mParent->refresh();
 }
 
@@ -152,5 +152,5 @@ QgsAfsLayerItem::QgsAfsLayerItem( QgsDataItem* parent, const QString &name, cons
 {
   mUri = QString( "crs='%1' url='%2'" ).arg( authid ).arg( url );
   setState( Populated );
-  mIconName = "mIconConnect.png";
+  mIconName = "mIconAfs.svg";
 }
diff --git a/src/providers/arcgisrest/qgsafsprovider.cpp b/src/providers/arcgisrest/qgsafsprovider.cpp
index 5b6db4a..ecf886f 100644
--- a/src/providers/arcgisrest/qgsafsprovider.cpp
+++ b/src/providers/arcgisrest/qgsafsprovider.cpp
@@ -162,7 +162,7 @@ bool QgsAfsProvider::getFeature( const QgsFeatureId &id, QgsFeature &f, bool fet
   if ( it != mCache.end() )
   {
     f = it.value();
-    return filterRect.isNull() || f.geometry()->intersects( filterRect );
+    return filterRect.isNull() || ( f.geometry() && f.geometry()->intersects( filterRect ) );
   }
 
   // Determine attributes to fetch
diff --git a/src/providers/arcgisrest/qgsafsproviderextern.cpp b/src/providers/arcgisrest/qgsafsproviderextern.cpp
index 2355238..43ef22f 100644
--- a/src/providers/arcgisrest/qgsafsproviderextern.cpp
+++ b/src/providers/arcgisrest/qgsafsproviderextern.cpp
@@ -66,9 +66,9 @@ QGISEXTERN QgsDataItem *dataItem( QString thePath, QgsDataItem *parentItem )
   if ( thePath.startsWith( "afs:/" ) )
   {
     QString connectionName = thePath.split( '/' ).last();
-    if ( QgsOWSConnection::connectionList( "ArcGisFeatureServer" ).contains( connectionName ) )
+    if ( QgsOWSConnection::connectionList( "arcgisfeatureserver" ).contains( connectionName ) )
     {
-      QgsOWSConnection connection( "ArcGisFeatureServer", connectionName );
+      QgsOWSConnection connection( "arcgisfeatureserver", connectionName );
       return new QgsAfsConnectionItem( parentItem, "ArcGisFeatureServer", thePath, connection.uri().param( "url" ) );
     }
   }
diff --git a/src/providers/arcgisrest/qgsamsdataitems.cpp b/src/providers/arcgisrest/qgsamsdataitems.cpp
index 2ec70c8..231b939 100644
--- a/src/providers/arcgisrest/qgsamsdataitems.cpp
+++ b/src/providers/arcgisrest/qgsamsdataitems.cpp
@@ -33,9 +33,9 @@ QVector<QgsDataItem*> QgsAmsRootItem::createChildren()
 {
   QVector<QgsDataItem*> connections;
 
-  foreach ( QString connName, QgsOWSConnection::connectionList( "ArcGisMapServer" ) )
+  foreach ( QString connName, QgsOWSConnection::connectionList( "arcgismapserver" ) )
   {
-    QgsOWSConnection connection( "ArcGisMapServer", connName );
+    QgsOWSConnection connection( "arcgismapserver", connName );
     QString path = "ams:/" + connName;
     connections.append( new QgsAmsConnectionItem( this, connName, path, connection.uri().param( "url" ) ) );
   }
@@ -64,8 +64,8 @@ void QgsAmsRootItem::connectionsChanged()
 
 void QgsAmsRootItem::newConnection()
 {
-  QgsNewHttpConnection nc( 0 );
-  nc.setWindowTitle( tr( "Create a new AMS connection" ) );
+  QgsNewHttpConnection nc( 0, "/Qgis/connections-arcgismapserver/" );
+  nc.setWindowTitle( tr( "Create a new ArcGisMapServer connection" ) );
 
   if ( nc.exec() )
   {
@@ -79,7 +79,7 @@ QgsAmsConnectionItem::QgsAmsConnectionItem( QgsDataItem* parent, QString name, Q
     : QgsDataCollectionItem( parent, name, path )
     , mUrl( url )
 {
-  mIconName = "mIconAms.png";
+  mIconName = "mIconConnect.png";
 }
 
 QVector<QgsDataItem*> QgsAmsConnectionItem::createChildren()
@@ -145,8 +145,8 @@ QList<QAction*> QgsAmsConnectionItem::actions()
 
 void QgsAmsConnectionItem::editConnection()
 {
-  QgsNewHttpConnection nc( 0, "/Qgis/connections-afs/", mName );
-  nc.setWindowTitle( tr( "Modify AMS connection" ) );
+  QgsNewHttpConnection nc( 0, "/Qgis/connections-arcgismapserver/", mName );
+  nc.setWindowTitle( tr( "Modify ArcGisMapServer connection" ) );
 
   if ( nc.exec() )
   {
@@ -156,7 +156,7 @@ void QgsAmsConnectionItem::editConnection()
 
 void QgsAmsConnectionItem::deleteConnection()
 {
-  QgsOWSConnection::deleteConnection( "ArcGisMapServer", mName );
+  QgsOWSConnection::deleteConnection( "arcgismapserver", mName );
   mParent->refresh();
 }
 
diff --git a/src/providers/arcgisrest/qgsamsproviderextern.cpp b/src/providers/arcgisrest/qgsamsproviderextern.cpp
index 0ade783..9051d7c 100644
--- a/src/providers/arcgisrest/qgsamsproviderextern.cpp
+++ b/src/providers/arcgisrest/qgsamsproviderextern.cpp
@@ -66,9 +66,9 @@ QGISEXTERN QgsDataItem *dataItem( QString thePath, QgsDataItem *parentItem )
   if ( thePath.startsWith( "ams:/" ) )
   {
     QString connectionName = thePath.split( '/' ).last();
-    if ( QgsOWSConnection::connectionList( "ArcGisMapServer" ).contains( connectionName ) )
+    if ( QgsOWSConnection::connectionList( "arcgismapserver" ).contains( connectionName ) )
     {
-      QgsOWSConnection connection( "ArcGisMapServer", connectionName );
+      QgsOWSConnection connection( "arcgismapserver", connectionName );
       return new QgsAmsConnectionItem( parentItem, "ArcGisMapServer", thePath, connection.uri().param( "url" ) );
     }
   }
diff --git a/src/providers/ogr/qgsogrprovider.cpp b/src/providers/ogr/qgsogrprovider.cpp
index 5d2497d..0a529d7 100644
--- a/src/providers/ogr/qgsogrprovider.cpp
+++ b/src/providers/ogr/qgsogrprovider.cpp
@@ -57,6 +57,13 @@ email                : sherman at mrcc.com
 #include <sys/vfs.h>
 #endif
 
+// Starting with GDAL 2.2, there are 2 concepts: unset fields and null fields
+// whereas previously there was only unset fields. For QGIS purposes, both
+// states (unset/null) are equivalent.
+#ifndef OGRNullMarker
+#define OGR_F_IsFieldSetAndNotNull OGR_F_IsFieldSet
+#endif
+
 static const QString TEXT_PROVIDER_KEY = "ogr";
 static const QString TEXT_PROVIDER_DESCRIPTION =
   QString( "OGR data provider" )
@@ -1306,6 +1313,8 @@ bool QgsOgrProvider::addFeatures( QgsFeatureList & flist )
 
   setRelevantFields( ogrLayer, true, attributeIndexes() );
 
+  const bool inTransaction = startTransaction();
+
   bool returnvalue = true;
   for ( QgsFeatureList::iterator it = flist.begin(); it != flist.end(); ++it )
   {
@@ -1315,6 +1324,11 @@ bool QgsOgrProvider::addFeatures( QgsFeatureList & flist )
     }
   }
 
+  if ( inTransaction )
+  {
+    commitTransaction();
+  }
+
   if ( !syncToDisc() )
   {
     returnvalue = false;
@@ -1521,6 +1535,31 @@ bool QgsOgrProvider::renameAttributes( const QgsFieldNameMap& renamedAttributes
 #endif
 }
 
+bool QgsOgrProvider::startTransaction()
+{
+  bool inTransaction = false;
+  if ( OGR_L_TestCapability( ogrLayer, OLCTransactions ) )
+  {
+    // A transaction might already be active, so be robust on failed
+    // StartTransaction.
+    CPLPushErrorHandler( CPLQuietErrorHandler );
+    inTransaction = ( OGR_L_StartTransaction( ogrLayer ) == OGRERR_NONE );
+    CPLPopErrorHandler();
+  }
+  return inTransaction;
+}
+
+
+bool QgsOgrProvider::commitTransaction()
+{
+  if ( OGR_L_CommitTransaction( ogrLayer ) != OGRERR_NONE )
+  {
+    pushError( tr( "OGR error committing transaction: %1" ).arg( CPLGetLastErrorMsg() ) );
+    return false;
+  }
+  return true;
+}
+
 
 bool QgsOgrProvider::changeAttributeValues( const QgsChangedAttributesMap &attr_map )
 {
@@ -1534,6 +1573,8 @@ bool QgsOgrProvider::changeAttributeValues( const QgsChangedAttributesMap &attr_
 
   setRelevantFields( ogrLayer, true, attributeIndexes() );
 
+  const bool inTransaction = startTransaction();
+
   for ( QgsChangedAttributesMap::const_iterator it = attr_map.begin(); it != attr_map.end(); ++it )
   {
     QgsFeatureId fid = it.key();
@@ -1649,6 +1690,11 @@ bool QgsOgrProvider::changeAttributeValues( const QgsChangedAttributesMap &attr_
     OGR_F_Destroy( of );
   }
 
+  if ( inTransaction )
+  {
+    commitTransaction();
+  }
+
   if ( OGR_L_SyncToDisk( ogrLayer ) != OGRERR_NONE )
   {
     pushError( tr( "OGR error syncing to disk: %1" ).arg( CPLGetLastErrorMsg() ) );
@@ -1664,6 +1710,8 @@ bool QgsOgrProvider::changeGeometryValues( const QgsGeometryMap &geometry_map )
 
   setRelevantFields( ogrLayer, true, attributeIndexes() );
 
+  const bool inTransaction = startTransaction();
+
   for ( QgsGeometryMap::const_iterator it = geometry_map.constBegin(); it != geometry_map.constEnd(); ++it )
   {
     if ( FID_TO_NUMBER( it.key() ) > std::numeric_limits<long>::max() )
@@ -1731,6 +1779,12 @@ bool QgsOgrProvider::changeGeometryValues( const QgsGeometryMap &geometry_map )
 
     OGR_F_Destroy( theOGRFeature );
   }
+
+  if ( inTransaction )
+  {
+    commitTransaction();
+  }
+
   QgsOgrConnPool::instance()->invalidateConnections( dataSourceUri() );
   return syncToDisc();
 }
@@ -1780,6 +1834,8 @@ bool QgsOgrProvider::deleteFeatures( const QgsFeatureIds & id )
   if ( !doInitialActionsForEdition() )
     return false;
 
+  const bool inTransaction = startTransaction();
+
   bool returnvalue = true;
   for ( QgsFeatureIds::const_iterator it = id.begin(); it != id.end(); ++it )
   {
@@ -1789,6 +1845,11 @@ bool QgsOgrProvider::deleteFeatures( const QgsFeatureIds & id )
     }
   }
 
+  if ( inTransaction )
+  {
+    commitTransaction();
+  }
+
   if ( !syncToDisc() )
   {
     returnvalue = false;
@@ -2859,7 +2920,7 @@ void QgsOgrProvider::uniqueValues( int index, QList<QVariant> &uniqueValues, int
   OGRFeatureH f;
   while (( f = OGR_L_GetNextFeature( l ) ) )
   {
-    uniqueValues << ( OGR_F_IsFieldSet( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() ) );
+    uniqueValues << ( OGR_F_IsFieldSetAndNotNull( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() ) );
     OGR_F_Destroy( f );
 
     if ( limit >= 0 && uniqueValues.size() >= limit )
@@ -2901,7 +2962,7 @@ QVariant QgsOgrProvider::minimumValue( int index )
     return QVariant();
   }
 
-  QVariant value = OGR_F_IsFieldSet( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() );
+  QVariant value = OGR_F_IsFieldSetAndNotNull( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() );
   OGR_F_Destroy( f );
 
   OGR_DS_ReleaseResultSet( ogrDataSource, l );
@@ -2940,7 +3001,7 @@ QVariant QgsOgrProvider::maximumValue( int index )
     return QVariant();
   }
 
-  QVariant value = OGR_F_IsFieldSet( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() );
+  QVariant value = OGR_F_IsFieldSetAndNotNull( f, 0 ) ? convertValue( fld.type(), mEncoding->toUnicode( OGR_F_GetFieldAsString( f, 0 ) ) ) : QVariant( fld.type() );
   OGR_F_Destroy( f );
 
   OGR_DS_ReleaseResultSet( ogrDataSource, l );
diff --git a/src/providers/ogr/qgsogrprovider.h b/src/providers/ogr/qgsogrprovider.h
index de8a0fb..ca99c4e 100644
--- a/src/providers/ogr/qgsogrprovider.h
+++ b/src/providers/ogr/qgsogrprovider.h
@@ -305,6 +305,13 @@ class QgsOgrProvider : public QgsVectorDataProvider
   private:
     unsigned char *getGeometryPointer( OGRFeatureH fet );
     QString ogrWkbGeometryTypeName( OGRwkbGeometryType type ) const;
+
+    //! Starts a transaction if possible and return true in that case
+    bool startTransaction();
+
+    //! Commits a transaction
+    bool commitTransaction();
+
     QgsFields mAttributeFields;
     bool mFirstFieldIsFid;
     OGRDataSourceH ogrDataSource;
diff --git a/src/providers/wfs/qgswfscapabilities.cpp b/src/providers/wfs/qgswfscapabilities.cpp
index 9a96433..ebc21da 100644
--- a/src/providers/wfs/qgswfscapabilities.cpp
+++ b/src/providers/wfs/qgswfscapabilities.cpp
@@ -142,6 +142,32 @@ void QgsWFSCapabilities::capabilitiesReplyFinished()
   // Note: for conveniency, we do not use the elementsByTagNameNS() method as
   // the WFS and OWS namespaces URI are not the same in all versions
 
+  if ( mCaps.version.startsWith( QLatin1String( "1.0" ) ) )
+  {
+    QDomElement capabilityElem = doc.firstChildElement( "Capability" );
+    if ( !capabilityElem.isNull() )
+    {
+      QDomElement requestElem = capabilityElem.firstChildElement( "Request" );
+      if ( !requestElem.isNull() )
+      {
+        QDomElement getFeatureElem = requestElem.firstChildElement( "GetFeature" );
+        if ( !getFeatureElem.isNull() )
+        {
+          QDomElement resultFormatElem = getFeatureElem.firstChildElement( "ResultFormat" );
+          if ( !resultFormatElem.isNull() )
+          {
+            QDomElement child = resultFormatElem.firstChildElement();
+            while ( !child.isNull() )
+            {
+              mCaps.outputFormats << child.tagName();
+              child = child.nextSiblingElement();
+            }
+          }
+        }
+      }
+    }
+  }
+
   // find <ows:OperationsMetadata>
   QDomElement operationsMetadataElem = doc.firstChildElement( "OperationsMetadata" );
   if ( !operationsMetadataElem.isNull() )
@@ -231,6 +257,15 @@ void QgsWFSCapabilities::capabilitiesReplyFinished()
               }
             }
           }
+          else if ( parameter.attribute( "name" ) == QLatin1String( "outputFormat" ) )
+          {
+            QDomNodeList valueList = parameter.elementsByTagName( "Value" );
+            for ( int k = 0; k < valueList.size(); ++k )
+            {
+              QDomElement value = valueList.at( k ).toElement();
+              mCaps.outputFormats << value.text();
+            }
+          }
         }
 
         break;
diff --git a/src/providers/wfs/qgswfscapabilities.h b/src/providers/wfs/qgswfscapabilities.h
index 9b67e56..e4496e0 100644
--- a/src/providers/wfs/qgswfscapabilities.h
+++ b/src/providers/wfs/qgswfscapabilities.h
@@ -97,6 +97,7 @@ class QgsWFSCapabilities : public QgsWFSRequest
       QList<Function> spatialPredicatesList;
       QList<Function> functionList;
       bool useEPSGColumnFormat; // whether to use EPSG:XXXX srsname
+      QList< QString > outputFormats;
 
       QSet< QString > setAllTypenames;
       QMap< QString, QString> mapUnprefixedTypenameToPrefixedTypename;
diff --git a/src/providers/wfs/qgswfsconstants.cpp b/src/providers/wfs/qgswfsconstants.cpp
index 87ff72f..3a359ed 100644
--- a/src/providers/wfs/qgswfsconstants.cpp
+++ b/src/providers/wfs/qgswfsconstants.cpp
@@ -31,6 +31,7 @@ const QString QgsWFSConstants::URI_PARAM_TYPENAME( "typename" );
 const QString QgsWFSConstants::URI_PARAM_SRSNAME( "srsname" );
 const QString QgsWFSConstants::URI_PARAM_BBOX( "bbox" );
 const QString QgsWFSConstants::URI_PARAM_FILTER( "filter" );
+const QString QgsWFSConstants::URI_PARAM_OUTPUTFORMAT( "outputformat" );
 const QString QgsWFSConstants::URI_PARAM_RESTRICT_TO_REQUEST_BBOX( "restrictToRequestBBOX" );
 const QString QgsWFSConstants::URI_PARAM_MAXNUMFEATURES( "maxNumFeatures" );
 const QString QgsWFSConstants::URI_PARAM_IGNOREAXISORIENTATION( "IgnoreAxisOrientation" );
diff --git a/src/providers/wfs/qgswfsconstants.h b/src/providers/wfs/qgswfsconstants.h
index 23345ac..c5d99a2 100644
--- a/src/providers/wfs/qgswfsconstants.h
+++ b/src/providers/wfs/qgswfsconstants.h
@@ -38,6 +38,7 @@ struct QgsWFSConstants
   static const QString URI_PARAM_TYPENAME;
   static const QString URI_PARAM_SRSNAME;
   static const QString URI_PARAM_FILTER;
+  static const QString URI_PARAM_OUTPUTFORMAT;
   static const QString URI_PARAM_BBOX;
   static const QString URI_PARAM_RESTRICT_TO_REQUEST_BBOX;
   static const QString URI_PARAM_MAXNUMFEATURES;
diff --git a/src/providers/wfs/qgswfsdatasourceuri.cpp b/src/providers/wfs/qgswfsdatasourceuri.cpp
index 67d21cc..f52352d 100644
--- a/src/providers/wfs/qgswfsdatasourceuri.cpp
+++ b/src/providers/wfs/qgswfsdatasourceuri.cpp
@@ -22,13 +22,14 @@
 QgsWFSDataSourceURI::QgsWFSDataSourceURI( const QString& uri )
     : mURI( uri )
 {
-  // Compatiblity with QGIS < 2.16 layer URI of the format
+  typedef QPair<QString, QString> queryItem;
+
+  // Compatibility with QGIS < 2.16 layer URI of the format
   // http://example.com/?SERVICE=WFS&VERSION=1.0.0&REQUEST=GetFeature&TYPENAME=x&SRSNAME=y&username=foo&password=
   if ( !mURI.hasParam( QgsWFSConstants::URI_PARAM_URL ) )
   {
     QUrl url( uri );
     // Transform all param keys to lowercase
-    typedef QPair<QString, QString> queryItem;
     QList<queryItem> items( url.queryItems() );
     foreach ( queryItem item, items )
     {
@@ -41,6 +42,7 @@ QgsWFSDataSourceURI::QgsWFSDataSourceURI( const QString& uri )
     QString typeName = url.queryItemValue( QgsWFSConstants::URI_PARAM_TYPENAME );
     QString version = url.queryItemValue( QgsWFSConstants::URI_PARAM_VERSION );
     QString filter = url.queryItemValue( QgsWFSConstants::URI_PARAM_FILTER );
+    QString outputFormat = url.queryItemValue( QgsWFSConstants::URI_PARAM_OUTPUTFORMAT );
     mAuth.mAuthCfg = url.queryItemValue( QgsWFSConstants::URI_PARAM_AUTHCFG );
     // NOTE: A defined authcfg overrides any older username/password auth
     //       Only check for older auth if it is undefined
@@ -56,13 +58,14 @@ QgsWFSDataSourceURI::QgsWFSDataSourceURI( const QString& uri )
     }
 
     // Now remove all stuff that is not the core URL
-    url.removeQueryItem( "SERVICE" );
-    url.removeQueryItem( "VERSION" );
-    url.removeQueryItem( "TYPENAME" );
-    url.removeQueryItem( "REQUEST" );
-    url.removeQueryItem( "BBOX" );
-    url.removeQueryItem( "SRSNAME" );
-    url.removeQueryItem( "FILTER" );
+    url.removeQueryItem( "service" );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_VERSION );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_TYPENAME );
+    url.removeQueryItem( "request" );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_BBOX );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_SRSNAME );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_FILTER );
+    url.removeQueryItem( QgsWFSConstants::URI_PARAM_OUTPUTFORMAT );
     url.removeQueryItem( QgsWFSConstants::URI_PARAM_USERNAME );
     url.removeQueryItem( QgsWFSConstants::URI_PARAM_PASSWORD );
     url.removeQueryItem( QgsWFSConstants::URI_PARAM_AUTHCFG );
@@ -72,6 +75,7 @@ QgsWFSDataSourceURI::QgsWFSDataSourceURI( const QString& uri )
     setTypeName( typeName );
     setSRSName( srsname );
     setVersion( version );
+    setOutputFormat( outputFormat );
 
     //if the xml comes from the dialog, it needs to be a string to pass the validity test
     if ( filter.startsWith( '\'' ) && filter.endsWith( '\'' ) && filter.size() > 1 )
@@ -86,6 +90,40 @@ QgsWFSDataSourceURI::QgsWFSDataSourceURI( const QString& uri )
   }
   else
   {
+    QUrl url( mURI.param( QgsWFSConstants::URI_PARAM_URL ) );
+    bool URLModified = false;
+    bool somethingChanged = false;
+    do
+    {
+      somethingChanged = false;
+      QList<queryItem> items( url.queryItems() );
+      Q_FOREACH ( const queryItem &item, items )
+      {
+        const QString lowerName( item.first.toLower() );
+        if ( lowerName == QgsWFSConstants::URI_PARAM_OUTPUTFORMAT )
+        {
+          setOutputFormat( item.second );
+          url.removeQueryItem( item.first );
+          somethingChanged = true;
+          URLModified = true;
+          break;
+        }
+        else if ( lowerName == QLatin1String( "service" ) ||
+                  lowerName == QLatin1String( "request" ) )
+        {
+          url.removeQueryItem( item.first );
+          somethingChanged = true;
+          URLModified = true;
+          break;
+        }
+      }
+    }
+    while ( somethingChanged );
+    if ( URLModified )
+    {
+      mURI.setParam( QgsWFSConstants::URI_PARAM_URL, url.toEncoded() );
+    }
+
     mAuth.mUserName = mURI.username();
     mAuth.mPassword = mURI.password();
     mAuth.mAuthCfg = mURI.authConfigId();
@@ -201,6 +239,18 @@ void QgsWFSDataSourceURI::setSql( const QString& sql )
   mURI.setSql( sql );
 }
 
+QString QgsWFSDataSourceURI::outputFormat() const
+{
+  return mURI.param( QgsWFSConstants::URI_PARAM_OUTPUTFORMAT );
+}
+
+void QgsWFSDataSourceURI::setOutputFormat( const QString &outputFormat )
+{
+  mURI.removeParam( QgsWFSConstants::URI_PARAM_OUTPUTFORMAT );
+  if ( !outputFormat.isEmpty() )
+    mURI.setParam( QgsWFSConstants::URI_PARAM_OUTPUTFORMAT, outputFormat );
+}
+
 bool QgsWFSDataSourceURI::isRestrictedToRequestBBOX() const
 {
   if ( mURI.hasParam( QgsWFSConstants::URI_PARAM_RESTRICT_TO_REQUEST_BBOX ) &&
diff --git a/src/providers/wfs/qgswfsdatasourceuri.h b/src/providers/wfs/qgswfsdatasourceuri.h
index d703fa9..57c605c 100644
--- a/src/providers/wfs/qgswfsdatasourceuri.h
+++ b/src/providers/wfs/qgswfsdatasourceuri.h
@@ -117,7 +117,13 @@ class QgsWFSDataSourceURI
     /** Set SQL query */
     void setSql( const QString& sql );
 
-    /** Returns whether GetFeature request should include the request bounding box. Defaults to false */
+    //! Get GetFeature output format
+    QString outputFormat() const;
+
+    //! Set GetFeature output format
+    void setOutputFormat( const QString &outputFormat );
+
+    //! Returns whether GetFeature request should include the request bounding box. Defaults to false
     bool isRestrictedToRequestBBOX() const;
 
     /** Returns whether axis orientation should be ignored (for WFS >= 1.1). Defaults to false */
diff --git a/src/providers/wfs/qgswfsfeatureiterator.cpp b/src/providers/wfs/qgswfsfeatureiterator.cpp
index 356aa7e..ee52aef 100644
--- a/src/providers/wfs/qgswfsfeatureiterator.cpp
+++ b/src/providers/wfs/qgswfsfeatureiterator.cpp
@@ -328,6 +328,30 @@ QUrl QgsWFSFeatureDownloader::buildURL( int startIndex, int maxFeatures, bool fo
     getFeatureUrl.addQueryItem( "SORTBY", mShared->mSortBy );
   }
 
+  if ( !forHits && !mShared->mURI.outputFormat().isEmpty() )
+  {
+    getFeatureUrl.addQueryItem( "OUTPUTFORMAT", mShared->mURI.outputFormat() );
+  }
+  else if ( !forHits && mShared->mWFSVersion.startsWith( QLatin1String( "1.0" ) ) )
+  {
+    QStringList list;
+    list << QLatin1String( "text/xml; subtype=gml/3.2.1" );
+    list << QLatin1String( "application/gml+xml; version=3.2" );
+    list << QLatin1String( "text/xml; subtype=gml/3.1.1" );
+    list << QLatin1String( "application/gml+xml; version=3.1" );
+    list << QLatin1String( "text/xml; subtype=gml/3.0.1" );
+    list << QLatin1String( "application/gml+xml; version=3.0" );
+    list << QLatin1String( "GML3" );
+    Q_FOREACH ( const QString &format, list )
+    {
+      if ( mShared->mCaps.outputFormats.contains( format ) )
+      {
+        getFeatureUrl.addQueryItem( "OUTPUTFORMAT", format );
+        break;
+      }
+    }
+  }
+
   return getFeatureUrl;
 }
 
diff --git a/src/providers/wms/qgswmscapabilities.cpp b/src/providers/wms/qgswmscapabilities.cpp
index 9b5f12b..b518749 100644
--- a/src/providers/wms/qgswmscapabilities.cpp
+++ b/src/providers/wms/qgswmscapabilities.cpp
@@ -41,6 +41,22 @@ bool QgsWmsSettings::parseUri( const QString& uriString )
   QgsDataSourceURI uri;
   uri.setEncodedUri( uriString );
 
+  // Setup authentication
+  mAuth.mUserName = uri.param( "username" );
+  QgsDebugMsg( "set username to " + mAuth.mUserName );
+
+  mAuth.mPassword = uri.param( "password" );
+  QgsDebugMsg( "set password to " + mAuth.mPassword );
+
+  if ( uri.hasParam( "authcfg" ) )
+  {
+    mAuth.mAuthCfg = uri.param( "authcfg" );
+  }
+  QgsDebugMsg( "set authcfg to " + mAuth.mAuthCfg );
+
+  mAuth.mReferer = uri.param( "referer" );
+  QgsDebugMsg( "set referer to " + mAuth.mReferer );
+
   mXyz = false;  // assume WMS / WMTS
 
   if ( uri.param( "type" ) == "xyz" )
@@ -54,10 +70,6 @@ bool QgsWmsSettings::parseUri( const QString& uriString )
     mMaxHeight = 0;
     mHttpUri = uri.param( "url" );
     mBaseUrl = mHttpUri;
-    mAuth.mUserName.clear();
-    mAuth.mPassword.clear();
-    mAuth.mReferer.clear();
-    mAuth.mAuthCfg.clear();
     mIgnoreGetMapUrl = false;
     mIgnoreGetFeatureInfoUrl = false;
     mSmoothPixmapTransform = true;
@@ -87,21 +99,6 @@ bool QgsWmsSettings::parseUri( const QString& uriString )
 
   mDpiMode = uri.hasParam( "dpiMode" ) ? static_cast< QgsWmsDpiMode >( uri.param( "dpiMode" ).toInt() ) : dpiAll;
 
-  mAuth.mUserName = uri.param( "username" );
-  QgsDebugMsg( "set username to " + mAuth.mUserName );
-
-  mAuth.mPassword = uri.param( "password" );
-  QgsDebugMsg( "set password to " + mAuth.mPassword );
-
-  if ( uri.hasParam( "authcfg" ) )
-  {
-    mAuth.mAuthCfg = uri.param( "authcfg" );
-  }
-  QgsDebugMsg( "set authcfg to " + mAuth.mAuthCfg );
-
-  mAuth.mReferer = uri.param( "referer" );
-  QgsDebugMsg( "set referer to " + mAuth.mReferer );
-
   mActiveSubLayers = uri.params( "layers" );
   mActiveSubStyles = uri.params( "styles" );
   QgsDebugMsg( "Entering: layers:" + mActiveSubLayers.join( ", " ) + ", styles:" + mActiveSubStyles.join( ", " ) );
diff --git a/src/server/qgsmslayercache.cpp b/src/server/qgsmslayercache.cpp
index 8e9ac78..78639e3 100644
--- a/src/server/qgsmslayercache.cpp
+++ b/src/server/qgsmslayercache.cpp
@@ -34,6 +34,7 @@ QgsMSLayerCache* QgsMSLayerCache::instance()
 QgsMSLayerCache::QgsMSLayerCache()
     : mProjectMaxLayers( 0 )
 {
+  QgsMessageLog::logMessage( "QgsMSLayerCache initialized", "Server", QgsMessageLog::INFO );
   mDefaultMaxLayers = 100;
   //max layer from environment variable overrides default
   char* maxLayerEnv = getenv( "MAX_CACHE_LAYERS" );
@@ -61,7 +62,7 @@ QgsMSLayerCache::~QgsMSLayerCache()
 
 void QgsMSLayerCache::insertLayer( const QString& url, const QString& layerName, QgsMapLayer* layer, const QString& configFile, const QList<QString>& tempFiles )
 {
-  QgsMessageLog::logMessage( "Layer cache: insert Layer '" + layerName + "' configFile: " + configFile, "Server", QgsMessageLog::INFO );
+  QgsMessageLog::logMessage( "Layer cache: insert Layer '" + layerName + "' url: '" + url + "' configFile: " + configFile, "Server", QgsMessageLog::INFO );
   if ( mEntries.size() > qMax( mDefaultMaxLayers, mProjectMaxLayers ) ) //force cache layer examination after 10 inserted layers
   {
     updateEntries();
@@ -100,7 +101,7 @@ QgsMapLayer* QgsMSLayerCache::searchLayer( const QString& url, const QString& la
   QPair<QString, QString> urlNamePair = qMakePair( url, layerName );
   if ( !mEntries.contains( urlNamePair ) )
   {
-    QgsMessageLog::logMessage( "Layer '" + layerName + "' configFile: " + configFile + " not found in layer cache'", "Server", QgsMessageLog::INFO );
+    QgsMessageLog::logMessage( "Layer '" + layerName + "' url: '" + url + "' configFile: " + configFile + " not found in layer cache'", "Server", QgsMessageLog::INFO );
     return nullptr;
   }
   else
@@ -112,11 +113,11 @@ QgsMapLayer* QgsMSLayerCache::searchLayer( const QString& url, const QString& la
       if ( configFile.isEmpty() || layerIt->configFile == configFile )
       {
         layerIt->lastUsedTime = time( nullptr );
-        QgsMessageLog::logMessage( "Layer '" + layerName + "' configFile: " + configFile + " found in layer cache", "Server", QgsMessageLog::INFO );
+        QgsMessageLog::logMessage( "Layer '" + layerName + "' url: '" + url + "' configFile: " + configFile + " found in layer cache", "Server", QgsMessageLog::INFO );
         return layerIt->layerPointer;
       }
     }
-    QgsMessageLog::logMessage( "Layer '" + layerName + "' configFile: " + configFile + " not found in layer cache'", "Server", QgsMessageLog::INFO );
+    QgsMessageLog::logMessage( "Layer '" + layerName + "' url: '" + url + "' configFile: " + configFile + " not found in layer cache'", "Server", QgsMessageLog::INFO );
     return nullptr;
   }
 }
diff --git a/src/server/qgswcsprojectparser.cpp b/src/server/qgswcsprojectparser.cpp
index 797ac0f..ea4e42c 100644
--- a/src/server/qgswcsprojectparser.cpp
+++ b/src/server/qgswcsprojectparser.cpp
@@ -435,20 +435,24 @@ QList<QgsMapLayer*> QgsWCSProjectParser::mapLayerFromCoverage( const QString& cN
     QString type = elem.attribute( "type" );
     if ( type == "raster" )
     {
+      QString id = mProjectParser->layerId( elem );
+      if ( !wcsLayersId.contains( id ) )
+        continue;
+
+      QString coveName = mProjectParser->layerShortName( elem );
+      if ( coveName.isEmpty() )
+        coveName = mProjectParser->layerName( elem );
+      coveName = coveName.replace( " ", "_" );
+
+      if ( coveName != cName )
+        continue;
+
       QgsMapLayer *mLayer = mProjectParser->createLayerFromElement( elem, useCache );
       QgsRasterLayer* layer = qobject_cast<QgsRasterLayer*>( mLayer );
-      if ( !layer || !wcsLayersId.contains( layer->id() ) )
-        return layerList;
+      if ( !layer )
+        continue;
 
-      QString coveName = layer->name();
-      if ( !layer->shortName().isEmpty() )
-        coveName = layer->shortName();
-      coveName = coveName.replace( " ", "_" );
-      if ( cName == coveName )
-      {
-        layerList.push_back( mLayer );
-        return layerList;
-      }
+      layerList.push_back( mLayer );
     }
   }
   return layerList;
diff --git a/src/server/qgswfsprojectparser.cpp b/src/server/qgswfsprojectparser.cpp
index 50a21a0..ad4910a 100644
--- a/src/server/qgswfsprojectparser.cpp
+++ b/src/server/qgswfsprojectparser.cpp
@@ -564,18 +564,24 @@ QList<QgsMapLayer*> QgsWFSProjectParser::mapLayerFromTypeName( const QString& aT
     QString type = elem.attribute( "type" );
     if ( type == "vector" )
     {
+      QString id = mProjectParser->layerId( elem );
+      if ( !wfsLayersId.contains( id ) )
+        continue;
+
+      QString typeName = mProjectParser->layerShortName( elem );
+      if ( typeName.isEmpty() )
+        typeName = mProjectParser->layerName( elem );
+      typeName = typeName.replace( " ", "_" );
+
+      if ( !aTypeName.isEmpty() && !typeNameList.contains( typeName ) )
+        continue;
+
       QgsMapLayer *mLayer = mProjectParser->createLayerFromElement( elem );
       QgsVectorLayer* layer = qobject_cast<QgsVectorLayer*>( mLayer );
       if ( !layer )
         continue;
 
-      QString typeName = layer->name();
-      if ( !layer->shortName().isEmpty() )
-        typeName = layer->shortName();
-      typeName = typeName.replace( " ", "_" );
-
-      if ( wfsLayersId.contains( layer->id() ) && ( aTypeName == "" || typeNameList.contains( typeName ) ) )
-        layerList.push_back( mLayer );
+      layerList.push_back( mLayer );
     }
   }
   return layerList;
diff --git a/tests/src/app/testqgsattributetable.cpp b/tests/src/app/testqgsattributetable.cpp
index 3e89250..5bec444 100644
--- a/tests/src/app/testqgsattributetable.cpp
+++ b/tests/src/app/testqgsattributetable.cpp
@@ -40,6 +40,7 @@ class TestQgsAttributeTable : public QObject
     void cleanup() {} // will be called after every testfunction.
     void testFieldCalculation();
     void testFieldCalculationArea();
+    void testNoGeom();
 
   private:
     QgisApp * mQgisApp;
@@ -59,6 +60,13 @@ void TestQgsAttributeTable::initTestCase()
   QgsApplication::init();
   QgsApplication::initQgis();
   mQgisApp = new QgisApp();
+
+  // setup the test QSettings environment
+  QCoreApplication::setOrganizationName( QString( "QGIS" ) );
+  QCoreApplication::setOrganizationDomain( QString( "qgis.org" ) );
+  QCoreApplication::setApplicationName( QString( "QGIS-TEST" ) );
+
+  QSettings().setValue( QString( "/qgis/attributeTableBehavior" ), QgsAttributeTableFilterModel::ShowAll );
 }
 
 //runs after all tests
@@ -168,5 +176,36 @@ void TestQgsAttributeTable::testFieldCalculationArea()
   QVERIFY( qgsDoubleNear( f.attribute( "col1" ).toDouble(), expected, 0.001 ) );
 }
 
+void TestQgsAttributeTable::testNoGeom()
+{
+  //test that by default the attribute table DOESN'T fetch geometries (because performance)
+  QScopedPointer< QgsVectorLayer> tempLayer( new QgsVectorLayer( QString( "LineString?crs=epsg:3111&field=pk:int&field=col1:double" ), QString( "vl" ), QString( "memory" ) ) );
+  QVERIFY( tempLayer->isValid() );
+
+  QSettings().setValue( QString( "/qgis/attributeTableBehaviour" ), QgsAttributeTableFilterModel::ShowAll );
+  QScopedPointer< QgsAttributeTableDialog > dlg( new QgsAttributeTableDialog( tempLayer.data() ) );
+
+  QVERIFY( !dlg->mMainView->masterModel()->layerCache()->cacheGeometry() );
+  QVERIFY( dlg->mMainView->masterModel()->request().flags() & QgsFeatureRequest::NoGeometry );
+
+  // but if we are requesting only visible features, then geometry must be fetched...
+
+  QSettings().setValue( QString( "/qgis/attributeTableBehaviour" ), QgsAttributeTableFilterModel::ShowVisible );
+  dlg.reset( new QgsAttributeTableDialog( tempLayer.data() ) );
+  QVERIFY( dlg->mMainView->masterModel()->layerCache()->cacheGeometry() );
+  QVERIFY( !( dlg->mMainView->masterModel()->request().flags() & QgsFeatureRequest::NoGeometry ) );
+
+  // try changing existing dialog to no geometry mode
+  dlg->filterShowAll();
+  QVERIFY( !dlg->mMainView->masterModel()->layerCache()->cacheGeometry() );
+  QVERIFY( dlg->mMainView->masterModel()->request().flags() & QgsFeatureRequest::NoGeometry );
+
+  // and back to a geometry mode
+  dlg->filterVisible();
+  QVERIFY( dlg->mMainView->masterModel()->layerCache()->cacheGeometry() );
+  QVERIFY( !( dlg->mMainView->masterModel()->request().flags() & QgsFeatureRequest::NoGeometry ) );
+
+}
+
 QTEST_MAIN( TestQgsAttributeTable )
 #include "testqgsattributetable.moc"
diff --git a/tests/src/core/testqgscomposition.cpp b/tests/src/core/testqgscomposition.cpp
index 71fd195..154c9c3 100644
--- a/tests/src/core/testqgscomposition.cpp
+++ b/tests/src/core/testqgscomposition.cpp
@@ -26,6 +26,11 @@
 #include "qgsmapsettings.h"
 #include "qgsmultirenderchecker.h"
 #include "qgsfillsymbollayerv2.h"
+#include "qgsmaplayerregistry.h"
+#include "qgscomposerlegend.h"
+#include "qgsvectorlayer.h"
+#include "qgslayertreegroup.h"
+#include "qgslayertreelayer.h"
 
 #include <QObject>
 #include <QtTest/QtTest>
@@ -54,6 +59,7 @@ class TestQgsComposition : public QObject
     void resizeToContentsMultiPage();
     void georeference();
     void variablesEdited();
+    void legendRestoredFromTemplate();
 
   private:
     QgsComposition *mComposition;
@@ -598,5 +604,89 @@ void TestQgsComposition::variablesEdited()
   QVERIFY( spyVariablesChanged.count() == 2 );
 }
 
+void TestQgsComposition::legendRestoredFromTemplate()
+{
+  // load a layer
+
+  QFileInfo vectorFileInfo( QString( TEST_DATA_DIR ) + "/points.shp" );
+  QgsVectorLayer* layer = new QgsVectorLayer( vectorFileInfo.filePath(),
+      vectorFileInfo.completeBaseName(),
+      "ogr" );
+  QgsMapLayerRegistry::instance()->addMapLayer( layer );
+
+  // create composition
+  QgsMapSettings ms;
+  QgsComposition c( ms );
+  // add a legend
+  QgsComposerLegend* legend = new QgsComposerLegend( &c );
+  c.addComposerLegend( legend );
+  legend->setAutoUpdateModel( false );
+
+  QgsLegendModelV2* model = legend->modelV2();
+  QgsLayerTreeNode* node = model->rootGroup()->children().at( 0 );
+  // make sure we've got right node
+  QgsLayerTreeLayer* layerNode = dynamic_cast< QgsLayerTreeLayer* >( node );
+  QVERIFY( layerNode );
+  QCOMPARE( layerNode->layer(), layer );
+
+  // got it!
+  layerNode->setCustomProperty( "legend/title-label", QString( "new title!" ) );
+  // make sure new title stuck
+  QCOMPARE( model->data( model->node2index( layerNode ), Qt::DisplayRole ).toString(), QString( "new title!" ) );
+
+  // save composition to template
+  QDomDocument doc;
+  QDomElement composerElem = doc.createElement( "Composer" );
+  doc.appendChild( composerElem );
+  c.writeXML( composerElem, doc );
+  c.atlasComposition().writeXML( composerElem, doc );
+
+
+  // make a new composition from template
+  QgsComposition c2( ms );
+  QVERIFY( c2.loadFromTemplate( doc ) );
+  // get legend from new composition
+  QList< QgsComposerLegend* > legends2;
+  c2.composerItems( legends2 );
+  QgsComposerLegend* legend2 = legends2.at( 0 );
+  QVERIFY( legend2 );
+
+  QgsLegendModelV2* model2 = legend2->modelV2();
+  QgsLayerTreeNode* node2 = model2->rootGroup()->children().at( 0 );
+  QgsLayerTreeLayer* layerNode2 = dynamic_cast< QgsLayerTreeLayer* >( node2 );
+  QVERIFY( layerNode2 );
+  QCOMPARE( layerNode2->layer(), layer );
+  QCOMPARE( model2->data( model->node2index( layerNode2 ), Qt::DisplayRole ).toString(), QString( "new title!" ) );
+
+  QString oldId = layer->id();
+  // new test
+  // remove existing layer
+  QgsMapLayerRegistry::instance()->removeMapLayer( layer );
+
+  // reload it, with a new id
+  QgsVectorLayer* layer2 = new QgsVectorLayer( vectorFileInfo.filePath(),
+      vectorFileInfo.completeBaseName(),
+      "ogr" );
+  QgsMapLayerRegistry::instance()->addMapLayer( layer2 );
+  QVERIFY( oldId != layer2->id() );
+
+  // load composition from template
+  QgsComposition c3( ms );
+  QVERIFY( c3.loadFromTemplate( doc ) );
+  // get legend from new composition
+  QList< QgsComposerLegend* > legends3;
+  c3.composerItems( legends3 );
+  QgsComposerLegend* legend3 = legends3.at( 0 );
+  QVERIFY( legend3 );
+
+  //make sure customisation remains intact
+  QgsLegendModelV2* model3 = legend3->modelV2();
+  QgsLayerTreeNode* node3 = model3->rootGroup()->children().at( 0 );
+  QgsLayerTreeLayer* layerNode3 = dynamic_cast< QgsLayerTreeLayer* >( node3 );
+  QVERIFY( layerNode3 );
+  QCOMPARE( layerNode3->layer(), layer2 );
+  QCOMPARE( model3->data( model->node2index( layerNode3 ), Qt::DisplayRole ).toString(), QString( "new title!" ) );
+}
+
 QTEST_MAIN( TestQgsComposition )
 #include "testqgscomposition.moc"
diff --git a/tests/src/core/testqgsvectorlayercache.cpp b/tests/src/core/testqgsvectorlayercache.cpp
index dcceb36..f19f2a4 100644
--- a/tests/src/core/testqgsvectorlayercache.cpp
+++ b/tests/src/core/testqgsvectorlayercache.cpp
@@ -54,6 +54,7 @@ class TestVectorLayerCache : public QObject
     void testFullCache();
     void testFullCacheThroughRequest();
     void testCanUseCacheForRequest();
+    void testCacheGeom();
 
     void onCommittedFeaturesAdded( const QString&, const QgsFeatureList& );
 
@@ -240,6 +241,15 @@ void TestVectorLayerCache::testFullCache()
   {
     QVERIFY( cache.isFidCached( f.id() ) );
   }
+
+  // add a feature to the layer
+  mPointsLayer->startEditing();
+  QgsFeature f2( mPointsLayer->fields() );
+  QVERIFY( mPointsLayer->addFeature( f2 ) );
+  QVERIFY( cache.hasFullCache() );
+  QVERIFY( cache.isFidCached( f2.id() ) );
+
+  mPointsLayer->rollBack();
 }
 
 void TestVectorLayerCache::testFullCacheThroughRequest()
@@ -330,6 +340,58 @@ void TestVectorLayerCache::testCanUseCacheForRequest()
   QVERIFY( cache.canUseCacheForRequest( QgsFeatureRequest().setFilterExpression( "$x<5" ), it ) );
 }
 
+void TestVectorLayerCache::testCacheGeom()
+{
+  QgsVectorLayerCache cache( mPointsLayer, 2 );
+  // cache geometry
+  cache.setCacheGeometry( true );
+
+  //first get some feature ids from layer
+  QgsFeature f;
+  QgsFeatureIterator it = mPointsLayer->getFeatures();
+  it.nextFeature( f );
+  QgsFeatureId id1 = f.id();
+  it.nextFeature( f );
+  QgsFeatureId id2 = f.id();
+
+  QgsFeatureRequest req;
+  req.setFlags( QgsFeatureRequest::NoGeometry ); // should be ignored by cache
+  req.setFilterFids( QgsFeatureIds() << id1 << id2 );
+
+  it = cache.getFeatures( req );
+  while ( it.nextFeature( f ) )
+  {
+    QVERIFY( f.constGeometry() );
+  }
+
+  // disabled geometry caching
+  cache.setCacheGeometry( false );
+  // we should still have cached features... no need to lose these!
+  QCOMPARE( cache.cachedFeatureIds(), QgsFeatureIds() << id1 << id2 );
+  it = cache.getFeatures( req );
+  while ( it.nextFeature( f ) )
+  {
+    QVERIFY( f.constGeometry() );
+  }
+
+  // now upgrade cache from no geometry -> geometry, should be cleared since we
+  // cannot be confident that features existing in the cache have geometry
+  cache.setCacheGeometry( true );
+  QVERIFY( cache.cachedFeatureIds().isEmpty() );
+  it = cache.getFeatures( req );
+  while ( it.nextFeature( f ) )
+  {
+    QVERIFY( f.constGeometry() );
+  }
+
+  // another test...
+  cache.setCacheGeometry( false );
+  cache.setFullCache( true );
+  QVERIFY( cache.hasFullCache() );
+  cache.setCacheGeometry( true );
+  QVERIFY( !cache.hasFullCache() );
+}
+
 void TestVectorLayerCache::onCommittedFeaturesAdded( const QString& layerId, const QgsFeatureList& features )
 {
   Q_UNUSED( layerId )
diff --git a/tests/src/gui/testqgsdualview.cpp b/tests/src/gui/testqgsdualview.cpp
index 0882a32..aa8bc53 100644
--- a/tests/src/gui/testqgsdualview.cpp
+++ b/tests/src/gui/testqgsdualview.cpp
@@ -53,6 +53,7 @@ class TestQgsDualView : public QObject
     void testSort();
 
     void testAttributeFormSharedValueScanning();
+    void testNoGeom();
 
   private:
     QgsMapCanvas* mCanvas;
@@ -266,6 +267,36 @@ void TestQgsDualView::testAttributeFormSharedValueScanning()
   QVERIFY( mixedValueFields.isEmpty() );
 }
 
+void TestQgsDualView::testNoGeom()
+{
+  //test that both the master model and cache for the dual view either both request geom or both don't request geom
+  QScopedPointer< QgsDualView > dv( new QgsDualView() );
+
+  // request with geometry
+  QgsFeatureRequest req;
+  dv->init( mPointsLayer, mCanvas, req );
+  // check that both master model AND cache are using geometry
+  QgsAttributeTableModel* model = dv->masterModel();
+  QVERIFY( model->layerCache()->cacheGeometry() );
+  QVERIFY( !( model->request().flags() & QgsFeatureRequest::NoGeometry ) );
+
+  // request with NO geometry, but using filter rect (which should override and request geom)
+  req = QgsFeatureRequest().setFilterRect( QgsRectangle( 1, 2, 3, 4 ) );
+  dv.reset( new QgsDualView() );
+  dv->init( mPointsLayer, mCanvas, req );
+  model = dv->masterModel();
+  QVERIFY( model->layerCache()->cacheGeometry() );
+  QVERIFY( !( model->request().flags() & QgsFeatureRequest::NoGeometry ) );
+
+  // request with NO geometry
+  req = QgsFeatureRequest().setFlags( QgsFeatureRequest::NoGeometry );
+  dv.reset( new QgsDualView() );
+  dv->init( mPointsLayer, mCanvas, req );
+  model = dv->masterModel();
+  QVERIFY( !model->layerCache()->cacheGeometry() );
+  QVERIFY(( model->request().flags() & QgsFeatureRequest::NoGeometry ) );
+}
+
 QTEST_MAIN( TestQgsDualView )
 #include "testqgsdualview.moc"
 
diff --git a/tests/src/python/CMakeLists.txt b/tests/src/python/CMakeLists.txt
index ada22f6..cb56191 100644
--- a/tests/src/python/CMakeLists.txt
+++ b/tests/src/python/CMakeLists.txt
@@ -57,6 +57,7 @@ ADD_PYTHON_TEST(PyQgsGeometryValidator test_qgsgeometryvalidator.py)
 ADD_PYTHON_TEST(PyQgsGraduatedSymbolRendererV2 test_qgsgraduatedsymbolrendererv2.py)
 ADD_PYTHON_TEST(PyQgsInterval test_qgsinterval.py)
 ADD_PYTHON_TEST(PyQgsJSONUtils test_qgsjsonutils.py)
+ADD_PYTHON_TEST(PyQgsMapRenderer test_qgsmaprenderer.py)
 ADD_PYTHON_TEST(PyQgsMapUnitScale test_qgsmapunitscale.py)
 ADD_PYTHON_TEST(PyQgsMemoryProvider test_provider_memory.py)
 ADD_PYTHON_TEST(PyQgsMultiEditToolButton test_qgsmultiedittoolbutton.py)
diff --git a/tests/src/python/test_console.py b/tests/src/python/test_console.py
index decaf12..3a81782 100644
--- a/tests/src/python/test_console.py
+++ b/tests/src/python/test_console.py
@@ -13,9 +13,11 @@ __copyright__ = 'Copyright 2015, The QGIS Project'
 __revision__ = '$Format:%H$'
 
 import qgis  # NOQA
+import os
 
 from qgis.testing import unittest, start_app
 from console import console
+from qgis.PyQt.QtCore import QSettings, QCoreApplication
 
 start_app()
 
@@ -23,6 +25,12 @@ start_app()
 class TestConsole(unittest.TestCase):
 
     def test_show_console(self):
+        if os.name == 'nt':
+            QCoreApplication.setOrganizationName("QGIS")
+            QCoreApplication.setOrganizationDomain("qgis.org")
+            QCoreApplication.setApplicationName("QGIS-TEST")
+            QSettings().setValue('pythonConsole/contextHelpOnFirstLaunch', False)
+
         my_console = console.show_console()
         my_console_widget = my_console.console
 
diff --git a/tests/src/python/test_provider_wfs.py b/tests/src/python/test_provider_wfs.py
index 2a916df..a1c50f9 100644
--- a/tests/src/python/test_provider_wfs.py
+++ b/tests/src/python/test_provider_wfs.py
@@ -488,6 +488,127 @@ class TestPyQgsWFSProvider(unittest.TestCase, ProviderTestCase):
         values = [f['INTFIELD'] for f in vl.getFeatures(request)]
         self.assertEqual(values, [100])
 
+    def testWFS10_outputformat_GML3(self):
+        """Test WFS 1.0 with OUTPUTFORMAT=GML3"""
+        # We also test attribute fields in upper-case, and a field named GEOMETRY
+
+        endpoint = self.__class__.basetestpath + '/fake_qgis_http_endpoint_WFS1.0_gml3'
+
+        with open(sanitize(endpoint, '?SERVICE=WFS?REQUEST=GetCapabilities?VERSION=1.0.0'), 'wb') as f:
+            f.write("""
+<WFS_Capabilities version="1.0.0" xmlns="http://www.opengis.net/wfs" xmlns:ogc="http://www.opengis.net/ogc">
+  <Capability>
+    <Request>
+      <GetFeature>
+        <ResultFormat>
+          <GML2/>
+          <GML3/>
+        </ResultFormat>
+      </GetFeature>
+    </Request>
+  </Capability>
+  <FeatureTypeList>
+    <FeatureType>
+      <Name>my:typename</Name>
+      <Title>Title</Title>
+      <Abstract>Abstract</Abstract>
+      <SRS>EPSG:32631</SRS>
+      <!-- in WFS 1.0, LatLongBoundingBox is in SRS units, not necessarily lat/long... -->
+      <LatLongBoundingBox minx="400000" miny="5400000" maxx="450000" maxy="5500000"/>
+    </FeatureType>
+  </FeatureTypeList>
+</WFS_Capabilities>""".encode('UTF-8'))
+
+        with open(sanitize(endpoint, '?SERVICE=WFS&REQUEST=DescribeFeatureType&VERSION=1.0.0&TYPENAME=my:typename'), 'wb') as f:
+            f.write("""
+<xsd:schema xmlns:my="http://my" xmlns:gml="http://www.opengis.net/gml" xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" targetNamespace="http://my">
+  <xsd:import namespace="http://www.opengis.net/gml"/>
+  <xsd:complexType name="typenameType">
+    <xsd:complexContent>
+      <xsd:extension base="gml:AbstractFeatureType">
+        <xsd:sequence>
+          <xsd:element maxOccurs="1" minOccurs="0" name="geometry" nillable="true" type="gml:PointPropertyType"/>
+        </xsd:sequence>
+      </xsd:extension>
+    </xsd:complexContent>
+  </xsd:complexType>
+  <xsd:element name="typename" substitutionGroup="gml:_Feature" type="my:typenameType"/>
+</xsd:schema>
+""".encode('UTF-8'))
+
+        vl = QgsVectorLayer("url='http://" + endpoint + "' typename='my:typename' version='1.0.0'", 'test', 'WFS')
+        assert vl.isValid()
+
+        with open(sanitize(endpoint, '?SERVICE=WFS&REQUEST=GetFeature&VERSION=1.0.0&TYPENAME=my:typename&SRSNAME=EPSG:32631&OUTPUTFORMAT=GML3'), 'wb') as f:
+            f.write("""
+<wfs:FeatureCollection
+                       xmlns:wfs="http://www.opengis.net/wfs"
+                       xmlns:gml="http://www.opengis.net/gml"
+                       xmlns:my="http://my">
+  <gml:boundedBy><gml:null>unknown</gml:null></gml:boundedBy>
+  <gml:featureMember>
+    <my:typename fid="typename.0">
+      <my:geometry>
+          <gml:Point srsName="urn:ogc:def:crs:EPSG::32631"><gml:coordinates decimal="." cs="," ts=" ">426858,5427937</gml:coordinates></gml:Point>
+      </my:geometry>
+    </my:typename>
+  </gml:featureMember>
+</wfs:FeatureCollection>""".encode('UTF-8'))
+
+        got_f = [f for f in vl.getFeatures()]
+        got = got_f[0].geometry().geometry()
+        self.assertEqual((got.x(), got.y()), (426858.0, 5427937.0))
+
+        # Test with explicit OUTPUTFORMAT as parameter
+        vl = QgsVectorLayer("url='http://" + endpoint + "' typename='my:typename' version='1.0.0' outputformat='GML2'", 'test', 'WFS')
+        assert vl.isValid()
+
+        with open(sanitize(endpoint, '?SERVICE=WFS&REQUEST=GetFeature&VERSION=1.0.0&TYPENAME=my:typename&SRSNAME=EPSG:32631&OUTPUTFORMAT=GML2'), 'wb') as f:
+            f.write("""
+<wfs:FeatureCollection
+                       xmlns:wfs="http://www.opengis.net/wfs"
+                       xmlns:gml="http://www.opengis.net/gml"
+                       xmlns:my="http://my">
+  <gml:boundedBy><gml:null>unknown</gml:null></gml:boundedBy>
+  <gml:featureMember>
+    <my:typename fid="typename.0">
+      <my:geometry>
+          <gml:Point srsName="urn:ogc:def:crs:EPSG::32631"><gml:coordinates decimal="." cs="," ts=" ">1,2</gml:coordinates></gml:Point>
+      </my:geometry>
+    </my:typename>
+  </gml:featureMember>
+</wfs:FeatureCollection>""".encode('UTF-8'))
+
+        got_f = [f for f in vl.getFeatures()]
+        got = got_f[0].geometry().geometry()
+        self.assertEqual((got.x(), got.y()), (1.0, 2.0))
+
+        # Test with explicit OUTPUTFORMAT in URL
+        # For some reason this fails on Travis (on assert vl.isValid()) whereas it works locally for me...
+        if False:
+            vl = QgsVectorLayer("url='http://" + endpoint + "?OUTPUTFORMAT=GML2' typename='my:typename' version='1.0.0'", 'test', 'WFS')
+            assert vl.isValid()
+
+            with open(sanitize(endpoint, '?SERVICE=WFS&REQUEST=GetFeature&VERSION=1.0.0&TYPENAME=my:typename&SRSNAME=EPSG:32631&OUTPUTFORMAT=GML2'), 'wb') as f:
+                f.write("""
+    <wfs:FeatureCollection
+                        xmlns:wfs="http://www.opengis.net/wfs"
+                        xmlns:gml="http://www.opengis.net/gml"
+                        xmlns:my="http://my">
+    <gml:boundedBy><gml:null>unknown</gml:null></gml:boundedBy>
+    <gml:featureMember>
+        <my:typename fid="typename.0">
+        <my:geometry>
+            <gml:Point srsName="urn:ogc:def:crs:EPSG::32631"><gml:coordinates decimal="." cs="," ts=" ">3,4</gml:coordinates></gml:Point>
+        </my:geometry>
+        </my:typename>
+    </gml:featureMember>
+    </wfs:FeatureCollection>""".encode('UTF-8'))
+
+            got_f = [f for f in vl.getFeatures()]
+            got = got_f[0].geometry().geometry()
+            self.assertEqual((got.x(), got.y()), (3.0, 4.0))
+
     def testWFS10_latlongboundingbox_in_WGS84(self):
        """Test WFS 1.0 with a non-conformant LatLongBoundingBox"""
 
@@ -2362,5 +2483,6 @@ class TestPyQgsWFSProvider(unittest.TestCase, ProviderTestCase):
         self.assertNotEqual(vl.dataProvider().capabilities() & vl.dataProvider().EditingCapabilities, 0)
         self.assertEqual(vl.wkbType(), QgsWKBTypes.Point)
 
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/tests/src/python/test_qgsappstartup.py b/tests/src/python/test_qgsappstartup.py
index b63ea72..c7d7a88 100644
--- a/tests/src/python/test_qgsappstartup.py
+++ b/tests/src/python/test_qgsappstartup.py
@@ -27,7 +27,6 @@ import locale
 
 from qgis.testing import unittest
 from utilities import unitTestDataPath
-from builtins import str
 
 print('CTEST_FULL_OUTPUT')
 
@@ -162,8 +161,11 @@ class TestPyQgsAppStartup(unittest.TestCase):
             env={'PYQGIS_STARTUP': testmod})
 
     def testOptionsAsFiles(self):
+        if os.name == 'nt':
+            return
+
         # verify QGIS accepts filenames that match options after the special option '--'
-        # '--help' should return immediatly (after displaying the usage hints)
+        # '--help' should return immediately (after displaying the usage hints)
         # '-- --help' should not exit but try (and probably fail) to load a layer called '--help'
         with self.assertRaises(Exception):
             self.doTestStartup(option="--configpath",
diff --git a/tests/src/python/test_qgsmaprenderer.py b/tests/src/python/test_qgsmaprenderer.py
new file mode 100644
index 0000000..5b4e0f1
--- /dev/null
+++ b/tests/src/python/test_qgsmaprenderer.py
@@ -0,0 +1,112 @@
+# -*- coding: utf-8 -*-
+"""QGIS Unit tests for QgsMapRenderer.
+
+.. note:: This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+"""
+__author__ = 'Nyall Dawson'
+__date__ = '1/02/2017'
+__copyright__ = 'Copyright 2017, The QGIS Project'
+# This will get replaced with a git SHA1 when you do a git archive
+__revision__ = '$Format:%H$'
+
+import qgis  # NOQA
+
+from qgis.core import (QgsMapRendererCache,
+                       QgsMapRendererParallelJob,
+                       QgsMapRendererSequentialJob,
+                       QgsMapRendererCustomPainterJob,
+                       QgsRectangle,
+                       QgsVectorLayer,
+                       QgsProject,
+                       QgsFeature,
+                       QgsGeometry,
+                       QgsMapSettings,
+                       QgsPoint)
+from qgis.testing import start_app, unittest
+from qgis.PyQt.QtCore import QSize, QThreadPool
+from qgis.PyQt.QtGui import QPainter, QImage
+from random import uniform
+
+
+app = start_app()
+
+
+class TestQgsMapRenderer(unittest.TestCase):
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        # avoid crash on finish, probably related to https://bugreports.qt.io/browse/QTBUG-35760
+        QThreadPool.globalInstance().waitForDone()
+
+    def checkCancel(self, job_type):
+        """test canceling a render job"""
+        layer = QgsVectorLayer("Point?field=fldtxt:string",
+                               "layer1", "memory")
+
+        # add a ton of random points
+        for i in range(2000):
+            x = uniform(5, 25)
+            y = uniform(25, 45)
+            g = QgsGeometry.fromPoint(QgsPoint(x, y))
+            f = QgsFeature()
+            f.setGeometry(g)
+            f.initAttributes(1)
+            layer.dataProvider().addFeatures([f])
+
+        settings = QgsMapSettings()
+        settings.setExtent(QgsRectangle(5, 25, 25, 45))
+        settings.setOutputSize(QSize(600, 400))
+        settings.setLayers([layer.id()])
+
+        # first try non-blocking cancelWithoutBlocking() call
+        job = job_type(settings)
+        job.start()
+
+        # insta cancel!
+        job.cancelWithoutBlocking()
+        # should still be active immediately after
+        self.assertTrue(job.isActive())
+
+        while job.isActive():
+            app.processEvents()
+
+        # try blocking cancel() call
+        job = job_type(settings)
+        job.start()
+
+        # insta cancel!
+        job.cancel()
+        # should not be active anymore
+        self.assertFalse(job.isActive())
+
+    def runRendererChecks(self, renderer):
+        """ runs all checks on the specified renderer """
+        self.checkCancel(renderer)
+
+    def testParallelRenderer(self):
+        """ run test suite on QgsMapRendererParallelJob"""
+        self.runRendererChecks(QgsMapRendererParallelJob)
+
+    def testSequentialRenderer(self):
+        """ run test suite on QgsMapRendererSequentialJob"""
+        self.runRendererChecks(QgsMapRendererSequentialJob)
+
+    def testCustomPainterRenderer(self):
+        """ run test suite on QgsMapRendererCustomPainterJob"""
+        im = QImage(200, 200, QImage.Format_RGB32)
+        p = QPainter(im)
+
+        def create_job(settings):
+            return QgsMapRendererCustomPainterJob(settings, p)
+
+        self.runRendererChecks(create_job)
+        p.end()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/src/python/test_qgsserver.py b/tests/src/python/test_qgsserver.py
index 6028683..2ac231e 100644
--- a/tests/src/python/test_qgsserver.py
+++ b/tests/src/python/test_qgsserver.py
@@ -600,6 +600,7 @@ class TestQgsServer(unittest.TestCase):
         r, h = self._result(self.server.handleRequest(qs))
         self._img_diff_error(r, h, "WMS_GetPrint_Basic")
 
+    @unittest.skip('Randomly failing to draw the map layer')
     def test_wms_getprint_srs(self):
         qs = "&".join(["%s=%s" % i for i in list({
             "MAP": urllib.quote(self.projectPath),

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/qgis.git



More information about the Pkg-grass-devel mailing list