[Python-modules-commits] [pygments] 01/05: Import pygments_2.1.3+dfsg.orig.tar.gz

Dmitry Shachnev mitya57 at moszumanska.debian.org
Mon May 2 10:06:54 UTC 2016


This is an automated email from the git hooks/post-receive script.

mitya57 pushed a commit to branch master
in repository pygments.

commit e69a782be13ffbdc263a758c17f1929526a111a9
Author: Dmitry Shachnev <mitya57 at gmail.com>
Date:   Mon May 2 12:52:16 2016 +0300

    Import pygments_2.1.3+dfsg.orig.tar.gz
---
 .hgignore                       | 19 +++++++++++
 .hgtags                         | 31 ++++++++++++++++++
 CHANGES                         | 28 ++++++++++++++++
 PKG-INFO                        |  2 +-
 Pygments.egg-info/PKG-INFO      |  2 +-
 Pygments.egg-info/SOURCES.txt   | 34 ++++++++++++++++++++
 doc/docs/integrate.rst          | 11 +++++++
 doc/faq.rst                     | 32 +++++++++----------
 pygments/__init__.py            |  2 +-
 pygments/formatters/_mapping.py |  2 +-
 pygments/formatters/html.py     |  4 ++-
 pygments/formatters/img.py      | 10 ++++--
 pygments/formatters/latex.py    |  9 +++++-
 pygments/lexers/_mapping.py     |  6 ++--
 pygments/lexers/c_cpp.py        |  9 +++++-
 pygments/lexers/chapel.py       | 13 ++++----
 pygments/lexers/dotnet.py       |  6 ++--
 pygments/lexers/hdl.py          | 25 +++++++++------
 pygments/lexers/julia.py        | 25 ++++++++++-----
 pygments/lexers/lisp.py         |  2 +-
 pygments/lexers/python.py       | 39 ++++++++++++----------
 pygments/lexers/rdf.py          | 71 ++++++++++++++++++++++++-----------------
 pygments/lexers/shell.py        | 11 ++++---
 pygments/lexers/sql.py          | 20 +++++++++---
 pygments/lexers/templates.py    |  6 ++--
 pygments/lexers/textfmts.py     |  5 +++
 pygments/token.py               |  8 +++++
 requirements.txt                |  5 +++
 setup.cfg                       |  3 ++
 setup.py                        |  2 +-
 tests/test_html_formatter.py    |  4 +--
 tests/test_token.py             |  8 +++++
 tox.ini                         |  7 ++++
 33 files changed, 343 insertions(+), 118 deletions(-)

diff --git a/.hgignore b/.hgignore
new file mode 100644
index 0000000..850baf1
--- /dev/null
+++ b/.hgignore
@@ -0,0 +1,19 @@
+syntax: glob
+*.egg
+*.pyc
+*.pyo
+.*.sw[op]
+.idea/
+.ropeproject
+.project
+.tags
+.tox
+Pygments.egg-info/*
+TAGS
+build/*
+dist/*
+doc/_build
+TAGS
+tests/.coverage
+tests/cover
+tests/examplefiles/output
diff --git a/.hgtags b/.hgtags
new file mode 100644
index 0000000..33b32ca
--- /dev/null
+++ b/.hgtags
@@ -0,0 +1,31 @@
+634420aa4221cc1eb2b3753bd571166bd9e611d4 0.9
+942ecbb5c84ca5d57ae82f5697775973f4e12717 0.10
+63632d0340958d891176db20fe9a32a56abcd5ea 0.11
+13834ec94d2c5a90a68bc2c2a327abd962c486bc 0.11.1
+a5748745272afffd725570e068a560d46e28dc1f 1.0
+5a794a620dc711a219722a7af94d9d2e95cda26d 1.1
+dd81c35efd95292de4965153c66c8bbfe435f1c4 1.1.1
+e7691aa4f473a2cdaa2e5b7bfed8aec196719aca 0.5.1
+6f53364d63ddb8bd9532bb6ea402e3af05275b03 0.5
+11efe99c11e601071c3a77910b9fca769de66fbf 0.6
+99df0a7404d168b05626ffced6fd16edcf58c145 0.7
+d0b08fd569d3d9dafec4c045a7d8876442b3ef64 0.7.1
+1054522d1dda9c7899516ead3e65e5e363fdf30d 0.8
+066e56d8f5caa31e15386fff6f938bedd85a8732 0.8.1
+bae0833cae75e5a641abe3c4b430fa384cd9d258 1.2
+f6e5acee4f761696676e05a9112c91a5a5670b49 1.2.1
+580c5ce755486bc92c79c50f80cfc79924e15140 1.2.2
+c62867700c9e98cc2988c62f298ec54cee9b6927 1.3
+3a3846c2503db85bb70a243c8bc702629c4bce57 1.3.1
+8ad6d35dd2ab0530a1e2c088ab7fe0e00426b5f9 1.4
+eff3aee4abff2b72564ddfde77fcc82adbba52ad 1.5
+2c262bfc66b05a8aecc1109c3acc5b9447a5213c 1.6rc1
+7c962dcb484cb73394aec7f41709940340dc8a9c 1.6
+da509a68ea620bbb8ee3f5d5cf7761375d8f4451 2.0rc1
+ed3206a773e9cb90a0edeabee8ef6b56b5b9a53c 2.0
+94e1e056c92d97e3a54759f9216e8deff22efbdd 2.0.1
+142a870bf0f1822414649ae26f433b112a5c92d5 2.0.2
+34530db252d35d7ef57a8dbb9fce7bcc46f6ba6b 2.1
+2935c3a59672e8ae74ffb7ea66ea6567f49782f6 2.1.1
+8e7ebc56153cf899067333bff4f15ae98758a2e1 2.1.2
+88527db663dce0729c2cd6e3bc2f3c657ae39254 2.1.3
diff --git a/CHANGES b/CHANGES
index 379cc87..478970d 100644
--- a/CHANGES
+++ b/CHANGES
@@ -6,6 +6,34 @@ Issue numbers refer to the tracker at
 pull request numbers to the requests at
 <https://bitbucket.org/birkenfeld/pygments-main/pull-requests/merged>.
 
+Version 2.1.3
+-------------
+(released Mar 2, 2016)
+
+- Fixed regression in Bash lexer (PR#563)
+
+
+Version 2.1.2
+-------------
+(released Feb 29, 2016)
+
+- Fixed Python 3 regression in image formatter (#1215)
+- Fixed regression in Bash lexer (PR#562)
+
+
+Version 2.1.1
+-------------
+(relased Feb 14, 2016)
+
+- Fixed Jython compatibility (#1205)
+- Fixed HTML formatter output with leading empty lines (#1111)
+- Added a mapping table for LaTeX encodings and added utf8 (#1152)
+- Fixed image formatter font searching on Macs (#1188)
+- Fixed deepcopy-ing of Token instances (#1168)
+- Fixed Julia string interpolation (#1170)
+- Fixed statefulness of HttpLexer between get_tokens calls
+- Many smaller fixes to various lexers
+
 
 Version 2.1
 -----------
diff --git a/PKG-INFO b/PKG-INFO
index a648547..4e75234 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: Pygments
-Version: 2.1
+Version: 2.1.3
 Summary: Pygments is a syntax highlighting package written in Python.
 Home-page: http://pygments.org/
 Author: Georg Brandl
diff --git a/Pygments.egg-info/PKG-INFO b/Pygments.egg-info/PKG-INFO
index a648547..4e75234 100644
--- a/Pygments.egg-info/PKG-INFO
+++ b/Pygments.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: Pygments
-Version: 2.1
+Version: 2.1.3
 Summary: Pygments is a syntax highlighting package written in Python.
 Home-page: http://pygments.org/
 Author: Georg Brandl
diff --git a/Pygments.egg-info/SOURCES.txt b/Pygments.egg-info/SOURCES.txt
index e36db6a..5155bcc 100644
--- a/Pygments.egg-info/SOURCES.txt
+++ b/Pygments.egg-info/SOURCES.txt
@@ -1,3 +1,5 @@
+.hgignore
+.hgtags
 AUTHORS
 CHANGES
 LICENSE
@@ -7,8 +9,10 @@ README.rst
 TODO
 ez_setup.py
 pygmentize
+requirements.txt
 setup.cfg
 setup.py
+tox.ini
 Pygments.egg-info/PKG-INFO
 Pygments.egg-info/SOURCES.txt
 Pygments.egg-info/dependency_links.txt
@@ -313,32 +317,62 @@ tests/test_using_api.pyc
 tests/test_util.py
 tests/test_util.pyc
 tests/__pycache__/string_asserts.cpython-33.pyc
+tests/__pycache__/string_asserts.cpython-35.pyc
 tests/__pycache__/support.cpython-33.pyc
+tests/__pycache__/support.cpython-35.pyc
 tests/__pycache__/test_basic_api.cpython-33.pyc
+tests/__pycache__/test_basic_api.cpython-35.pyc
 tests/__pycache__/test_cfm.cpython-33.pyc
+tests/__pycache__/test_cfm.cpython-35.pyc
 tests/__pycache__/test_clexer.cpython-33.pyc
+tests/__pycache__/test_clexer.cpython-35.pyc
 tests/__pycache__/test_cmdline.cpython-33.pyc
+tests/__pycache__/test_cmdline.cpython-35.pyc
 tests/__pycache__/test_examplefiles.cpython-33.pyc
+tests/__pycache__/test_examplefiles.cpython-35.pyc
+tests/__pycache__/test_ezhil.cpython-35.pyc
 tests/__pycache__/test_html_formatter.cpython-33.pyc
+tests/__pycache__/test_html_formatter.cpython-35.pyc
 tests/__pycache__/test_inherit.cpython-33.pyc
+tests/__pycache__/test_inherit.cpython-35.pyc
+tests/__pycache__/test_irc_formatter.cpython-35.pyc
 tests/__pycache__/test_java.cpython-33.pyc
+tests/__pycache__/test_java.cpython-35.pyc
 tests/__pycache__/test_latex_formatter.cpython-33.pyc
+tests/__pycache__/test_latex_formatter.cpython-35.pyc
 tests/__pycache__/test_lexers_other.cpython-33.pyc
+tests/__pycache__/test_lexers_other.cpython-35.pyc
 tests/__pycache__/test_objectiveclexer.cpython-33.pyc
+tests/__pycache__/test_objectiveclexer.cpython-35.pyc
 tests/__pycache__/test_perllexer.cpython-33.pyc
+tests/__pycache__/test_perllexer.cpython-35.pyc
 tests/__pycache__/test_qbasiclexer.cpython-33.pyc
+tests/__pycache__/test_qbasiclexer.cpython-35.pyc
 tests/__pycache__/test_regexlexer.cpython-33.pyc
+tests/__pycache__/test_regexlexer.cpython-35.pyc
 tests/__pycache__/test_regexopt.cpython-33.pyc
+tests/__pycache__/test_regexopt.cpython-35.pyc
 tests/__pycache__/test_rtf_formatter.cpython-33.pyc
+tests/__pycache__/test_rtf_formatter.cpython-35.pyc
 tests/__pycache__/test_ruby.cpython-33.pyc
+tests/__pycache__/test_ruby.cpython-35.pyc
 tests/__pycache__/test_shell.cpython-33.pyc
+tests/__pycache__/test_shell.cpython-35.pyc
 tests/__pycache__/test_smarty.cpython-33.pyc
+tests/__pycache__/test_smarty.cpython-35.pyc
 tests/__pycache__/test_string_asserts.cpython-33.pyc
+tests/__pycache__/test_string_asserts.cpython-35.pyc
+tests/__pycache__/test_terminal_formatter.cpython-35.pyc
 tests/__pycache__/test_textfmts.cpython-33.pyc
+tests/__pycache__/test_textfmts.cpython-35.pyc
 tests/__pycache__/test_token.cpython-33.pyc
+tests/__pycache__/test_token.cpython-35.pyc
 tests/__pycache__/test_unistring.cpython-33.pyc
+tests/__pycache__/test_unistring.cpython-35.pyc
 tests/__pycache__/test_using_api.cpython-33.pyc
+tests/__pycache__/test_using_api.cpython-35.pyc
 tests/__pycache__/test_util.cpython-33.pyc
+tests/__pycache__/test_util.cpython-35.pyc
 tests/cover/coverage_html.js
 tests/cover/jquery.hotkeys.js
 tests/cover/jquery.isonscreen.js
diff --git a/doc/docs/integrate.rst b/doc/docs/integrate.rst
index 73e02ac..77daaa4 100644
--- a/doc/docs/integrate.rst
+++ b/doc/docs/integrate.rst
@@ -27,3 +27,14 @@ Bash completion
 
 The source distribution contains a file ``external/pygments.bashcomp`` that
 sets up completion for the ``pygmentize`` command in bash.
+
+Wrappers for other languages
+----------------------------
+
+These libraries provide Pygments highlighting for users of other languages
+than Python:
+
+* `pygments.rb <https://github.com/tmm1/pygments.rb>`_, a pygments wrapper for Ruby
+* `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
+  Clojure
+* `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
diff --git a/doc/faq.rst b/doc/faq.rst
index 5458e65..f375828 100644
--- a/doc/faq.rst
+++ b/doc/faq.rst
@@ -89,28 +89,24 @@ Who uses Pygments?
 
 This is an (incomplete) list of projects and sites known to use the Pygments highlighter.
 
-* `Pygments API <http://pygments.appspot.com/>`_, a HTTP POST interface to Pygments
+* `Wikipedia <http://en.wikipedia.org>`_
+* `BitBucket <http://bitbucket.org/>`_, a Mercurial and Git hosting site
 * `The Sphinx documentation builder <http://sphinx.pocoo.org/>`_, for embedded source examples
 * `rst2pdf <http://code.google.com/p/rst2pdf/>`_, a reStructuredText to PDF converter
-* `Zine <http://zine.pocoo.org/>`_, a Python blogging system
+* `Codecov <http://codecov.io/>`_, a code coverage CI service
 * `Trac <http://trac.edgewall.org/>`_, the universal project management tool
-* `Bruce <http://r1chardj0n3s.googlepages.com/bruce>`_, a reStructuredText presentation tool
 * `AsciiDoc <http://www.methods.co.nz/asciidoc/>`_, a text-based documentation generator
 * `ActiveState Code <http://code.activestate.com/>`_, the Python Cookbook successor
 * `ViewVC <http://viewvc.org/>`_, a web-based version control repository browser
 * `BzrFruit <http://repo.or.cz/w/bzrfruit.git>`_, a Bazaar branch viewer
 * `QBzr <http://bazaar-vcs.org/QBzr>`_, a cross-platform Qt-based GUI front end for Bazaar
-* `BitBucket <http://bitbucket.org/>`_, a Mercurial and Git hosting site
 * `Review Board <http://www.review-board.org/>`_, a collaborative code reviewing tool
-* `skeletonz <http://orangoo.com/skeletonz/>`_, a Python powered content management system
 * `Diamanda <http://code.google.com/p/diamanda/>`_, a Django powered wiki system with support for Pygments
 * `Progopedia <http://progopedia.ru/>`_ (`English <http://progopedia.com/>`_),
   an encyclopedia of programming languages
-* `Postmarkup <http://code.google.com/p/postmarkup/>`_, a BBCode to XHTML generator
-* `Language Comparison <http://michaelsilver.us/lc>`_, a site that compares different programming languages
-* `BPython <http://www.noiseforfree.com/bpython/>`_, a curses-based intelligent Python shell
-* `Challenge-You! <http://challenge-you.appspot.com/>`_, a site offering programming challenges
+* `Bruce <http://r1chardj0n3s.googlepages.com/bruce>`_, a reStructuredText presentation tool
 * `PIDA <http://pida.co.uk/>`_, a universal IDE written in Python
+* `BPython <http://www.noiseforfree.com/bpython/>`_, a curses-based intelligent Python shell
 * `PuDB <http://pypi.python.org/pypi/pudb>`_, a console Python debugger
 * `XWiki <http://www.xwiki.org/>`_, a wiki-based development framework in Java, using Jython
 * `roux <http://ananelson.com/software/roux/>`_, a script for running R scripts
@@ -118,23 +114,25 @@ This is an (incomplete) list of projects and sites known to use the Pygments hig
 * `hurl <http://hurl.it/>`_, a web service for making HTTP requests
 * `wxHTMLPygmentizer <http://colinbarnette.net/projects/wxHTMLPygmentizer>`_ is
   a GUI utility, used to make code-colorization easier
-* `WpPygments <http://blog.mirotin.net/?page_id=49>`_, a highlighter plugin for WordPress
-* `LodgeIt <http://paste.pocoo.org/>`_, a pastebin with XMLRPC support and diffs
-* `SpammCan <http://chrisarndt.de/projects/spammcan/>`_, a pastebin (demo see
-  `here <http://paste.chrisarndt.de/>`_)
-* `WowAce.com pastes <http://www.wowace.com/paste/>`_, a pastebin
+* `Postmarkup <http://code.google.com/p/postmarkup/>`_, a BBCode to XHTML generator
+* `WpPygments <http://blog.mirotin.net/?page_id=49>`_, and `WPygments
+  <https://github.com/capynet/WPygments>`_, highlighter plugins for WordPress
 * `Siafoo <http://siafoo.net>`_, a tool for sharing and storing useful code and programming experience
 * `D source <http://www.dsource.org/>`_, a community for the D programming language
-* `dumpz.org <http://dumpz.org/>`_, a pastebin
 * `dpaste.com <http://dpaste.com/>`_, another Django pastebin
-* `PylonsHQ Pasties <http://pylonshq.com/pasties/new>`_, a pastebin
 * `Django snippets <http://www.djangosnippets.org/>`_, a pastebin for Django code
 * `Fayaa <http://www.fayaa.com/code/>`_, a Chinese pastebin
 * `Incollo.com <http://incollo.com>`_, a free collaborative debugging tool
 * `PasteBox <http://p.boxnet.eu/>`_, a pastebin focused on privacy
-* `xinotes.org <http://www.xinotes.org/>`_, a site to share notes, code snippets etc.
 * `hilite.me <http://www.hilite.me/>`_, a site to highlight code snippets
 * `patx.me <http://patx.me/paste>`_, a pastebin
+* `Fluidic <https://github.com/richsmith/fluidic>`_, an experiment in
+  integrating shells with a GUI
+* `pygments.rb <https://github.com/tmm1/pygments.rb>`_, a pygments wrapper for Ruby
+* `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
+  Clojure
+* `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
+
 
 If you have a project or web site using Pygments, drop me a line, and I'll add a
 link here.
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 7bd7557..c623440 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -26,7 +26,7 @@
     :license: BSD, see LICENSE for details.
 """
 
-__version__ = '2.1'
+__version__ = '2.1.3'
 __docformat__ = 'restructuredtext'
 
 __all__ = ['lex', 'format', 'highlight']
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index 569ae84..01d053d 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -77,7 +77,7 @@ if __name__ == '__main__':  # pragma: no cover
     footer = content[content.find("if __name__ == '__main__':"):]
 
     # write new file
-    with open(__file__, 'wb') as fp:
+    with open(__file__, 'w') as fp:
         fp.write(header)
         fp.write('FORMATTERS = {\n    %s\n}\n\n' % ',\n    '.join(found_formatters))
         fp.write(footer)
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index a008751..2c6bb19 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -702,7 +702,9 @@ class HtmlFormatter(Formatter):
         if self.filename:
             yield 0, ('<span class="filename">' + self.filename + '</span>')
 
-        yield 0, ('<pre' + (style and ' style="%s"' % style) + '>')
+        # the empty span here is to keep leading empty lines from being
+        # ignored by HTML parsers
+        yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
         for tup in inner:
             yield tup
         yield 0, '</pre>'
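
The empty <span> yielded right after the opening <pre> is what keeps leading
blank lines from being dropped when the HTML is parsed (#1111). A minimal
sketch of how the change shows up in the formatter output; the sample snippet
is made up:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # the two leading blank lines used to disappear in rendered output
    html = highlight('\n\nprint("hi")\n', PythonLexer(), HtmlFormatter())
    assert '<pre><span></span>' in html
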
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index a7b5d51..cc95ce2 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -82,9 +82,13 @@ class FontManager(object):
         stdout, _ = proc.communicate()
         if proc.returncode == 0:
             lines = stdout.splitlines()
-            if lines:
-                path = lines[0].decode().strip().strip(':')
-                return path
+            for line in lines:
+                if line.startswith(b'Fontconfig warning:'):
+                    continue
+                path = line.decode().strip().strip(':')
+                if path:
+                    return path
+            return None
 
     def _create_nix(self):
         for name in STYLES['NORMAL']:
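
The loop above makes the font lookup skip "Fontconfig warning:" lines that
fc-list may print and return only a non-empty path (#1188). A standalone
sketch of the same filtering, run against assumed fc-list output (the font
path below is invented):

    stdout = (b'Fontconfig warning: ignoring UTF-8: not a valid region tag\n'
              b'/usr/share/fonts/truetype/DejaVuSansMono.ttf: \n')
    path = None
    for line in stdout.splitlines():
        if line.startswith(b'Fontconfig warning:'):
            continue  # diagnostic noise, not a font path
        candidate = line.decode().strip().strip(':')
        if candidate:
            path = candidate
            break
    assert path == '/usr/share/fonts/truetype/DejaVuSansMono.ttf'
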
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index 15e68e3..66d521f 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -413,11 +413,18 @@ class LatexFormatter(Formatter):
         outfile.write(u'\\end{' + self.envname + u'}\n')
 
         if self.full:
+            encoding = self.encoding or 'utf8'
+            # map known existings encodings from LaTeX distribution
+            encoding = {
+                'utf_8': 'utf8',
+                'latin_1': 'latin1',
+                'iso_8859_1': 'latin1',
+            }.get(encoding.replace('-', '_'), encoding)
             realoutfile.write(DOC_TEMPLATE %
                 dict(docclass  = self.docclass,
                      preamble  = self.preamble,
                      title     = self.title,
-                     encoding  = self.encoding or 'utf8',
+                     encoding  = encoding,
                      styledefs = self.get_style_defs(),
                      code      = outfile.getvalue()))
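
With full=True, the encoding handed to the formatter is now normalized to the
spelling LaTeX expects (utf8, latin1) before it is written into the document
template (#1152). The mapping above, applied on its own to an assumed input
value:

    encoding = 'iso-8859-1'
    encoding = {
        'utf_8': 'utf8',
        'latin_1': 'latin1',
        'iso_8859_1': 'latin1',
    }.get(encoding.replace('-', '_'), encoding)
    assert encoding == 'latin1'   # unknown encodings pass through unchanged
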
 
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 3afa269..090e7a9 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -46,7 +46,7 @@ LEXERS = {
     'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
     'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
     'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
-    'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
+    'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
     'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
     'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
     'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
@@ -130,7 +130,7 @@ LEXERS = {
     'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
     'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
     'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
-    'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
+    'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
     'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
     'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
     'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
@@ -457,7 +457,7 @@ if __name__ == '__main__':  # pragma: no cover
     footer = content[content.find("if __name__ == '__main__':"):]
 
     # write new file
-    with open(__file__, 'wb') as fp:
+    with open(__file__, 'w') as fp:
         fp.write(header)
         fp.write('LEXERS = {\n    %s,\n}\n\n' % ',\n    '.join(found_lexers))
         fp.write(footer)
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
index 5a7137e..5c724d0 100644
--- a/pygments/lexers/c_cpp.py
+++ b/pygments/lexers/c_cpp.py
@@ -216,6 +216,8 @@ class CppLexer(CFamilyLexer):
                 'final'), suffix=r'\b'), Keyword),
             (r'char(16_t|32_t)\b', Keyword.Type),
             (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+            # C++11 raw strings
+            (r'R"\(', String, 'rawstring'),
             inherit,
         ],
         'root': [
@@ -232,10 +234,15 @@ class CppLexer(CFamilyLexer):
             # template specification
             (r'\s*(?=>)', Text, '#pop'),
         ],
+        'rawstring': [
+            (r'\)"', String, '#pop'),
+            (r'[^)]+', String),
+            (r'\)', String),
+        ],
     }
 
     def analyse_text(text):
-        if re.search('#include <[a-z]+>', text):
+        if re.search('#include <[a-z_]+>', text):
             return 0.2
         if re.search('using namespace ', text):
             return 0.4
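
The new 'rawstring' state covers C++11 raw string literals of the plain
R"(...)" form; delimited variants such as R"x(...)x" are not handled by this
rule. A quick sketch of the resulting tokens, using a made-up snippet:

    from pygments.lexers.c_cpp import CppLexer
    from pygments.token import String

    code = 'const char *s = R"(no \\n escape processing here)";\n'
    tokens = list(CppLexer().get_tokens(code))
    assert (String, 'R"(') in tokens   # opener recognized by the new rule
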
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index d69c55f..9f9894c 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -44,12 +44,13 @@ class ChapelLexer(RegexLexer):
             (words((
                 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall',
                 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum',
-                'export', 'extern', 'for', 'forall', 'if', 'index', 'inline',
-                'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on',
-                'otherwise', 'pragma', 'private', 'public', 'reduce',
-                'require', 'return', 'scan', 'select', 'serial', 'single',
-                'sparse', 'subdomain', 'sync', 'then', 'use', 'when', 'where',
-                'while', 'with', 'yield', 'zip'), suffix=r'\b'),
+                'except', 'export', 'extern', 'for', 'forall', 'if', 'index',
+                'inline', 'iter', 'label', 'lambda', 'let', 'local', 'new',
+                'noinit', 'on', 'only', 'otherwise', 'pragma', 'private',
+                'public', 'reduce', 'require', 'return', 'scan', 'select',
+                'serial', 'single', 'sparse', 'subdomain', 'sync', 'then',
+                'use', 'when', 'where', 'while', 'with', 'yield', 'zip'),
+                   suffix=r'\b'),
              Keyword),
             (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
             (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 21f8d1e..eac4b5e 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -97,17 +97,17 @@ class CSharpLexer(RegexLexer):
                  Comment.Preproc),
                 (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
                  Keyword)),
-                (r'(abstract|as|async|await|base|break|case|catch|'
+                (r'(abstract|as|async|await|base|break|by|case|catch|'
                  r'checked|const|continue|default|delegate|'
                  r'do|else|enum|event|explicit|extern|false|finally|'
                  r'fixed|for|foreach|goto|if|implicit|in|interface|'
-                 r'internal|is|lock|new|null|operator|'
+                 r'internal|is|let|lock|new|null|on|operator|'
                  r'out|override|params|private|protected|public|readonly|'
                  r'ref|return|sealed|sizeof|stackalloc|static|'
                  r'switch|this|throw|true|try|typeof|'
                  r'unchecked|unsafe|virtual|void|while|'
                  r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
-                 r'descending|from|group|into|orderby|select|where|'
+                 r'descending|from|group|into|orderby|select|thenby|where|'
                  r'join|equals)\b', Keyword),
                 (r'(global)(::)', bygroups(Keyword, Punctuation)),
                 (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index fc5ff71..04cef14 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -108,8 +108,8 @@ class VerilogLexer(RegexLexer):
                 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wo'
                 'shortreal', 'real', 'realtime'), suffix=r'\b'),
              Keyword.Type),
-            ('[a-zA-Z_]\w*:(?!:)', Name.Label),
-            ('[a-zA-Z_]\w*', Name),
+            (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
+            (r'\$?[a-zA-Z_]\w*', Name),
         ],
         'string': [
             (r'"', String, '#pop'),
@@ -250,8 +250,8 @@ class SystemVerilogLexer(RegexLexer):
                 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wo'
                 'shortreal', 'real', 'realtime'), suffix=r'\b'),
              Keyword.Type),
-            ('[a-zA-Z_]\w*:(?!:)', Name.Label),
-            ('[a-zA-Z_]\w*', Name),
+            (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
+            (r'\$?[a-zA-Z_]\w*', Name),
         ],
         'classname': [
             (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
@@ -308,20 +308,27 @@ class VhdlLexer(RegexLexer):
             (r'[~!%^&*+=|?:<>/-]', Operator),
             (r"'[a-z_]\w*", Name.Attribute),
             (r'[()\[\],.;\']', Punctuation),
-            (r'"[^\n\\]*"', String),
+            (r'"[^\n\\"]*"', String),
 
             (r'(library)(\s+)([a-z_]\w*)',
              bygroups(Keyword, Text, Name.Namespace)),
             (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
+            (r'(use)(\s+)([a-z_][\w.]*\.)(all)',
+             bygroups(Keyword, Text, Name.Namespace, Keyword)),
             (r'(use)(\s+)([a-z_][\w.]*)',
              bygroups(Keyword, Text, Name.Namespace)),
+            (r'(std|ieee)(\.[a-z_]\w*)',
+             bygroups(Name.Namespace, Name.Namespace)),
+            (words(('std', 'ieee', 'work'), suffix=r'\b'),
+             Name.Namespace),
             (r'(entity|component)(\s+)([a-z_]\w*)',
              bygroups(Keyword, Text, Name.Class)),
             (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
              r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
              bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
                       Name.Class, Text, Keyword)),
-
+            (r'([a-z_]\w*)(:)(\s+)(process|for)',
+             bygroups(Name.Class, Operator, Text, Keyword)),
             (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),
 
             include('types'),
@@ -341,7 +348,7 @@ class VhdlLexer(RegexLexer):
                 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
                 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
                 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
-                'std_logic', 'std_logic_vector'), suffix=r'\b'),
+                'std_logic', 'std_logic_vector', 'signed', 'unsigned'), suffix=r'\b'),
              Keyword.Type),
         ],
         'keywords': [
@@ -357,8 +364,8 @@ class VhdlLexer(RegexLexer):
                 'next', 'nor', 'not', 'null', 'of', 'on',
                 'open', 'or', 'others', 'out', 'package', 'port',
                 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
-                'register', 'reject', 'return', 'rol', 'ror', 'select',
-                'severity', 'signal', 'shared', 'sla', 'sli', 'sra',
+                'register', 'reject', 'rem', 'return', 'rol', 'ror', 'select',
+                'severity', 'signal', 'shared', 'sla', 'sll', 'sra',
                 'srl', 'subtype', 'then', 'to', 'transport', 'type',
                 'units', 'until', 'use', 'variable', 'wait', 'when',
                 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index cf7c7d6..d0aa6d3 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -65,7 +65,7 @@ class JuliaLexer(RegexLexer):
                 bygroups(Keyword, Name.Function), 'funcname'),
 
             # types
-            (r'(type|typealias|abstract)((?:\s|\\\s)+)',
+            (r'(type|typealias|abstract|immutable)((?:\s|\\\s)+)',
                 bygroups(Keyword, Name.Class), 'typename'),
 
             # operators
@@ -132,14 +132,23 @@ class JuliaLexer(RegexLexer):
         'string': [
             (r'"', String, '#pop'),
             (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
-            (r'\$(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
-                String.Interpol),
-            (r'[^\\"$]+', String),
-            # quotes, dollar signs, and backslashes must be parsed one at a time
-            (r'["\\]', String),
-            # unhandled string formatting sign
-            (r'\$', String)
+            # Interpolation is defined as "$" followed by the shortest full
+            # expression, which is something we can't parse.
+            # Include the most common cases here: $word, and $(paren'd expr).
+            (r'\$[a-zA-Z_]+', String.Interpol),
+            (r'\$\(', String.Interpol, 'in-intp'),
+            # @printf and @sprintf formats
+            (r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[diouxXeEfFgGcrs%]',
+             String.Interpol),
+            (r'[^$%"\\]+', String),
+            # unhandled special signs
+            (r'[$%"\\]', String),
         ],
+        'in-intp': [
+            (r'[^()]+', String.Interpol),
+            (r'\(', String.Interpol, '#push'),
+            (r'\)', String.Interpol, '#pop'),
+        ]
     }
 
     def analyse_text(text):
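
Inside the rewritten 'string' state, interpolation is now tokenized for the
two most common forms, $name and $(expr), and @printf/@sprintf-style format
specifiers are recognized as well (#1170). A small sketch; the string literal
and variable names are invented:

    from pygments.lexers.julia import JuliaLexer
    from pygments.token import String

    tokens = list(JuliaLexer().get_tokens('"total: $count, half: $(count / 2)"\n'))
    assert (String.Interpol, '$count') in tokens
    assert (String.Interpol, '$(') in tokens
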
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index bd59d2b..84720fa 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -1488,7 +1488,7 @@ class EmacsLispLexer(RegexLexer):
     .. versionadded:: 2.1
     """
     name = 'EmacsLisp'
-    aliases = ['emacs', 'elisp']
+    aliases = ['emacs', 'elisp', 'emacs-lisp']
     filenames = ['*.el']
     mimetypes = ['text/x-elisp', 'application/x-elisp']
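
Together with the updated _mapping.py entry earlier in this patch, the new
alias makes the lexer resolvable as 'emacs-lisp'. A one-line check, assuming
Pygments 2.1.3 is installed:

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('emacs-lisp')
    assert lexer.name == 'EmacsLisp'
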
 
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index c05c8ae..9433f7f 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -213,6 +213,26 @@ class Python3Lexer(RegexLexer):
 
     uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
 
+    def innerstring_rules(ttype):
+        return [
+            # the old style '%s' % (...) string formatting (still valid in Py3)
+            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+            # the new style '{}'.format(...) string formatting
+            (r'\{'
+             '((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+             '(\![sra])?'                      # conversion
+             '(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[bcdeEfFgGnosxX%]?)?'
+             '\}', String.Interpol),
+
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"%\{\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # unhandled string formatting sign
+            (r'%|(\{{1,2})', ttype)
+            # newlines are an error (use "nl" state)
+        ]
+
     tokens = PythonLexer.tokens.copy()
     tokens['keywords'] = [
         (words((
@@ -295,23 +315,8 @@ class Python3Lexer(RegexLexer):
         (uni_name, Name.Namespace),
         default('#pop'),
     ]
-    tokens['strings'] = [
-        # the old style '%s' % (...) string formatting (still valid in Py3)
-        (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-         '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
-        # the new style '{}'.format(...) string formatting
-        (r'\{'
-         '((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
-         '(\![sra])?'                      # conversion
-         '(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[bcdeEfFgGnosxX%]?)?'
-         '\}', String.Interpol),
-        # backslashes, quotes and formatting signs must be parsed one at a time
-        (r'[^\\\'"%\{\n]+', String),
-        (r'[\'"\\]', String),
-        # unhandled string formatting sign
-        (r'%|(\{{1,2})', String)
-        # newlines are an error (use "nl" state)
-    ]
+    tokens['strings-single'] = innerstring_rules(String.Single)
+    tokens['strings-double'] = innerstring_rules(String.Double)
 
     def analyse_text(text):
         return shebang_matches(text, r'pythonw?3(\.\d)?')
diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py
index cb634ee..103b4ad 100644
--- a/pygments/lexers/rdf.py
+++ b/pygments/lexers/rdf.py
@@ -29,43 +29,56 @@ class SparqlLexer(RegexLexer):
     filenames = ['*.rq', '*.sparql']
     mimetypes = ['application/sparql-query']
 
+    # character group definitions ::
+
+    PN_CHARS_BASE_GRP = (u'a-zA-Z'
+                         u'\u00c0-\u00d6'
+                         u'\u00d8-\u00f6'
+                         u'\u00f8-\u02ff'
+                         u'\u0370-\u037d'
+                         u'\u037f-\u1fff'
+                         u'\u200c-\u200d'
+                         u'\u2070-\u218f'
+                         u'\u2c00-\u2fef'
+                         u'\u3001-\ud7ff'
+                         u'\uf900-\ufdcf'
+                         u'\ufdf0-\ufffd'
+                         u'\U00010000-\U000effff')
+
+    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+    PN_CHARS_GRP = (PN_CHARS_U_GRP +
+                    r'\-' +
+                    r'0-9' +
+                    u'\u00b7' +
+                    u'\u0300-\u036f' +
+                    u'\u203f-\u2040')
+
+    HEX_GRP = '0-9A-Fa-f'
+
+    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&""()*+,;=/?#@%'
+
     # terminal productions ::
 
-    PN_CHARS_BASE = (u'(?:[a-zA-Z'
-                     u'\u00c0-\u00d6'
-                     u'\u00d8-\u00f6'
-                     u'\u00f8-\u02ff'
-                     u'\u0370-\u037d'
-                     u'\u037f-\u1fff'
-                     u'\u200c-\u200d'
-                     u'\u2070-\u218f'
-                     u'\u2c00-\u2fef'
-                     u'\u3001-\ud7ff'
-                     u'\uf900-\ufdcf'
-                     u'\ufdf0-\ufffd]|'
-                     u'[^\u0000-\uffff]|'
-                     u'[\ud800-\udbff][\udc00-\udfff])')
+    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
 
-    PN_CHARS_U = '(?:' + PN_CHARS_BASE + '|_)'
+    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
 
-    PN_CHARS = ('(?:' + PN_CHARS_U + r'|[\-0-9' +
-                u'\u00b7' +
-                u'\u0300-\u036f' +
-                u'\u203f-\u2040])')
+    PN_CHARS = '[' + PN_CHARS_GRP + ']'
 
-    HEX = '[0-9A-Fa-f]'
+    HEX = '[' + HEX_GRP + ']'
 
-    PN_LOCAL_ESC_CHARS = r'[ _~.\-!$&""()*+,;=/?#@%]'
+    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
 
     IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
 
-    BLANK_NODE_LABEL = '_:(?:' + PN_CHARS_U + '|[0-9])(?:(?:' + PN_CHARS + '|\.)*' + \
-                       PN_CHARS + ')?'
+    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+                       '.]*' + PN_CHARS + ')?'
 
-    PN_PREFIX = PN_CHARS_BASE + '(?:(?:' + PN_CHARS + '|\.)*' + PN_CHARS + ')?'
+    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
 
-    VARNAME = '(?:' + PN_CHARS_U + '|[0-9])(?:' + PN_CHARS_U + \
-              u'|[0-9\u00b7\u0300-\u036f\u203f-\u2040])*'
+    VARNAME = u'[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
+              u'0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
 
     PERCENT = '%' + HEX + HEX
 
@@ -73,9 +86,9 @@ class SparqlLexer(RegexLexer):
 
     PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
 
-    PN_LOCAL = ('(?:(?:' + PN_CHARS_U + '|[:0-9])|' + PLX + ')' +
-                '(?:(?:(?:' + PN_CHARS + '|[.:])|' + PLX + ')*(?:(?:' +
-                PN_CHARS + '|:)|' + PLX + '))?')
+    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+                PN_CHARS_GRP + ':]|' + PLX + '))?')
 
     EXPONENT = r'[eE][+-]?\d+'
 
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index dc23d01..ad2e2d7 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -35,6 +35,7 @@ class BashLexer(RegexLexer):
     name = 'Bash'
     aliases = ['bash', 'sh', 'ksh', 'shell']
     filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
+                 '*.exheres-0', '*.exlib',
                  '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
     mimetypes = ['application/x-sh', 'application/x-shellscript']
 
@@ -49,7 +50,7 @@ class BashLexer(RegexLexer):
             (r'\$\(\(', Keyword, 'math'),
             (r'\$\(', Keyword, 'paren'),
             (r'\$\{#?', String.Interpol, 'curly'),
-            (r'\$[a-fA-F_][a-fA-F0-9_]*', Name.Variable),  # user variable
+            (r'\$[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),  # user variable
             (r'\$(?:\d+|[#$?!_*@-])', Name.Variable),      # builtin
             (r'\$', Text),
         ],
@@ -213,16 +214,16 @@ class BatchLexer(RegexLexer):
                        (_nl, _punct, _ws, _nl))
     _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
     _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
-    _string = r'(?:"[^%s"]*"?)' % _nl
+    _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
     _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
                  r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
                  r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
                  r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
                  r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
                  (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
-    _core_token = r'(?:(?:(?:\^[%s]?)?[^%s%s%s])+)' % (_nl, _nl, _punct, _ws)
-    _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^%s%s%s)])+)' % (_nl, _nl,
-                                                                 _punct, _ws)
+    _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws)
+    _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl,
+                                                                  _punct, _ws)
     _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
     _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
     _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
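
Among the shell.py changes, the Bash user-variable rule now accepts the full
range of letters rather than only a-f, so ordinary variable names are
tokenized as Name.Variable again. A quick check with an invented command line:

    from pygments.lexers.shell import BashLexer
    from pygments.token import Name

    tokens = list(BashLexer().get_tokens('echo $greeting $HOME\n'))
    assert (Name.Variable, '$greeting') in tokens
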
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 646a9f3..a7736f7 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -148,8 +148,8 @@ class PostgresLexer(PostgresBase, RegexLexer):
             (r'\$\d+', Name.Variable),
             (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
             (r'[0-9]+', Number.Integer),
-            (r"(E|U&)?'(''|[^'])*'", String.Single),
-            (r'(U&)?"(""|[^"])*"', String.Name),  # quoted identifier
+            (r"(E|U&)?'", String.Single, 'string'),
+            (r'(U&)?"', String.Name, 'quoted-ident'),  # quoted identifier
             (r'(?s)(\$[^$]*\$)(.*?)(\1)', language_callback),
             (r'[a-z_]\w*', Name),
 
@@ -164,6 +164,16 @@ class PostgresLexer(PostgresBase, RegexLexer):
             (r'[^/*]+', Comment.Multiline),
             (r'[/*]', Comment.Multiline)
         ],
+        'string': [
+            (r"[^']+", String.Single),
+            (r"''", String.Single),
+            (r"'", String.Single, '#pop'),
+        ],
+        'quoted-ident': [
+            (r'[^"]+', String.Name),
+            (r'""', String.Name),
+            (r'"', String.Name, '#pop'),
+        ],
     }
 
 
@@ -380,13 +390,13 @@ class SqlLexer(RegexLexer):
                 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DEREF', 'DESC',
                 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', 'DETERMINISTIC',
                 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', 'DISTINCT', 'DO',
-                'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE',
-                'EACH', 'ELSE', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY',
+                'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE', 'EACH',
+                'ELSE', 'ELSIF', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY',
                 'EXCEPTION', 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
                 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FOR',
                 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', 'FREEZE', 'FROM', 'FULL',
                 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED',
-                'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY',
+                'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
                 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', 'IN',
                 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', 'INHERITS', 'INITIALIZE',
                 'INITIALLY', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTANTIABLE',
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 1f2322c..3e55b6a 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -251,7 +251,7 @@ class VelocityLexer(RegexLexer):
         'funcparams': [
             (r'\$\{?', Punctuation, 'variable'),
             (r'\s+', Text),
-            (r',', Punctuation),
+            (r'[,:]', Punctuation),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
             (r"'(\\\\|\\'|[^'])*'", String.Single),
             (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -259,6 +259,8 @@ class VelocityLexer(RegexLexer):
             (r'(true|false|null)\b', Keyword.Constant),
             (r'\(', Punctuation, '#push'),
             (r'\)', Punctuation, '#pop'),
+            (r'\{', Punctuation, '#push'),
+            (r'\}', Punctuation, '#pop'),
             (r'\[', Punctuation, '#push'),
             (r'\]', Punctuation, '#pop'),
         ]
@@ -875,7 +877,7 @@ class GenshiMarkupLexer(RegexLexer):
             # yield style and script blocks as Other
             (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
             (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
-            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
+            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
             include('variable'),
             (r'[<$]', Other),
         ],
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
index 43b16f8..cab9add 100644
--- a/pygments/lexers/textfmts.py
+++ b/pygments/lexers/textfmts.py
@@ -122,6 +122,11 @@ class HttpLexer(RegexLexer):
 
     flags = re.DOTALL
 
+    def get_tokens_unprocessed(self, text, stack=('root',)):
+        """Reset the content-type state."""
+        self.content_type = None
+        return RegexLexer.get_tokens_unprocessed(self, text, stack)
+
     def header_callback(self, match):
         if match.group(1).lower() == 'content-type':
             content_type = match.group(5).strip()
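
Overriding get_tokens_unprocessed here clears content_type before each run, so
a single HttpLexer instance can be reused for several messages without the
body of the second one being highlighted with the Content-Type of the first.
A small sketch with made-up messages:

    from pygments.lexers.textfmts import HttpLexer

    lexer = HttpLexer()
    first = 'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n{"ok": true}\n'
    second = 'HTTP/1.1 204 No Content\r\n\r\n'
    list(lexer.get_tokens(first))    # remembers application/json while lexing
    list(lexer.get_tokens(second))   # starts again with content_type reset
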
diff --git a/pygments/token.py b/pygments/token.py
index f31625e..fa3b1e1 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -43,6 +43,14 @@ class _TokenType(tuple):
     def __repr__(self):
         return 'Token' + (self and '.' or '') + '.'.join(self)
 
+    def __copy__(self):
+        # These instances are supposed to be singletons
+        return self
+
+    def __deepcopy__(self, memo):
+        # These instances are supposed to be singletons
+        return self
+
 
 Token       = _TokenType()
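
Token types are singletons that are compared by identity, so both copy hooks
simply return the instance itself; copying them no longer produces look-alike
tokens that fail "is" checks (#1168). A quick check:

    import copy
    from pygments.token import Token

    assert copy.copy(Token.Text) is Token.Text
    assert copy.deepcopy(Token.Keyword.Type) is Token.Keyword.Type
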
 
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..4754a9d
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+coverage
+nose
+pyflakes
+pylint
+tox
diff --git a/setup.cfg b/setup.cfg
index 48ab3bd..04980ac 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -7,3 +7,6 @@ tag_svn_revision = 0
... 77 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/pygments.git


