[med-svn] [mypy] 01/02: Imported Upstream version 0.3.1

Michael Crusoe misterc-guest at moszumanska.debian.org
Thu May 5 13:58:27 UTC 2016


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to branch master
in repository mypy.

commit 3fe903a0eebf24cfd5c17b43afa77ba011b36981
Author: Michael R. Crusoe <crusoe at ucdavis.edu>
Date:   Thu May 5 05:59:50 2016 -0700

    Imported Upstream version 0.3.1
---
 PKG-INFO                                           |   29 +
 lib-typing/3.2/typing.py                           | 1716 +++++++++++++
 mypy/__init__.py                                   |    1 +
 mypy/__main__.py                                   |    5 +
 mypy/applytype.py                                  |   61 +
 mypy/build.py                                      | 1103 +++++++++
 mypy/checker.py                                    | 2539 +++++++++++++++++++
 mypy/checkexpr.py                                  | 1647 +++++++++++++
 mypy/checkmember.py                                |  404 +++
 mypy/checkstrformat.py                             |  297 +++
 mypy/constraints.py                                |  319 +++
 mypy/defaults.py                                   |    2 +
 mypy/docstring.py                                  |  204 ++
 mypy/erasetype.py                                  |  110 +
 mypy/errors.py                                     |  381 +++
 mypy/expandtype.py                                 |  121 +
 mypy/exprtotype.py                                 |   72 +
 mypy/git.py                                        |  136 +
 mypy/infer.py                                      |   42 +
 mypy/join.py                                       |  321 +++
 mypy/lex.py                                        |  888 +++++++
 mypy/main.py                                       |  340 +++
 mypy/maptype.py                                    |   96 +
 mypy/meet.py                                       |  200 ++
 mypy/messages.py                                   |  874 +++++++
 mypy/moduleinfo.py                                 |  560 +++++
 mypy/nodes.py                                      | 1819 ++++++++++++++
 mypy/parse.py                                      | 1999 +++++++++++++++
 mypy/parsetype.py                                  |  250 ++
 mypy/replacetvars.py                               |   48 +
 mypy/report.py                                     |  287 +++
 mypy/sametypes.py                                  |  123 +
 mypy/semanal.py                                    | 2603 ++++++++++++++++++++
 mypy/solve.py                                      |   75 +
 mypy/stats.py                                      |  371 +++
 mypy/strconv.py                                    |  452 ++++
 mypy/stubgen.py                                    |  662 +++++
 mypy/stubgenc.py                                   |  215 ++
 mypy/stubutil.py                                   |  102 +
 mypy/subtypes.py                                   |  346 +++
 mypy/traverser.py                                  |  219 ++
 mypy/treetransform.py                              |  522 ++++
 mypy/typeanal.py                                   |  377 +++
 mypy/typefixture.py                                |  238 ++
 mypy/types.py                                      |  986 ++++++++
 mypy/util.py                                       |  102 +
 mypy/version.py                                    |    1 +
 mypy/visitor.py                                    |  229 ++
 mypy/waiter.py                                     |  285 +++
 scripts/mypy                                       |    6 +
 scripts/stubgen                                    |   20 +
 setup.cfg                                          |   13 +
 setup.py                                           |   92 +
 typeshed/runtests.py                               |   95 +
 typeshed/stdlib/2.7/HTMLParser.pyi                 |   28 +
 typeshed/stdlib/2.7/Queue.pyi                      |   29 +
 typeshed/stdlib/2.7/StringIO.pyi                   |   28 +
 typeshed/stdlib/2.7/UserDict.pyi                   |   11 +
 typeshed/stdlib/2.7/UserList.pyi                   |    3 +
 typeshed/stdlib/2.7/UserString.pyi                 |    4 +
 typeshed/stdlib/2.7/__builtin__.pyi                |  870 +++++++
 typeshed/stdlib/2.7/__future__.pyi                 |    9 +
 typeshed/stdlib/2.7/_ast.pyi                       |  516 ++++
 typeshed/stdlib/2.7/_codecs.pyi                    |   55 +
 typeshed/stdlib/2.7/_collections.pyi               |   41 +
 typeshed/stdlib/2.7/_functools.pyi                 |   19 +
 typeshed/stdlib/2.7/_hotshot.pyi                   |   34 +
 typeshed/stdlib/2.7/_io.pyi                        |  107 +
 typeshed/stdlib/2.7/_json.pyi                      |   19 +
 typeshed/stdlib/2.7/_locale.pyi                    |   81 +
 typeshed/stdlib/2.7/_md5.pyi                       |   13 +
 typeshed/stdlib/2.7/_random.pyi                    |   13 +
 typeshed/stdlib/2.7/_sha.pyi                       |   15 +
 typeshed/stdlib/2.7/_sha256.pyi                    |   23 +
 typeshed/stdlib/2.7/_sha512.pyi                    |   23 +
 typeshed/stdlib/2.7/_socket.pyi                    |  287 +++
 typeshed/stdlib/2.7/_sre.pyi                       |   53 +
 typeshed/stdlib/2.7/_struct.pyi                    |   22 +
 typeshed/stdlib/2.7/_symtable.pyi                  |   41 +
 typeshed/stdlib/2.7/_warnings.pyi                  |   11 +
 typeshed/stdlib/2.7/_weakref.pyi                   |   16 +
 typeshed/stdlib/2.7/_weakrefset.pyi                |    5 +
 typeshed/stdlib/2.7/abc.pyi                        |   39 +
 typeshed/stdlib/2.7/argparse.pyi                   |  171 ++
 typeshed/stdlib/2.7/array.pyi                      |   56 +
 typeshed/stdlib/2.7/ast.pyi                        |   40 +
 typeshed/stdlib/2.7/atexit.pyi                     |    5 +
 typeshed/stdlib/2.7/base64.pyi                     |   25 +
 typeshed/stdlib/2.7/binascii.pyi                   |   21 +
 typeshed/stdlib/2.7/builtins.pyi                   |  870 +++++++
 typeshed/stdlib/2.7/cPickle.pyi                    |   23 +
 typeshed/stdlib/2.7/cStringIO.pyi                  |   50 +
 typeshed/stdlib/2.7/codecs.pyi                     |  194 ++
 typeshed/stdlib/2.7/collections.pyi                |   91 +
 typeshed/stdlib/2.7/compileall.pyi                 |    7 +
 typeshed/stdlib/2.7/contextlib.pyi                 |   15 +
 typeshed/stdlib/2.7/copy.pyi                       |   10 +
 typeshed/stdlib/2.7/csv.pyi                        |   93 +
 typeshed/stdlib/2.7/datetime.pyi                   |  221 ++
 typeshed/stdlib/2.7/difflib.pyi                    |   63 +
 typeshed/stdlib/2.7/distutils/__init__.pyi         |    7 +
 typeshed/stdlib/2.7/distutils/version.pyi          |   23 +
 typeshed/stdlib/2.7/doctest.pyi                    |    9 +
 typeshed/stdlib/2.7/email/MIMEText.pyi             |    8 +
 typeshed/stdlib/2.7/email/__init__.pyi             |    6 +
 typeshed/stdlib/2.7/email/mime/__init__.pyi        |    0
 typeshed/stdlib/2.7/email/mime/base.pyi            |   10 +
 typeshed/stdlib/2.7/email/mime/multipart.pyi       |    8 +
 typeshed/stdlib/2.7/email/mime/nonmultipart.pyi    |    8 +
 typeshed/stdlib/2.7/email/mime/text.pyi            |    8 +
 typeshed/stdlib/2.7/encodings/__init__.pyi         |    6 +
 typeshed/stdlib/2.7/encodings/utf_8.pyi            |   14 +
 typeshed/stdlib/2.7/errno.pyi                      |  129 +
 typeshed/stdlib/2.7/exceptions.pyi                 |   80 +
 typeshed/stdlib/2.7/fcntl.pyi                      |   85 +
 typeshed/stdlib/2.7/fnmatch.pyi                    |    6 +
 typeshed/stdlib/2.7/functools.pyi                  |   29 +
 typeshed/stdlib/2.7/gc.pyi                         |   27 +
 typeshed/stdlib/2.7/getpass.pyi                    |    8 +
 typeshed/stdlib/2.7/gettext.pyi                    |   40 +
 typeshed/stdlib/2.7/glob.pyi                       |    4 +
 typeshed/stdlib/2.7/grp.pyi                        |   11 +
 typeshed/stdlib/2.7/gzip.pyi                       |   41 +
 typeshed/stdlib/2.7/hashlib.pyi                    |   27 +
 typeshed/stdlib/2.7/hmac.pyi                       |   11 +
 typeshed/stdlib/2.7/htmlentitydefs.pyi             |    9 +
 typeshed/stdlib/2.7/httplib.pyi                    |  124 +
 typeshed/stdlib/2.7/imp.pyi                        |   35 +
 typeshed/stdlib/2.7/importlib.pyi                  |    3 +
 typeshed/stdlib/2.7/inspect.pyi                    |   19 +
 typeshed/stdlib/2.7/io.pyi                         |  101 +
 typeshed/stdlib/2.7/itertools.pyi                  |   81 +
 typeshed/stdlib/2.7/json.pyi                       |   54 +
 typeshed/stdlib/2.7/logging/__init__.pyi           |  239 ++
 typeshed/stdlib/2.7/logging/handlers.pyi           |  200 ++
 typeshed/stdlib/2.7/markupbase.pyi                 |    9 +
 typeshed/stdlib/2.7/md5.pyi                        |   11 +
 typeshed/stdlib/2.7/mimetools.pyi                  |   31 +
 typeshed/stdlib/2.7/numbers.pyi                    |   77 +
 typeshed/stdlib/2.7/os/__init__.pyi                |  264 ++
 typeshed/stdlib/2.7/os/path.pyi                    |   65 +
 typeshed/stdlib/2.7/pickle.pyi                     |    8 +
 typeshed/stdlib/2.7/pipes.pyi                      |   13 +
 typeshed/stdlib/2.7/platform.pyi                   |   45 +
 typeshed/stdlib/2.7/posix.pyi                      |  206 ++
 typeshed/stdlib/2.7/pprint.pyi                     |   21 +
 typeshed/stdlib/2.7/pwd.pyi                        |   18 +
 typeshed/stdlib/2.7/random.pyi                     |   76 +
 typeshed/stdlib/2.7/re.pyi                         |   65 +
 typeshed/stdlib/2.7/resource.pyi                   |   33 +
 typeshed/stdlib/2.7/rfc822.pyi                     |   79 +
 typeshed/stdlib/2.7/robotparser.pyi                |    7 +
 typeshed/stdlib/2.7/select.pyi                     |  100 +
 typeshed/stdlib/2.7/sha.pyi                        |   12 +
 typeshed/stdlib/2.7/shlex.pyi                      |   27 +
 typeshed/stdlib/2.7/shutil.pyi                     |   30 +
 typeshed/stdlib/2.7/signal.pyi                     |   62 +
 typeshed/stdlib/2.7/simplejson/__init__.pyi        |   10 +
 typeshed/stdlib/2.7/simplejson/decoder.pyi         |    6 +
 typeshed/stdlib/2.7/simplejson/encoder.pyi         |    9 +
 typeshed/stdlib/2.7/simplejson/scanner.pyi         |    7 +
 typeshed/stdlib/2.7/smtplib.pyi                    |   90 +
 typeshed/stdlib/2.7/socket.pyi                     |  388 +++
 typeshed/stdlib/2.7/spwd.pyi                       |   15 +
 typeshed/stdlib/2.7/sqlite3/__init__.pyi           |    5 +
 typeshed/stdlib/2.7/sqlite3/dbapi2.pyi             |  237 ++
 typeshed/stdlib/2.7/ssl.pyi                        |    5 +
 typeshed/stdlib/2.7/stat.pyi                       |   58 +
 typeshed/stdlib/2.7/string.pyi                     |   74 +
 typeshed/stdlib/2.7/strop.pyi                      |   73 +
 typeshed/stdlib/2.7/struct.pyi                     |   28 +
 typeshed/stdlib/2.7/subprocess.pyi                 |   79 +
 typeshed/stdlib/2.7/sys.pyi                        |  128 +
 typeshed/stdlib/2.7/syslog.pyi                     |   38 +
 typeshed/stdlib/2.7/tarfile.pyi                    |  237 ++
 typeshed/stdlib/2.7/tempfile.pyi                   |   42 +
 typeshed/stdlib/2.7/textwrap.pyi                   |   33 +
 typeshed/stdlib/2.7/thread.pyi                     |   33 +
 typeshed/stdlib/2.7/threading.pyi                  |   95 +
 typeshed/stdlib/2.7/time.pyi                       |   48 +
 typeshed/stdlib/2.7/token.pyi                      |   62 +
 typeshed/stdlib/2.7/tokenize.pyi                   |  143 ++
 typeshed/stdlib/2.7/traceback.pyi                  |   17 +
 typeshed/stdlib/2.7/types.pyi                      |  162 ++
 typeshed/stdlib/2.7/typing.pyi                     |  312 +++
 typeshed/stdlib/2.7/unicodedata.pyi                |   40 +
 typeshed/stdlib/2.7/unittest.pyi                   |  176 ++
 typeshed/stdlib/2.7/urllib.pyi                     |  135 +
 typeshed/stdlib/2.7/urllib2.pyi                    |  149 ++
 typeshed/stdlib/2.7/urlparse.pyi                   |   53 +
 typeshed/stdlib/2.7/uuid.pyi                       |   36 +
 typeshed/stdlib/2.7/xml/__init__.pyi               |    0
 typeshed/stdlib/2.7/xml/sax/__init__.pyi           |    0
 typeshed/stdlib/2.7/xml/sax/handler.pyi            |   50 +
 typeshed/stdlib/2.7/xml/sax/saxutils.pyi           |   58 +
 typeshed/stdlib/2.7/xml/sax/xmlreader.pyi          |   75 +
 typeshed/stdlib/2.7/xxsubtype.pyi                  |   17 +
 typeshed/stdlib/2.7/zipimport.pyi                  |   25 +
 typeshed/stdlib/2.7/zlib.pyi                       |   36 +
 typeshed/stdlib/2and3/_bisect.pyi                  |   11 +
 typeshed/stdlib/2and3/_heapq.pyi                   |   15 +
 typeshed/stdlib/2and3/cmath.pyi                    |   34 +
 typeshed/stdlib/2and3/marshal.pyi                  |    8 +
 typeshed/stdlib/2and3/math.pyi                     |   52 +
 typeshed/stdlib/2and3/operator.pyi                 |  127 +
 typeshed/stdlib/2and3/webbrowser.pyi               |  100 +
 typeshed/stdlib/3.3/ipaddress.pyi                  |  200 ++
 typeshed/stdlib/3.4/_operator.pyi                  |  108 +
 typeshed/stdlib/3.4/_stat.pyi                      |   69 +
 typeshed/stdlib/3.4/_tracemalloc.pyi               |   26 +
 typeshed/stdlib/3.4/asyncio/__init__.pyi           |   33 +
 typeshed/stdlib/3.4/asyncio/events.pyi             |  172 ++
 typeshed/stdlib/3.4/asyncio/futures.pyi            |   40 +
 typeshed/stdlib/3.4/asyncio/queues.pyi             |   48 +
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |   38 +
 typeshed/stdlib/3.4/enum.pyi                       |   19 +
 typeshed/stdlib/3.4/pathlib.pyi                    |  164 ++
 typeshed/stdlib/3/__future__.pyi                   |    9 +
 typeshed/stdlib/3/_codecs.pyi                      |   51 +
 typeshed/stdlib/3/_dummy_thread.pyi                |   11 +
 typeshed/stdlib/3/_io.pyi                          |   48 +
 typeshed/stdlib/3/_json.pyi                        |   30 +
 typeshed/stdlib/3/_locale.pyi                      |   84 +
 typeshed/stdlib/3/_markupbase.pyi                  |    9 +
 typeshed/stdlib/3/_posixsubprocess.pyi             |   13 +
 typeshed/stdlib/3/_random.pyi                      |   12 +
 typeshed/stdlib/3/_subprocess.pyi                  |   38 +
 typeshed/stdlib/3/_thread.pyi                      |   14 +
 typeshed/stdlib/3/_warnings.pyi                    |   11 +
 typeshed/stdlib/3/abc.pyi                          |    6 +
 typeshed/stdlib/3/argparse.pyi                     |  162 ++
 typeshed/stdlib/3/array.pyi                        |   49 +
 typeshed/stdlib/3/atexit.pyi                       |    9 +
 typeshed/stdlib/3/base64.pyi                       |   25 +
 typeshed/stdlib/3/binascii.pyi                     |   26 +
 typeshed/stdlib/3/bisect.pyi                       |   12 +
 typeshed/stdlib/3/builtins.pyi                     |  811 ++++++
 typeshed/stdlib/3/bz2.pyi                          |    0
 typeshed/stdlib/3/calendar.pyi                     |   15 +
 typeshed/stdlib/3/cgi.pyi                          |    1 +
 typeshed/stdlib/3/codecs.pyi                       |  194 ++
 typeshed/stdlib/3/collections.pyi                  |  134 +
 typeshed/stdlib/3/contextlib.pyi                   |   15 +
 typeshed/stdlib/3/copy.pyi                         |   10 +
 typeshed/stdlib/3/csv.pyi                          |   77 +
 typeshed/stdlib/3/datetime.pyi                     |  221 ++
 typeshed/stdlib/3/decimal.pyi                      |  255 ++
 typeshed/stdlib/3/difflib.pyi                      |   61 +
 typeshed/stdlib/3/distutils/__init__.pyi           |    0
 typeshed/stdlib/3/distutils/errors.pyi             |    4 +
 typeshed/stdlib/3/distutils/spawn.pyi              |    6 +
 typeshed/stdlib/3/doctest.pyi                      |    9 +
 typeshed/stdlib/3/email/__init__.pyi               |   23 +
 typeshed/stdlib/3/email/_header_value_parser.pyi   |  397 +++
 typeshed/stdlib/3/email/_parseaddr.pyi             |   44 +
 typeshed/stdlib/3/email/_policybase.pyi            |   34 +
 typeshed/stdlib/3/email/base64mime.pyi             |   13 +
 typeshed/stdlib/3/email/charset.pyi                |   25 +
 typeshed/stdlib/3/email/contentmanager.pyi         |   27 +
 typeshed/stdlib/3/email/encoders.pyi               |    8 +
 typeshed/stdlib/3/email/errors.pyi                 |   44 +
 typeshed/stdlib/3/email/feedparser.pyi             |   26 +
 typeshed/stdlib/3/email/generator.pyi              |   19 +
 typeshed/stdlib/3/email/header.pyi                 |   29 +
 typeshed/stdlib/3/email/headerregistry.pyi         |  133 +
 typeshed/stdlib/3/email/iterators.pyi              |    7 +
 typeshed/stdlib/3/email/message.pyi                |   74 +
 typeshed/stdlib/3/email/mime/__init__.pyi          |    4 +
 typeshed/stdlib/3/email/mime/application.pyi       |    8 +
 typeshed/stdlib/3/email/mime/audio.pyi             |    8 +
 typeshed/stdlib/3/email/mime/base.pyi              |    8 +
 typeshed/stdlib/3/email/mime/image.pyi             |    8 +
 typeshed/stdlib/3/email/mime/message.pyi           |    8 +
 typeshed/stdlib/3/email/mime/multipart.pyi         |    8 +
 typeshed/stdlib/3/email/mime/nonmultipart.pyi      |    8 +
 typeshed/stdlib/3/email/mime/text.pyi              |    8 +
 typeshed/stdlib/3/email/parser.pyi                 |   29 +
 typeshed/stdlib/3/email/policy.pyi                 |   26 +
 typeshed/stdlib/3/email/quoprimime.pyi             |   18 +
 typeshed/stdlib/3/email/utils.pyi                  |   22 +
 typeshed/stdlib/3/encodings/__init__.pyi           |    6 +
 typeshed/stdlib/3/encodings/utf_8.pyi              |   14 +
 typeshed/stdlib/3/errno.pyi                        |  132 +
 typeshed/stdlib/3/fcntl.pyi                        |   11 +
 typeshed/stdlib/3/fnmatch.pyi                      |   11 +
 typeshed/stdlib/3/functools.pyi                    |   45 +
 typeshed/stdlib/3/gc.pyi                           |   10 +
 typeshed/stdlib/3/getopt.pyi                       |   19 +
 typeshed/stdlib/3/getpass.pyi                      |    5 +
 typeshed/stdlib/3/gettext.pyi                      |   39 +
 typeshed/stdlib/3/glob.pyi                         |    8 +
 typeshed/stdlib/3/grp.pyi                          |   13 +
 typeshed/stdlib/3/hashlib.pyi                      |   25 +
 typeshed/stdlib/3/heapq.pyi                        |   18 +
 typeshed/stdlib/3/html/__init__.pyi                |    4 +
 typeshed/stdlib/3/html/entities.pyi                |   10 +
 typeshed/stdlib/3/html/parser.pyi                  |   28 +
 typeshed/stdlib/3/http/__init__.pyi                |    0
 typeshed/stdlib/3/http/client.pyi                  |  101 +
 typeshed/stdlib/3/http/cookiejar.pyi               |  121 +
 typeshed/stdlib/3/imp.pyi                          |   10 +
 typeshed/stdlib/3/importlib.pyi                    |    9 +
 typeshed/stdlib/3/inspect.pyi                      |   44 +
 typeshed/stdlib/3/io.pyi                           |  150 ++
 typeshed/stdlib/3/itertools.pyi                    |   57 +
 typeshed/stdlib/3/json.pyi                         |   51 +
 typeshed/stdlib/3/linecache.pyi                    |    5 +
 typeshed/stdlib/3/locale.pyi                       |   17 +
 typeshed/stdlib/3/logging/__init__.pyi             |  239 ++
 typeshed/stdlib/3/logging/handlers.pyi             |  200 ++
 typeshed/stdlib/3/msvcrt.pyi                       |    8 +
 typeshed/stdlib/3/multiprocessing/__init__.pyi     |   12 +
 typeshed/stdlib/3/multiprocessing/managers.pyi     |    8 +
 typeshed/stdlib/3/multiprocessing/pool.pyi         |    6 +
 typeshed/stdlib/3/numbers.pyi                      |   80 +
 typeshed/stdlib/3/os/__init__.pyi                  |  340 +++
 typeshed/stdlib/3/os/path.pyi                      |   61 +
 typeshed/stdlib/3/pickle.pyi                       |   12 +
 typeshed/stdlib/3/pipes.pyi                        |   19 +
 typeshed/stdlib/3/platform.pyi                     |   35 +
 typeshed/stdlib/3/posix.pyi                        |    7 +
 typeshed/stdlib/3/posixpath.pyi                    |   46 +
 typeshed/stdlib/3/pprint.pyi                       |   23 +
 typeshed/stdlib/3/pwd.pyi                          |   18 +
 typeshed/stdlib/3/queue.pyi                        |   20 +
 typeshed/stdlib/3/random.pyi                       |   67 +
 typeshed/stdlib/3/re.pyi                           |   58 +
 typeshed/stdlib/3/resource.pyi                     |   13 +
 typeshed/stdlib/3/select.pyi                       |   27 +
 typeshed/stdlib/3/shlex.pyi                        |   39 +
 typeshed/stdlib/3/shutil.pyi                       |   46 +
 typeshed/stdlib/3/signal.pyi                       |  117 +
 typeshed/stdlib/3/smtplib.pyi                      |   94 +
 typeshed/stdlib/3/socket.pyi                       |  387 +++
 typeshed/stdlib/3/socketserver.pyi                 |   15 +
 typeshed/stdlib/3/ssl.pyi                          |  202 ++
 typeshed/stdlib/3/stat.pyi                         |   71 +
 typeshed/stdlib/3/string.pyi                       |   27 +
 typeshed/stdlib/3/struct.pyi                       |   30 +
 typeshed/stdlib/3/subprocess.pyi                   |   73 +
 typeshed/stdlib/3/sys.pyi                          |  156 ++
 typeshed/stdlib/3/sysconfig.pyi                    |    8 +
 typeshed/stdlib/3/tarfile.pyi                      |   33 +
 typeshed/stdlib/3/tempfile.pyi                     |   45 +
 typeshed/stdlib/3/textwrap.pyi                     |  119 +
 typeshed/stdlib/3/threading.pyi                    |   64 +
 typeshed/stdlib/3/time.pyi                         |   64 +
 typeshed/stdlib/3/token.pyi                        |   63 +
 typeshed/stdlib/3/traceback.pyi                    |   16 +
 typeshed/stdlib/3/types.pyi                        |  149 ++
 typeshed/stdlib/3/typing.pyi                       |  378 +++
 typeshed/stdlib/3/unicodedata.pyi                  |   37 +
 typeshed/stdlib/3/unittest.pyi                     |  167 ++
 typeshed/stdlib/3/urllib/__init__.pyi              |    0
 typeshed/stdlib/3/urllib/error.pyi                 |    5 +
 typeshed/stdlib/3/urllib/parse.pyi                 |  133 +
 typeshed/stdlib/3/urllib/request.pyi               |   15 +
 typeshed/stdlib/3/urllib/response.pyi              |   32 +
 typeshed/stdlib/3/urllib/robotparser.pyi           |    7 +
 typeshed/stdlib/3/uuid.pyi                         |   73 +
 typeshed/stdlib/3/warnings.pyi                     |   33 +
 typeshed/stdlib/3/weakref.pyi                      |   71 +
 typeshed/stdlib/3/xml/__init__.pyi                 |    0
 typeshed/stdlib/3/xml/etree/ElementInclude.pyi     |   14 +
 typeshed/stdlib/3/xml/etree/ElementPath.pyi        |   28 +
 typeshed/stdlib/3/xml/etree/ElementTree.pyi        |  127 +
 typeshed/stdlib/3/xml/etree/__init__.pyi           |    0
 typeshed/stdlib/3/xml/etree/cElementTree.pyi       |    5 +
 typeshed/stdlib/3/zipfile.pyi                      |   29 +
 typeshed/stdlib/3/zlib.pyi                         |   32 +
 typeshed/third_party/2.7/Crypto/Cipher/AES.pyi     |    0
 .../third_party/2.7/Crypto/Cipher/__init__.pyi     |   15 +
 .../third_party/2.7/Crypto/Random/__init__.pyi     |    5 +
 typeshed/third_party/2.7/Crypto/Random/random.pyi  |    3 +
 typeshed/third_party/2.7/Crypto/__init__.pyi       |    0
 typeshed/third_party/2.7/OpenSSL/__init__.pyi      |    0
 typeshed/third_party/2.7/OpenSSL/crypto.pyi        |    6 +
 typeshed/third_party/2.7/boto/__init__.pyi         |   78 +
 typeshed/third_party/2.7/boto/connection.pyi       |  108 +
 typeshed/third_party/2.7/boto/ec2/__init__.pyi     |   11 +
 typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi |   43 +
 typeshed/third_party/2.7/concurrent/__init__.pyi   |    0
 .../2.7/concurrent/futures/__init__.pyi            |   37 +
 typeshed/third_party/2.7/croniter.pyi              |   27 +
 typeshed/third_party/2.7/enum.pyi                  |   19 +
 typeshed/third_party/2.7/fb303/FacebookService.pyi |  301 +++
 typeshed/third_party/2.7/fb303/__init__.pyi        |    0
 typeshed/third_party/2.7/gflags.pyi                |  216 ++
 typeshed/third_party/2.7/google/__init__.pyi       |    0
 .../third_party/2.7/google/protobuf/__init__.pyi   |    0
 .../third_party/2.7/google/protobuf/descriptor.pyi |  163 ++
 .../2.7/google/protobuf/descriptor_pb2.pyi         |    2 +
 .../2.7/google/protobuf/internal/__init__.pyi      |    0
 .../2.7/google/protobuf/internal/decoder.pyi       |   34 +
 .../2.7/google/protobuf/internal/encoder.pyi       |   38 +
 .../google/protobuf/internal/enum_type_wrapper.pyi |   11 +
 .../2.7/google/protobuf/internal/wire_format.pyi   |   54 +
 .../third_party/2.7/google/protobuf/message.pyi    |   36 +
 .../third_party/2.7/google/protobuf/reflection.pyi |   10 +
 typeshed/third_party/2.7/kazoo/__init__.pyi        |    4 +
 typeshed/third_party/2.7/kazoo/client.pyi          |  100 +
 typeshed/third_party/2.7/kazoo/exceptions.pyi      |   62 +
 typeshed/third_party/2.7/kazoo/recipe/__init__.pyi |    4 +
 typeshed/third_party/2.7/kazoo/recipe/watchers.pyi |   25 +
 typeshed/third_party/2.7/pycurl.pyi                |   81 +
 typeshed/third_party/2.7/redis/__init__.pyi        |   28 +
 typeshed/third_party/2.7/redis/client.pyi          |  293 +++
 typeshed/third_party/2.7/redis/connection.pyi      |  135 +
 typeshed/third_party/2.7/redis/exceptions.pyi      |   21 +
 typeshed/third_party/2.7/redis/utils.pyi           |   12 +
 typeshed/third_party/2.7/requests/__init__.pyi     |   38 +
 typeshed/third_party/2.7/requests/adapters.pyi     |   69 +
 typeshed/third_party/2.7/requests/api.pyi          |   14 +
 typeshed/third_party/2.7/requests/auth.pyi         |   41 +
 typeshed/third_party/2.7/requests/compat.pyi       |    6 +
 typeshed/third_party/2.7/requests/cookies.pyi      |   61 +
 typeshed/third_party/2.7/requests/exceptions.pyi   |   26 +
 typeshed/third_party/2.7/requests/hooks.pyi        |    8 +
 typeshed/third_party/2.7/requests/models.pyi       |  133 +
 .../third_party/2.7/requests/packages/__init__.pyi |    8 +
 .../2.7/requests/packages/urllib3/__init__.pyi     |   12 +
 .../2.7/requests/packages/urllib3/_collections.pyi |   51 +
 .../2.7/requests/packages/urllib3/connection.pyi   |   51 +
 .../requests/packages/urllib3/connectionpool.pyi   |   87 +
 .../requests/packages/urllib3/contrib/__init__.pyi |    4 +
 .../2.7/requests/packages/urllib3/exceptions.pyi   |   54 +
 .../2.7/requests/packages/urllib3/fields.pyi       |   16 +
 .../2.7/requests/packages/urllib3/filepost.pyi     |   19 +
 .../packages/urllib3/packages/__init__.pyi         |    4 +
 .../packages/ssl_match_hostname/__init__.pyi       |    1 +
 .../ssl_match_hostname/_implementation.pyi         |    7 +
 .../2.7/requests/packages/urllib3/poolmanager.pyi  |   31 +
 .../2.7/requests/packages/urllib3/request.pyi      |   13 +
 .../2.7/requests/packages/urllib3/response.pyi     |   58 +
 .../requests/packages/urllib3/util/__init__.pyi    |    7 +
 .../requests/packages/urllib3/util/connection.pyi  |   11 +
 .../2.7/requests/packages/urllib3/util/request.pyi |   12 +
 .../requests/packages/urllib3/util/response.pyi    |    5 +
 .../2.7/requests/packages/urllib3/util/retry.pyi   |   36 +
 .../2.7/requests/packages/urllib3/util/timeout.pyi |   24 +
 .../2.7/requests/packages/urllib3/util/url.pyi     |   26 +
 typeshed/third_party/2.7/requests/sessions.pyi     |   92 +
 typeshed/third_party/2.7/requests/status_codes.pyi |    8 +
 typeshed/third_party/2.7/requests/structures.pyi   |   21 +
 typeshed/third_party/2.7/requests/utils.pyi        |   52 +
 typeshed/third_party/2.7/routes/__init__.pyi       |   19 +
 typeshed/third_party/2.7/routes/mapper.pyi         |   70 +
 typeshed/third_party/2.7/routes/util.pyi           |   24 +
 typeshed/third_party/2.7/scribe/__init__.pyi       |    0
 typeshed/third_party/2.7/scribe/scribe.pyi         |   43 +
 typeshed/third_party/2.7/scribe/ttypes.pyi         |   22 +
 typeshed/third_party/2.7/six/__init__.pyi          |   90 +
 typeshed/third_party/2.7/six/moves/__init__.pyi    |   29 +
 typeshed/third_party/2.7/six/moves/cPickle.pyi     |    6 +
 .../third_party/2.7/six/moves/urllib/__init__.pyi  |   10 +
 .../third_party/2.7/six/moves/urllib/error.pyi     |    8 +
 .../third_party/2.7/six/moves/urllib/parse.pyi     |   30 +
 .../third_party/2.7/six/moves/urllib/request.pyi   |   38 +
 .../third_party/2.7/six/moves/urllib/response.pyi  |    9 +
 .../2.7/six/moves/urllib/robotparser.pyi           |    6 +
 .../third_party/2.7/six/moves/urllib_error.pyi     |   10 +
 .../third_party/2.7/six/moves/urllib_parse.pyi     |   28 +
 .../third_party/2.7/six/moves/urllib_request.pyi   |   40 +
 .../third_party/2.7/six/moves/urllib_response.pyi  |   11 +
 .../2.7/six/moves/urllib_robotparser.pyi           |    8 +
 typeshed/third_party/2.7/sqlalchemy/__init__.pyi   |  124 +
 .../2.7/sqlalchemy/databases/__init__.pyi          |   12 +
 .../third_party/2.7/sqlalchemy/databases/mysql.pyi |    1 +
 .../2.7/sqlalchemy/dialects/__init__.pyi           |   12 +
 .../2.7/sqlalchemy/dialects/mysql/__init__.pyi     |   42 +
 .../2.7/sqlalchemy/dialects/mysql/base.pyi         |  350 +++
 .../third_party/2.7/sqlalchemy/engine/__init__.pyi |    6 +
 .../2.7/sqlalchemy/engine/strategies.pyi           |   39 +
 typeshed/third_party/2.7/sqlalchemy/engine/url.pyi |   27 +
 typeshed/third_party/2.7/sqlalchemy/exc.pyi        |   77 +
 typeshed/third_party/2.7/sqlalchemy/inspection.pyi |    5 +
 typeshed/third_party/2.7/sqlalchemy/log.pyi        |   14 +
 .../third_party/2.7/sqlalchemy/orm/__init__.pyi    |   95 +
 .../third_party/2.7/sqlalchemy/orm/session.pyi     |   93 +
 typeshed/third_party/2.7/sqlalchemy/pool.pyi       |  118 +
 typeshed/third_party/2.7/sqlalchemy/schema.pyi     |   50 +
 .../third_party/2.7/sqlalchemy/sql/__init__.pyi    |   66 +
 .../third_party/2.7/sqlalchemy/sql/annotation.pyi  |   11 +
 typeshed/third_party/2.7/sqlalchemy/sql/base.pyi   |   42 +
 typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi    |   25 +
 typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi    |   20 +
 .../third_party/2.7/sqlalchemy/sql/elements.pyi    |   60 +
 .../third_party/2.7/sqlalchemy/sql/expression.pyi  |   87 +
 .../third_party/2.7/sqlalchemy/sql/functions.pyi   |   47 +
 typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi |    1 +
 .../third_party/2.7/sqlalchemy/sql/operators.pyi   |   99 +
 typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi |   98 +
 .../third_party/2.7/sqlalchemy/sql/selectable.pyi  |   60 +
 .../third_party/2.7/sqlalchemy/sql/sqltypes.pyi    |   62 +
 .../third_party/2.7/sqlalchemy/sql/type_api.pyi    |    9 +
 .../third_party/2.7/sqlalchemy/sql/visitors.pyi    |   33 +
 typeshed/third_party/2.7/sqlalchemy/types.pyi      |   51 +
 .../third_party/2.7/sqlalchemy/util/__init__.pyi   |  133 +
 .../2.7/sqlalchemy/util/_collections.pyi           |  214 ++
 .../third_party/2.7/sqlalchemy/util/compat.pyi     |   67 +
 .../2.7/sqlalchemy/util/deprecations.pyi           |   13 +
 .../2.7/sqlalchemy/util/langhelpers.pyi            |  136 +
 typeshed/third_party/2.7/thrift/Thrift.pyi         |   55 +
 typeshed/third_party/2.7/thrift/__init__.pyi       |    0
 .../2.7/thrift/protocol/TBinaryProtocol.pyi        |   65 +
 .../third_party/2.7/thrift/protocol/TProtocol.pyi  |   79 +
 .../third_party/2.7/thrift/protocol/__init__.pyi   |   11 +
 .../third_party/2.7/thrift/transport/TSocket.pyi   |   32 +
 .../2.7/thrift/transport/TTransport.pyi            |  111 +
 .../third_party/2.7/thrift/transport/__init__.pyi  |    9 +
 typeshed/third_party/2.7/tornado/__init__.pyi      |    0
 typeshed/third_party/2.7/tornado/concurrent.pyi    |   47 +
 typeshed/third_party/2.7/tornado/gen.pyi           |  113 +
 typeshed/third_party/2.7/tornado/httpclient.pyi    |  112 +
 typeshed/third_party/2.7/tornado/httpserver.pyi    |   45 +
 typeshed/third_party/2.7/tornado/httputil.pyi      |   93 +
 typeshed/third_party/2.7/tornado/ioloop.pyi        |   88 +
 typeshed/third_party/2.7/tornado/netutil.pyi       |   49 +
 typeshed/third_party/2.7/tornado/tcpserver.pyi     |   21 +
 typeshed/third_party/2.7/tornado/util.pyi          |   50 +
 typeshed/third_party/2.7/tornado/web.pyi           |  261 ++
 typeshed/third_party/2.7/yaml/__init__.pyi         |   51 +
 typeshed/third_party/2.7/yaml/composer.pyi         |   21 +
 typeshed/third_party/2.7/yaml/constructor.pyi      |   70 +
 typeshed/third_party/2.7/yaml/dumper.pyi           |   17 +
 typeshed/third_party/2.7/yaml/emitter.pyi          |  110 +
 typeshed/third_party/2.7/yaml/error.pyi            |   25 +
 typeshed/third_party/2.7/yaml/events.pyi           |   66 +
 typeshed/third_party/2.7/yaml/loader.pyi           |   19 +
 typeshed/third_party/2.7/yaml/nodes.pyi            |   35 +
 typeshed/third_party/2.7/yaml/parser.pyi           |   48 +
 typeshed/third_party/2.7/yaml/reader.pyi           |   38 +
 typeshed/third_party/2.7/yaml/representer.pyi      |   56 +
 typeshed/third_party/2.7/yaml/resolver.pyi         |   26 +
 typeshed/third_party/2.7/yaml/scanner.pyi          |  100 +
 typeshed/third_party/2.7/yaml/serializer.pyi       |   27 +
 typeshed/third_party/2.7/yaml/tokens.pyi           |   97 +
 typeshed/third_party/2and3/backports/__init__.pyi  |    0
 .../2and3/backports/ssl_match_hostname.pyi         |    3 +
 typeshed/third_party/2and3/backports_abc.pyi       |   19 +
 typeshed/third_party/2and3/certifi.pyi             |    2 +
 typeshed/third_party/2and3/singledispatch.pyi      |    5 +
 typeshed/third_party/3/docutils/__init__.pyi       |    1 +
 typeshed/third_party/3/docutils/examples.pyi       |    3 +
 typeshed/third_party/3/docutils/nodes.pyi          |    8 +
 .../third_party/3/docutils/parsers/__init__.pyi    |    1 +
 .../3/docutils/parsers/rst/__init__.pyi            |    0
 .../third_party/3/docutils/parsers/rst/nodes.pyi   |    1 +
 .../third_party/3/docutils/parsers/rst/roles.pyi   |   10 +
 .../third_party/3/docutils/parsers/rst/states.pyi  |    5 +
 typeshed/third_party/3/enum.pyi                    |   19 +
 typeshed/third_party/3/lxml/__init__.pyi           |    0
 typeshed/third_party/3/lxml/etree.pyi              |  102 +
 typeshed/third_party/3/requests/__init__.pyi       |   38 +
 typeshed/third_party/3/requests/adapters.pyi       |   69 +
 typeshed/third_party/3/requests/api.pyi            |   14 +
 typeshed/third_party/3/requests/auth.pyi           |   41 +
 typeshed/third_party/3/requests/compat.pyi         |    6 +
 typeshed/third_party/3/requests/cookies.pyi        |   65 +
 typeshed/third_party/3/requests/exceptions.pyi     |   26 +
 typeshed/third_party/3/requests/hooks.pyi          |    8 +
 typeshed/third_party/3/requests/models.pyi         |  134 +
 .../third_party/3/requests/packages/__init__.pyi   |    8 +
 .../3/requests/packages/urllib3/__init__.pyi       |   35 +
 .../3/requests/packages/urllib3/_collections.pyi   |   51 +
 .../3/requests/packages/urllib3/connection.pyi     |   64 +
 .../3/requests/packages/urllib3/connectionpool.pyi |   89 +
 .../requests/packages/urllib3/contrib/__init__.pyi |    4 +
 .../3/requests/packages/urllib3/exceptions.pyi     |   54 +
 .../3/requests/packages/urllib3/fields.pyi         |   16 +
 .../3/requests/packages/urllib3/filepost.pyi       |   19 +
 .../packages/urllib3/packages/__init__.pyi         |    4 +
 .../packages/ssl_match_hostname/__init__.pyi       |    8 +
 .../ssl_match_hostname/_implementation.pyi         |    7 +
 .../3/requests/packages/urllib3/poolmanager.pyi    |   31 +
 .../3/requests/packages/urllib3/request.pyi        |   13 +
 .../3/requests/packages/urllib3/response.pyi       |   58 +
 .../3/requests/packages/urllib3/util/__init__.pyi  |   29 +
 .../requests/packages/urllib3/util/connection.pyi  |   11 +
 .../3/requests/packages/urllib3/util/request.pyi   |   12 +
 .../3/requests/packages/urllib3/util/response.pyi  |    5 +
 .../3/requests/packages/urllib3/util/retry.pyi     |   36 +
 .../3/requests/packages/urllib3/util/ssl_.pyi      |   24 +
 .../3/requests/packages/urllib3/util/timeout.pyi   |   24 +
 .../3/requests/packages/urllib3/util/url.pyi       |   26 +
 typeshed/third_party/3/requests/sessions.pyi       |   92 +
 typeshed/third_party/3/requests/status_codes.pyi   |    8 +
 typeshed/third_party/3/requests/structures.pyi     |   21 +
 typeshed/third_party/3/requests/utils.pyi          |   52 +
 typeshed/third_party/3/six/__init__.pyi            |  103 +
 typeshed/third_party/3/six/moves/__init__.pyi      |   32 +
 typeshed/third_party/3/six/moves/cPickle.pyi       |    6 +
 .../third_party/3/six/moves/urllib/__init__.pyi    |   10 +
 typeshed/third_party/3/six/moves/urllib/error.pyi  |    8 +
 typeshed/third_party/3/six/moves/urllib/parse.pyi  |   22 +
 .../third_party/3/six/moves/urllib/request.pyi     |   40 +
 .../third_party/3/six/moves/urllib/response.pyi    |    9 +
 .../third_party/3/six/moves/urllib/robotparser.pyi |    6 +
 typeshed/third_party/3/six/moves/urllib_error.pyi  |   10 +
 typeshed/third_party/3/six/moves/urllib_parse.pyi  |   20 +
 .../third_party/3/six/moves/urllib_request.pyi     |   41 +
 .../third_party/3/six/moves/urllib_response.pyi    |   11 +
 .../third_party/3/six/moves/urllib_robotparser.pyi |    8 +
 xml/mypy-html.css                                  |  104 +
 xml/mypy-html.xslt                                 |   81 +
 xml/mypy-txt.xslt                                  |  100 +
 xml/mypy.xsd                                       |   47 +
 607 files changed, 55490 insertions(+)

diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..d22a472
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,29 @@
+Metadata-Version: 1.1
+Name: mypy-lang
+Version: 0.3.1
+Summary: Optional static typing for Python
+Home-page: http://www.mypy-lang.org/
+Author: Jukka Lehtosalo
+Author-email: jukka.lehtosalo at iki.fi
+License: MIT License
+Description: Mypy -- Optional Static Typing for Python
+        =========================================
+        
+        Add type annotations to your Python programs, and use mypy to type
+        check them.  Mypy is essentially a Python linter on steroids, and it
+        can catch many programming errors by analyzing your program, without
+        actually having to run it.  Mypy has a powerful type system with
+        features such as type inference, gradual typing, generics and union
+        types.
+        
+Platform: POSIX
+Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Software Development
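
The description above summarizes the workflow: annotate your code, then let mypy analyze it without running it. As a minimal sketch (the file name example.py and the greeting function are invented for illustration and are not part of this commit), a check looks roughly like this:

    # example.py -- a small annotated program (hypothetical)
    def greeting(name: str) -> str:
        # mypy checks that callers pass a str and that a str is returned
        return 'Hello, ' + name

    greeting(42)  # mypy flags this call: an int is passed where a str is expected

Running "mypy example.py" reports the bad call purely from static analysis; the program itself is never executed.
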
diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py
new file mode 100644
index 0000000..040c531
--- /dev/null
+++ b/lib-typing/3.2/typing.py
@@ -0,0 +1,1716 @@
+# TODO:
+# - Generic[T, T] is invalid
+# - Look for TODO below
+
+# TODO nits:
+# Get rid of asserts that are the caller's fault.
+# Docstrings (e.g. ABCs).
+
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import functools
+import re as stdlib_re  # Avoid confusion with the re we export.
+import sys
+import types
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'Callable',
+    'Generic',
+    'Optional',
+    'TypeVar',
+    'Union',
+    'Tuple',
+
+    # ABCs (from collections.abc).
+    'AbstractSet',  # collections.abc.Set.
+    'ByteString',
+    'Container',
+    'Hashable',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'Mapping',
+    'MappingView',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'Sequence',
+    'Sized',
+    'ValuesView',
+
+    # Structural checks, a.k.a. protocols.
+    'Reversible',
+    'SupportsAbs',
+    'SupportsBytes',
+    'SupportsComplex',
+    'SupportsFloat',
+    'SupportsInt',
+    'SupportsRound',
+
+    # Concrete collection types.
+    'Dict',
+    'List',
+    'Set',
+    'NamedTuple',  # Not really a type.
+    'Generator',
+
+    # One-off things.
+    'AnyStr',
+    'cast',
+    'get_type_hints',
+    'no_type_check',
+    'no_type_check_decorator',
+    'overload',
+
+    # Submodules.
+    'io',
+    're',
+]
+
+
+def _qualname(x):
+    if sys.version_info[:2] >= (3, 3):
+        return x.__qualname__
+    else:
+        # Fall back to just name.
+        return x.__name__
+
+
+class TypingMeta(type):
+    """Metaclass for every type defined below.
+
+    This overrides __new__() to require an extra keyword parameter
+    '_root', which serves as a guard against naive subclassing of the
+    typing classes.  Any legitimate class defined using a metaclass
+    derived from TypingMeta (including internal subclasses created by
+    e.g.  Union[X, Y]) must pass _root=True.
+
+    This also defines a dummy constructor (all the work is done in
+    __new__) and a nicer repr().
+    """
+
+    _is_protocol = False
+
+    def __new__(cls, name, bases, namespace, *, _root=False):
+        if not _root:
+            raise TypeError("Cannot subclass %s" %
+                            (', '.join(map(_type_repr, bases)) or '()'))
+        return super().__new__(cls, name, bases, namespace)
+
+    def __init__(self, *args, **kwds):
+        pass
+
+    def _eval_type(self, globalns, localns):
+        """Override this in subclasses to interpret forward references.
+
+        For example, Union['C'] is internally stored as
+        Union[_ForwardRef('C')], which should evaluate to _Union[C],
+        where C is an object found in globalns or localns (searching
+        localns first, of course).
+        """
+        return self
+
+    def _has_type_var(self):
+        return False
+
+    def __repr__(self):
+        return '%s.%s' % (self.__module__, _qualname(self))
+
+
+class Final:
+    """Mix-in class to prevent instantiation."""
+
+    def __new__(self, *args, **kwds):
+        raise TypeError("Cannot instantiate %r" % self.__class__)
+
+
+class _ForwardRef(TypingMeta):
+    """Wrapper to hold a forward reference."""
+
+    def __new__(cls, arg):
+        if not isinstance(arg, str):
+            raise TypeError('ForwardRef must be a string -- got %r' % (arg,))
+        try:
+            code = compile(arg, '<string>', 'eval')
+        except SyntaxError:
+            raise SyntaxError('ForwardRef must be an expression -- got %r' %
+                              (arg,))
+        self = super().__new__(cls, arg, (), {}, _root=True)
+        self.__forward_arg__ = arg
+        self.__forward_code__ = code
+        self.__forward_evaluated__ = False
+        self.__forward_value__ = None
+        typing_globals = globals()
+        frame = sys._getframe(1)
+        while frame is not None and frame.f_globals is typing_globals:
+            frame = frame.f_back
+        assert frame is not None
+        self.__forward_frame__ = frame
+        return self
+
+    def _eval_type(self, globalns, localns):
+        if not isinstance(localns, dict):
+            raise TypeError('ForwardRef localns must be a dict -- got %r' %
+                            (localns,))
+        if not isinstance(globalns, dict):
+            raise TypeError('ForwardRef globalns must be a dict -- got %r' %
+                            (globalns,))
+        if not self.__forward_evaluated__:
+            if globalns is None and localns is None:
+                globalns = localns = {}
+            elif globalns is None:
+                globalns = localns
+            elif localns is None:
+                localns = globalns
+            self.__forward_value__ = _type_check(
+                eval(self.__forward_code__, globalns, localns),
+                "Forward references must evaluate to types.")
+            self.__forward_evaluated__ = True
+        return self.__forward_value__
+
+    def __subclasscheck__(self, cls):
+        if not self.__forward_evaluated__:
+            globalns = self.__forward_frame__.f_globals
+            localns = self.__forward_frame__.f_locals
+            try:
+                self._eval_type(globalns, localns)
+            except NameError:
+                return False  # Too early.
+        return issubclass(cls, self.__forward_value__)
+
+    def __instancecheck__(self, obj):
+        if not self.__forward_evaluated__:
+            globalns = self.__forward_frame__.f_globals
+            localns = self.__forward_frame__.f_locals
+            try:
+                self._eval_type(globalns, localns)
+            except NameError:
+                return False  # Too early.
+        return isinstance(obj, self.__forward_value__)
+
+    def __repr__(self):
+        return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias:
+    """Internal helper class for defining generic variants of concrete types.
+
+    Note that this is not a type; let's call it a pseudo-type.  It can
+    be used in instance and subclass checks, e.g. isinstance(m, Match)
+    or issubclass(type(m), Match).  However, it cannot be itself the
+    target of an issubclass() call; e.g. issubclass(Match, C) (for
+    some arbitrary class C) raises TypeError rather than returning
+    False.
+    """
+
+    def __new__(cls, *args, **kwds):
+        """Constructor.
+
+        This only exists to give a better error message in case
+        someone tries to subclass a type alias (not a good idea).
+        """
+        if (len(args) == 3 and
+            isinstance(args[0], str) and
+            isinstance(args[1], tuple)):
+            # Close enough.
+            raise TypeError("A type alias cannot be subclassed")
+        return object.__new__(cls)
+
+    def __init__(self, name, type_var, impl_type, type_checker):
+        """Initializer.
+
+        Args:
+            name: The name, e.g. 'Pattern'.
+            type_var: The type parameter, e.g. AnyStr, or the
+                specific type, e.g. str.
+            impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance
+                and returns a value that should be a type_var instance.
+        """
+        assert isinstance(name, str), repr(name)
+        assert isinstance(type_var, type), repr(type_var)
+        assert isinstance(impl_type, type), repr(impl_type)
+        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        self.name = name
+        self.type_var = type_var
+        self.impl_type = impl_type
+        self.type_checker = type_checker
+
+    def __repr__(self):
+        return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+    def __getitem__(self, parameter):
+        assert isinstance(parameter, type), repr(parameter)
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("%s cannot be further parameterized." % self)
+        if self.type_var.__constraints__:
+            if not issubclass(parameter, Union[self.type_var.__constraints__]):
+                raise TypeError("%s is not a valid substitution for %s." %
+                                (parameter, self.type_var))
+        return self.__class__(self.name, parameter,
+                              self.impl_type, self.type_checker)
+
+    def __instancecheck__(self, obj):
+        return (isinstance(obj, self.impl_type) and
+                isinstance(self.type_checker(obj), self.type_var))
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if isinstance(cls, _TypeAlias):
+            # Covariance.  For now, we compare by name.
+            return (cls.name == self.name and
+                    issubclass(cls.type_var, self.type_var))
+        else:
+            # Note that this is too lenient, because the
+            # implementation type doesn't carry information about
+            # whether it is about bytes or str (for example).
+            return issubclass(cls, self.impl_type)
+
+
+def _has_type_var(t):
+    return t is not None and isinstance(t, TypingMeta) and t._has_type_var()
+
+
+def _eval_type(t, globalns, localns):
+    if isinstance(t, TypingMeta):
+        return t._eval_type(globalns, localns)
+    else:
+        return t
+
+
+def _type_check(arg, msg):
+    """Check that the argument is a type, and return it.
+
+    As a special case, accept None and return type(None) instead.
+    Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+    The msg argument is a human-readable error message, e.g.
+
+        "Union[arg, ...]: arg should be a type."
+
+    We append the repr() of the actual value (truncated to 100 chars).
+    """
+    if arg is None:
+        return type(None)
+    if isinstance(arg, str):
+        arg = _ForwardRef(arg)
+    if not isinstance(arg, (type, _TypeAlias)):
+        raise TypeError(msg + " Got %.100r." % (arg,))
+    return arg
+
+
+def _type_repr(obj):
+    """Return the repr() of an object, special-casing types.
+
+    If obj is a type, we return a shorter version than the default
+    type.__repr__, based on the module and qualified name, which is
+    typically enough to uniquely identify a type.  For everything
+    else, we fall back on repr(obj).
+    """
+    if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+        if obj.__module__ == 'builtins':
+            return _qualname(obj)
+        else:
+            return '%s.%s' % (obj.__module__, _qualname(obj))
+    else:
+        return repr(obj)
+
+
+class AnyMeta(TypingMeta):
+    """Metaclass for Any."""
+
+    def __new__(cls, name, bases, namespace, _root=False):
+        self = super().__new__(cls, name, bases, namespace, _root=_root)
+        return self
+
+    def __instancecheck__(self, instance):
+        return True
+
+    def __subclasscheck__(self, cls):
+        if not isinstance(cls, type):
+            return super().__subclasscheck__(cls)  # To TypeError.
+        return True
+
+
+class Any(Final, metaclass=AnyMeta, _root=True):
+    """Special type indicating an unconstrained type.
+
+    - Any object is an instance of Any.
+    - Any class is a subclass of Any.
+    - As a special case, Any and object are subclasses of each other.
+    """
+
+
+class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
+    """Type variable.
+
+    Usage::
+
+      T = TypeVar('T')  # Can be anything
+      A = TypeVar('A', str, bytes)  # Must be str or bytes
+
+    Type variables exist primarily for the benefit of static type
+    checkers.  They serve as the parameters for generic types as well
+    as for generic function definitions.  See class Generic for more
+    information on generic types.  Generic functions work as follows:
+
+      def repeat(x: T, n: int) -> Sequence[T]:
+          '''Return a list containing n references to x.'''
+          return [x]*n
+
+      def longest(x: A, y: A) -> A:
+          '''Return the longest of two strings.'''
+          return x if len(x) >= len(y) else y
+
+    The latter example's signature is essentially the overloading
+    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
+    that if the arguments are instances of some subclass of str,
+    the return type is still plain str.
+
+    At runtime, isinstance(x, T) will raise TypeError.  However,
+    issubclass(C, T) is true for any class C, and issubclass(str, A)
+    and issubclass(bytes, A) are true, and issubclass(int, A) is
+    false.
+
+    Type variables may be marked covariant or contravariant by passing
+    covariant=True or contravariant=True.  See PEP 484 for more
+    details.  By default type variables are invariant.
+
+    Type variables can be introspected. e.g.:
+
+      T.__name__ == 'T'
+      T.__constraints__ == ()
+      T.__covariant__ == False
+      T.__contravariant__ == False
+      A.__constraints__ == (str, bytes)
+    """
+
+    def __new__(cls, name, *constraints, bound=None,
+                covariant=False, contravariant=False):
+        self = super().__new__(cls, name, (Final,), {}, _root=True)
+        if covariant and contravariant:
+            raise ValueError("Bivariant type variables are not supported.")
+        self.__covariant__ = bool(covariant)
+        self.__contravariant__ = bool(contravariant)
+        if constraints and bound is not None:
+            raise TypeError("Constraints cannot be combined with bound=...")
+        if constraints and len(constraints) == 1:
+            raise TypeError("A single constraint is not allowed")
+        msg = "TypeVar(name, constraint, ...): constraints must be types."
+        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+        if bound:
+            self.__bound__ = _type_check(bound, "Bound must be a type.")
+        else:
+            self.__bound__ = None
+        return self
+
+    def _has_type_var(self):
+        return True
+
+    def __repr__(self):
+        if self.__covariant__:
+            prefix = '+'
+        elif self.__contravariant__:
+            prefix = '-'
+        else:
+            prefix = '~'
+        return prefix + self.__name__
+
+    def __instancecheck__(self, instance):
+        raise TypeError("Type variables cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        # TODO: Make this raise TypeError too?
+        if cls is self:
+            return True
+        if cls is Any:
+            return True
+        if self.__bound__ is not None:
+            return issubclass(cls, self.__bound__)
+        if self.__constraints__:
+            return any(issubclass(cls, c) for c in self.__constraints__)
+        return True
+
+
+# Some unconstrained type variables.  These are used by the container types.
+T = TypeVar('T')  # Any type.
+KT = TypeVar('KT')  # Key type.
+VT = TypeVar('VT')  # Value type.
+T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
+V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
+KT_co = TypeVar('KT_co', covariant=True)  # Key type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+# A useful type variable with constraints.  This represents string types.
+# TODO: What about bytearray, memoryview?
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+
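
The TypeVar docstring above describes how constrained type variables behave at runtime, and AnyStr, defined just above, is the canonical constrained example. A hedged sketch of that documented behaviour (results as stated in the docstring; it assumes this backported module is the importable typing, and later typing releases may behave differently):

    # Illustrative only, not part of the imported file.
    from typing import AnyStr
    issubclass(str, AnyStr)      # -> True  (str is one of the constraints)
    issubclass(bytes, AnyStr)    # -> True  (so is bytes)
    issubclass(int, AnyStr)      # -> False (int is neither str nor bytes)
    isinstance('x', AnyStr)      # raises TypeError: type variables reject isinstance()
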
+class UnionMeta(TypingMeta):
+    """Metaclass for Union."""
+
+    def __new__(cls, name, bases, namespace, parameters=None, _root=False):
+        if parameters is None:
+            return super().__new__(cls, name, bases, namespace, _root=_root)
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        # Flatten out Union[Union[...], ...] and type-check non-Union args.
+        params = []
+        msg = "Union[arg, ...]: each arg must be a type."
+        for p in parameters:
+            if isinstance(p, UnionMeta):
+                params.extend(p.__union_params__)
+            else:
+                params.append(_type_check(p, msg))
+        # Weed out strict duplicates, preserving the first of each occurrence.
+        all_params = set(params)
+        if len(all_params) < len(params):
+            new_params = []
+            for t in params:
+                if t in all_params:
+                    new_params.append(t)
+                    all_params.remove(t)
+            params = new_params
+            assert not all_params, all_params
+        # Weed out subclasses.
+        # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+        # If Any or object is present it will be the sole survivor.
+        # If both Any and object are present, Any wins.
+        # Never discard type variables, except against Any.
+        # (In particular, Union[str, AnyStr] != AnyStr.)
+        all_params = set(params)
+        for t1 in params:
+            if t1 is Any:
+                return Any
+            if isinstance(t1, TypeVar):
+                continue
+            if any(issubclass(t1, t2)
+                   for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
+                all_params.remove(t1)
+        # It's not a union if there's only one type left.
+        if len(all_params) == 1:
+            return all_params.pop()
+        # Create a new class with these params.
+        self = super().__new__(cls, name, bases, {}, _root=True)
+        self.__union_params__ = tuple(t for t in params if t in all_params)
+        self.__union_set_params__ = frozenset(self.__union_params__)
+        return self
+
+    def _eval_type(self, globalns, localns):
+        p = tuple(_eval_type(t, globalns, localns)
+                  for t in self.__union_params__)
+        if p == self.__union_params__:
+            return self
+        else:
+            return self.__class__(self.__name__, self.__bases__, {},
+                                  p, _root=True)
+
+    def _has_type_var(self):
+        if self.__union_params__:
+            for t in self.__union_params__:
+                if _has_type_var(t):
+                    return True
+        return False
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__union_params__:
+            r += '[%s]' % (', '.join(_type_repr(t)
+                                     for t in self.__union_params__))
+        return r
+
+    def __getitem__(self, parameters):
+        if self.__union_params__ is not None:
+            raise TypeError(
+                "Cannot subscript an existing Union. Use Union[u, t] instead.")
+        if parameters == ():
+            raise TypeError("Cannot take a Union of no types.")
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        return self.__class__(self.__name__, self.__bases__,
+                              dict(self.__dict__), parameters, _root=True)
+
+    def __eq__(self, other):
+        if not isinstance(other, UnionMeta):
+            return NotImplemented
+        return self.__union_set_params__ == other.__union_set_params__
+
+    def __hash__(self):
+        return hash(self.__union_set_params__)
+
+    def __instancecheck__(self, instance):
+        return (self.__union_set_params__ is not None and
+                any(isinstance(instance, t) for t in self.__union_params__))
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if self.__union_params__ is None:
+            return isinstance(cls, UnionMeta)
+        elif isinstance(cls, UnionMeta):
+            if cls.__union_params__ is None:
+                return False
+            return all(issubclass(c, self) for c in (cls.__union_params__))
+        elif isinstance(cls, TypeVar):
+            if cls in self.__union_params__:
+                return True
+            if cls.__constraints__:
+                return issubclass(Union[cls.__constraints__], self)
+            return False
+        else:
+            return any(issubclass(cls, t) for t in self.__union_params__)
+
+
+class Union(Final, metaclass=UnionMeta, _root=True):
+    """Union type; Union[X, Y] means either X or Y.
+
+    To define a union, use e.g. Union[int, str].  Details:
+
+    - The arguments must be types and there must be at least one.
+
+    - None as an argument is a special case and is replaced by
+      type(None).
+
+    - Unions of unions are flattened, e.g.::
+
+        Union[Union[int, str], float] == Union[int, str, float]
+
+    - Unions of a single argument vanish, e.g.::
+
+        Union[int] == int  # The constructor actually returns int
+
+    - Redundant arguments are skipped, e.g.::
+
+        Union[int, str, int] == Union[int, str]
+
+    - When comparing unions, the argument order is ignored, e.g.::
+
+        Union[int, str] == Union[str, int]
+
+    - When two arguments have a subclass relationship, the least
+      derived argument is kept, e.g.::
+
+        class Employee: pass
+        class Manager(Employee): pass
+        Union[int, Employee, Manager] == Union[int, Employee]
+        Union[Manager, int, Employee] == Union[int, Employee]
+        Union[Employee, Manager] == Employee
+
+    - Corollary: if Any is present it is the sole survivor, e.g.::
+
+        Union[int, Any] == Any
+
+    - Similar for object::
+
+        Union[int, object] == object
+
+    - To cut a tie: Union[object, Any] == Union[Any, object] == Any.
+
+    - You cannot subclass or instantiate a union.
+
+    - You cannot write Union[X][Y] (what would it mean?).
+
+    - You can use Optional[X] as a shorthand for Union[X, None].
+    """
+
+    # Unsubscripted Union type has params set to None.
+    __union_params__ = None
+    __union_set_params__ = None
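+
+
+# Editor's note -- a brief, hedged illustration (not part of the upstream
+# file) of the Union properties documented above; each equality follows
+# from UnionMeta.__new__.
+assert Union[Union[int, str], float] == Union[int, str, float]  # flattened
+assert Union[int, str, int] == Union[int, str]   # duplicates removed
+assert Union[int] is int                         # single argument collapses
+assert Union[int, str] == Union[str, int]        # argument order ignored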
+
+
+class OptionalMeta(TypingMeta):
+    """Metaclass for Optional."""
+
+    def __new__(cls, name, bases, namespace, _root=False):
+        return super().__new__(cls, name, bases, namespace, _root=_root)
+
+    def __getitem__(self, arg):
+        arg = _type_check(arg, "Optional[t] requires a single type.")
+        return Union[arg, type(None)]
+
+
+class Optional(Final, metaclass=OptionalMeta, _root=True):
+    """Optional type.
+
+    Optional[X] is equivalent to Union[X, type(None)].
+    """
+
+
+class TupleMeta(TypingMeta):
+    """Metaclass for Tuple."""
+
+    def __new__(cls, name, bases, namespace, parameters=None,
+                use_ellipsis=False, _root=False):
+        self = super().__new__(cls, name, bases, namespace, _root=_root)
+        self.__tuple_params__ = parameters
+        self.__tuple_use_ellipsis__ = use_ellipsis
+        return self
+
+    def _has_type_var(self):
+        if self.__tuple_params__:
+            for t in self.__tuple_params__:
+                if _has_type_var(t):
+                    return True
+        return False
+
+    def _eval_type(self, globalns, localns):
+        tp = self.__tuple_params__
+        if tp is None:
+            return self
+        p = tuple(_eval_type(t, globalns, localns) for t in tp)
+        if p == self.__tuple_params__:
+            return self
+        else:
+            return self.__class__(self.__name__, self.__bases__, {},
+                                  p, _root=True)
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__tuple_params__ is not None:
+            params = [_type_repr(p) for p in self.__tuple_params__]
+            if self.__tuple_use_ellipsis__:
+                params.append('...')
+            r += '[%s]' % (
+                ', '.join(params))
+        return r
+
+    def __getitem__(self, parameters):
+        if self.__tuple_params__ is not None:
+            raise TypeError("Cannot re-parameterize %r" % (self,))
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        if len(parameters) == 2 and parameters[1] == Ellipsis:
+            parameters = parameters[:1]
+            use_ellipsis = True
+            msg = "Tuple[t, ...]: t must be a type."
+        else:
+            use_ellipsis = False
+            msg = "Tuple[t0, t1, ...]: each t must be a type."
+        parameters = tuple(_type_check(p, msg) for p in parameters)
+        return self.__class__(self.__name__, self.__bases__,
+                              dict(self.__dict__), parameters,
+                              use_ellipsis=use_ellipsis, _root=True)
+
+    def __eq__(self, other):
+        if not isinstance(other, TupleMeta):
+            return NotImplemented
+        return self.__tuple_params__ == other.__tuple_params__
+
+    def __hash__(self):
+        return hash(self.__tuple_params__)
+
+    def __instancecheck__(self, t):
+        if not isinstance(t, tuple):
+            return False
+        if self.__tuple_params__ is None:
+            return True
+        if self.__tuple_use_ellipsis__:
+            p = self.__tuple_params__[0]
+            return all(isinstance(x, p) for x in t)
+        else:
+            return (len(t) == len(self.__tuple_params__) and
+                    all(isinstance(x, p)
+                        for x, p in zip(t, self.__tuple_params__)))
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if not isinstance(cls, type):
+            return super().__subclasscheck__(cls)  # To TypeError.
+        if issubclass(cls, tuple):
+            return True  # Special case.
+        if not isinstance(cls, TupleMeta):
+            return super().__subclasscheck__(cls)  # False.
+        if self.__tuple_params__ is None:
+            return True
+        if cls.__tuple_params__ is None:
+            return False  # The bare Tuple is not a subclass of a parameterized one.
+        if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__:
+            return False
+        # Covariance.
+        return (len(self.__tuple_params__) == len(cls.__tuple_params__) and
+                all(issubclass(x, p)
+                    for x, p in zip(cls.__tuple_params__,
+                                    self.__tuple_params__)))
+
+
+class Tuple(Final, metaclass=TupleMeta, _root=True):
+    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+    Example: Tuple[T1, T2] is a tuple of two elements corresponding
+    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
+    of an int, a float and a string.
+
+    To specify a variable-length tuple of homogeneous type, use Sequence[T].
+    """
+
+
+class CallableMeta(TypingMeta):
+    """Metaclass for Callable."""
+
+    def __new__(cls, name, bases, namespace, _root=False,
+                args=None, result=None):
+        if args is None and result is None:
+            pass  # Must be 'class Callable'.
+        else:
+            if args is not Ellipsis:
+                if not isinstance(args, list):
+                    raise TypeError("Callable[args, result]: "
+                                    "args must be a list."
+                                    " Got %.100r." % (args,))
+                msg = "Callable[[arg, ...], result]: each arg must be a type."
+                args = tuple(_type_check(arg, msg) for arg in args)
+            msg = "Callable[args, result]: result must be a type."
+            result = _type_check(result, msg)
+        self = super().__new__(cls, name, bases, namespace, _root=_root)
+        self.__args__ = args
+        self.__result__ = result
+        return self
+
+    def _has_type_var(self):
+        if self.__args__:
+            for t in self.__args__:
+                if _has_type_var(t):
+                    return True
+        return _has_type_var(self.__result__)
+
+    def _eval_type(self, globalns, localns):
+        if self.__args__ is None and self.__result__ is None:
+            return self
+        args = [_eval_type(t, globalns, localns) for t in self.__args__]
+        result = _eval_type(self.__result__, globalns, localns)
+        if args == self.__args__ and result == self.__result__:
+            return self
+        else:
+            return self.__class__(self.__name__, self.__bases__, {},
+                                  args=args, result=result, _root=True)
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__args__ is not None or self.__result__ is not None:
+            if self.__args__ is Ellipsis:
+                args_r = '...'
+            else:
+                args_r = '[%s]' % ', '.join(_type_repr(t)
+                                            for t in self.__args__)
+            r += '[%s, %s]' % (args_r, _type_repr(self.__result__))
+        return r
+
+    def __getitem__(self, parameters):
+        if self.__args__ is not None or self.__result__ is not None:
+            raise TypeError("This Callable type is already parameterized.")
+        if not isinstance(parameters, tuple) or len(parameters) != 2:
+            raise TypeError(
+                "Callable must be used as Callable[[arg, ...], result].")
+        args, result = parameters
+        return self.__class__(self.__name__, self.__bases__,
+                              dict(self.__dict__), _root=True,
+                              args=args, result=result)
+
+    def __eq__(self, other):
+        if not isinstance(other, CallableMeta):
+            return NotImplemented
+        return (self.__args__ == other.__args__ and
+                self.__result__ == other.__result__)
+
+    def __hash__(self):
+        return hash(self.__args__) ^ hash(self.__result__)
+
+    def __instancecheck__(self, instance):
+        if not callable(instance):
+            return False
+        if self.__args__ is None and self.__result__ is None:
+            return True
+        assert self.__args__ is not None
+        assert self.__result__ is not None
+        my_args, my_result = self.__args__, self.__result__
+        import inspect  # TODO: Avoid this import.
+        # Would it be better to use Signature objects?
+        try:
+            (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults,
+             annotations) = inspect.getfullargspec(instance)
+        except TypeError:
+            return False  # We can't find the signature.  Give up.
+        msg = ("When testing isinstance(<callable>, Callable[...], "
+               "<calleble>'s annotations must be types.")
+        if my_args is not Ellipsis:
+            if kwonlyargs and (not kwonlydefaults or
+                               len(kwonlydefaults) < len(kwonlyargs)):
+                return False
+            if isinstance(instance, types.MethodType):
+                # For methods, getfullargspec() includes self/cls,
+                # but it's not part of the call signature, so drop it.
+                del args[0]
+            min_call_args = len(args)
+            if defaults:
+                min_call_args -= len(defaults)
+            if varargs:
+                max_call_args = 999999999
+                if len(args) < len(my_args):
+                    args += [varargs] * (len(my_args) - len(args))
+            else:
+                max_call_args = len(args)
+            if not min_call_args <= len(my_args) <= max_call_args:
+                return False
+            for my_arg_type, name in zip(my_args, args):
+                if name in annotations:
+                    annot_type = _type_check(annotations[name], msg)
+                else:
+                    annot_type = Any
+                if not issubclass(my_arg_type, annot_type):
+                    return False
+                # TODO: If mutable type, check invariance?
+        if 'return' in annotations:
+            annot_return_type = _type_check(annotations['return'], msg)
+            # Note contravariance here!
+            if not issubclass(annot_return_type, my_result):
+                return False
+        # Can't find anything wrong...
+        return True
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if not isinstance(cls, CallableMeta):
+            return super().__subclasscheck__(cls)
+        if self.__args__ is None and self.__result__ is None:
+            return True
+        # We're not doing covariance or contravariance -- this is *invariance*.
+        return self == cls
+
+
+class Callable(Final, metaclass=CallableMeta, _root=True):
+    """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+    The subscription syntax must always be used with exactly two
+    values: the argument list and the return type.  The argument list
+    must be a list of types; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments;
+    such function types are rarely used as callback types.
+    """
+
+
+def _gorg(a):
+    """Return the farthest origin of a generic class."""
+    assert isinstance(a, GenericMeta)
+    while a.__origin__ is not None:
+        a = a.__origin__
+    return a
+
+
+def _geqv(a, b):
+    """Return whether two generic classes are equivalent.
+
+    The intention is to consider generic class X and any of its
+    parameterized forms (X[T], X[int], etc.)  as equivalent.
+
+    However, X is not equivalent to a subclass of X.
+
+    The relation is reflexive, symmetric and transitive.
+    """
+    assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
+    # Reduce each to its origin.
+    return _gorg(a) is _gorg(b)
+
+
+class GenericMeta(TypingMeta, abc.ABCMeta):
+    """Metaclass for generic types."""
+
+    # TODO: Constrain more how Generic is used; only a few
+    # standard patterns should be allowed.
+
+    # TODO: Use a more precise rule than matching __name__ to decide
+    # whether two classes are the same.  Also, save the formal
+    # parameters.  (These things are related!  A solution lies in
+    # using origin.)
+
+    __extra__ = None
+
+    def __new__(cls, name, bases, namespace,
+                parameters=None, origin=None, extra=None):
+        if parameters is None:
+            # Extract parameters from direct base classes.  Only
+            # direct bases are considered and only those that are
+            # themselves generic, and parameterized with type
+            # variables.  Don't use bases like Any, Union, Tuple,
+            # Callable or type variables.
+            params = None
+            for base in bases:
+                if isinstance(base, TypingMeta):
+                    if not isinstance(base, GenericMeta):
+                        raise TypeError(
+                            "You cannot inherit from magic class %s" %
+                            repr(base))
+                    if base.__parameters__ is None:
+                        continue  # The base is unparameterized.
+                    for bp in base.__parameters__:
+                        if _has_type_var(bp) and not isinstance(bp, TypeVar):
+                            raise TypeError(
+                                "Cannot inherit from a generic class "
+                                "parameterized with "
+                                "non-type-variable %s" % bp)
+                        if params is None:
+                            params = []
+                        if bp not in params:
+                            params.append(bp)
+            if params is not None:
+                parameters = tuple(params)
+        self = super().__new__(cls, name, bases, namespace, _root=True)
+        self.__parameters__ = parameters
+        if extra is not None:
+            self.__extra__ = extra
+        # Else __extra__ is inherited, eventually from the
+        # (meta-)class default above.
+        self.__origin__ = origin
+        return self
+
+    def _has_type_var(self):
+        if self.__parameters__:
+            for t in self.__parameters__:
+                if _has_type_var(t):
+                    return True
+        return False
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__parameters__ is not None:
+            r += '[%s]' % (
+                ', '.join(_type_repr(p) for p in self.__parameters__))
+        return r
+
+    def __eq__(self, other):
+        if not isinstance(other, GenericMeta):
+            return NotImplemented
+        return (_geqv(self, other) and
+                self.__parameters__ == other.__parameters__)
+
+    def __hash__(self):
+        return hash((self.__name__, self.__parameters__))
+
+    def __getitem__(self, params):
+        if not isinstance(params, tuple):
+            params = (params,)
+        if not params:
+            raise TypeError("Cannot have empty parameter list")
+        msg = "Parameters to generic types must be types."
+        params = tuple(_type_check(p, msg) for p in params)
+        if self.__parameters__ is None:
+            for p in params:
+                if not isinstance(p, TypeVar):
+                    raise TypeError("Initial parameters must be "
+                                    "type variables; got %s" % p)
+        else:
+            if len(params) != len(self.__parameters__):
+                raise TypeError("Cannot change parameter count from %d to %d" %
+                                (len(self.__parameters__), len(params)))
+            for new, old in zip(params, self.__parameters__):
+                if isinstance(old, TypeVar):
+                    if not old.__constraints__:
+                        # Substituting for an unconstrained TypeVar is OK.
+                        continue
+                    if issubclass(new, Union[old.__constraints__]):
+                        # Specializing a constrained type variable is OK.
+                        continue
+                if not issubclass(new, old):
+                    raise TypeError(
+                        "Cannot substitute %s for %s in %s" %
+                        (_type_repr(new), _type_repr(old), self))
+
+        return self.__class__(self.__name__, self.__bases__,
+                              dict(self.__dict__),
+                              parameters=params,
+                              origin=self,
+                              extra=self.__extra__)
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if isinstance(cls, GenericMeta):
+            # For a class C(Generic[T]) where T is co-variant,
+            # C[X] is a subclass of C[Y] iff X is a subclass of Y.
+            origin = self.__origin__
+            if origin is not None and origin is cls.__origin__:
+                assert len(self.__parameters__) == len(origin.__parameters__)
+                assert len(cls.__parameters__) == len(origin.__parameters__)
+                for p_self, p_cls, p_origin in zip(self.__parameters__,
+                                                   cls.__parameters__,
+                                                   origin.__parameters__):
+                    if isinstance(p_origin, TypeVar):
+                        if p_origin.__covariant__:
+                            # Covariant -- p_cls must be a subclass of p_self.
+                            if not issubclass(p_cls, p_self):
+                                break
+                        elif p_origin.__contravariant__:
+                            # Contravariant -- p_self must be a subclass of p_cls.
+                            if not issubclass(p_self, p_cls):
+                                break
+                        else:
+                            # Invariant -- p_cls and p_self must be equal.
+                            if p_self != p_cls:
+                                break
+                    else:
+                        # If the origin's parameter is not a typevar,
+                        # insist on invariance.
+                        if p_self != p_cls:
+                            break
+                else:
+                    return True
+                # If we break out of the loop, the superclass gets a chance.
+        if super().__subclasscheck__(cls):
+            return True
+        if self.__extra__ is None or isinstance(cls, GenericMeta):
+            return False
+        return issubclass(cls, self.__extra__)
+
+    def __instancecheck__(self, obj):
+        if super().__instancecheck__(obj):
+            return True
+        if self.__extra__ is None:
+            return False
+        return isinstance(obj, self.__extra__)
+
+
+class Generic(metaclass=GenericMeta):
+    """Abstract base class for generic types.
+
+    A generic type is typically declared by inheriting from an
+    instantiation of this class with one or more type variables.
+    For example, a generic mapping type might be defined as::
+
+      class Mapping(Generic[KT, VT]):
+          def __getitem__(self, key: KT) -> VT:
+              ...
+          # Etc.
+
+    This class can then be used as follows::
+
+      def lookup_name(mapping: Mapping, key: KT, default: VT) -> VT:
+          try:
+              return mapping[key]
+          except KeyError:
+              return default
+
+    For clarity the type variables may be redefined, e.g.::
+
+      X = TypeVar('X')
+      Y = TypeVar('Y')
+      def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y:
+          # Same body as above.
+    """
+
+    def __new__(cls, *args, **kwds):
+        next_in_mro = object
+        # Look for the last occurrence of Generic or Generic[...].
+        for i, c in enumerate(cls.__mro__[:-1]):
+            if isinstance(c, GenericMeta) and _gorg(c) is Generic:
+                next_in_mro = cls.__mro__[i+1]
+        return next_in_mro.__new__(_gorg(cls))
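+
+
+# Editor's note -- a tiny, hedged example (not part of the upstream file):
+# `_Box` is a hypothetical generic class.  Parameterizing it records the
+# unparameterized class in __origin__, and _geqv() treats both as equivalent.
+class _Box(Generic[T]):
+    def __init__(self, item: T) -> None:
+        self.item = item
+
+assert _Box[int].__origin__ is _Box
+assert _geqv(_Box[int], _Box)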
+
+
+def cast(typ, val):
+    """Cast a value to a type.
+
+    This returns the value unchanged.  To the type checker this
+    signals that the return value has the designated type, but at
+    runtime we intentionally don't check anything (we want this
+    to be as fast as possible).
+    """
+    return val
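+
+
+# Editor's note (illustrative only, not part of the upstream file): at
+# runtime cast() is an identity function, so the value comes back unchanged.
+assert cast(str, 42) == 42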
+
+
+def _get_defaults(func):
+    """Internal helper to extract the default arguments, by name."""
+    code = func.__code__
+    pos_count = code.co_argcount
+    kw_count = code.co_kwonlyargcount
+    arg_names = code.co_varnames
+    kwarg_names = arg_names[pos_count:pos_count + kw_count]
+    arg_names = arg_names[:pos_count]
+    defaults = func.__defaults__ or ()
+    kwdefaults = func.__kwdefaults__
+    res = dict(kwdefaults) if kwdefaults else {}
+    pos_offset = pos_count - len(defaults)
+    for name, value in zip(arg_names[pos_offset:], defaults):
+        assert name not in res
+        res[name] = value
+    return res
+
+
+def get_type_hints(obj, globalns=None, localns=None):
+    """Return type hints for a function or method object.
+
+    This is often the same as obj.__annotations__, but it handles
+    forward references encoded as string literals, and if necessary
+    adds Optional[t] if a default value equal to None is set.
+
+    BEWARE -- the behavior of globalns and localns is counterintuitive
+    (unless you are familiar with how eval() and exec() work).  The
+    search order is locals first, then globals.
+
+    - If no dict arguments are passed, an attempt is made to use the
+      globals from obj, and these are also used as the locals.  If the
+      object does not appear to have globals, an exception is raised.
+
+    - If one dict argument is passed, it is used for both globals and
+      locals.
+
+    - If two dict arguments are passed, they specify globals and
+      locals, respectively.
+    """
+    if getattr(obj, '__no_type_check__', None):
+        return {}
+    if globalns is None:
+        globalns = getattr(obj, '__globals__', {})
+        if localns is None:
+            localns = globalns
+    elif localns is None:
+        localns = globalns
+    defaults = _get_defaults(obj)
+    hints = dict(obj.__annotations__)
+    for name, value in hints.items():
+        if isinstance(value, str):
+            value = _ForwardRef(value)
+        value = _eval_type(value, globalns, localns)
+        if name in defaults and defaults[name] is None:
+            value = Optional[value]
+        hints[name] = value
+    return hints
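+
+
+# Editor's note -- a short, hedged illustration (not part of the upstream
+# file); `_greet` is a hypothetical function.  Because 'excited' defaults
+# to None, get_type_hints() wraps its annotation in Optional[...].
+def _greet(name: str, excited: bool = None) -> str:
+    return name
+
+assert get_type_hints(_greet)['excited'] == Optional[bool]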
+
+
+# TODO: Also support this as a class decorator.
+def no_type_check(arg):
+    """Decorator to indicate that annotations are not type hints.
+
+    The argument must be a class or function; if it is a class, it
+    applies recursively to all methods defined in that class (but not
+    to methods defined in its superclasses or subclasses).
+
+    This mutates the function(s) in place.
+    """
+    if isinstance(arg, type):
+        for obj in arg.__dict__.values():
+            if isinstance(obj, types.FunctionType):
+                obj.__no_type_check__ = True
+    else:
+        arg.__no_type_check__ = True
+    return arg
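+
+
+# Editor's note (illustrative only, not part of the upstream file): once a
+# function is decorated, get_type_hints() ignores its annotations entirely.
+@no_type_check
+def _untyped(x: 'not a real type') -> None:
+    pass
+
+assert get_type_hints(_untyped) == {}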
+
+
+def no_type_check_decorator(decorator):
+    """Decorator to give another decorator the @no_type_check effect.
+
+    This wraps the decorator with something that wraps the decorated
+    function in @no_type_check.
+    """
+
+    @functools.wraps(decorator)
+    def wrapped_decorator(*args, **kwds):
+        func = decorator(*args, **kwds)
+        func = no_type_check(func)
+        return func
+
+    return wrapped_decorator
+
+
+def overload(func):
+    raise RuntimeError("Overloading is only supported in library stubs")
+
+
+class _ProtocolMeta(GenericMeta):
+    """Internal metaclass for _Protocol.
+
+    This exists so _Protocol classes can be generic without deriving
+    from Generic.
+    """
+
+    def __subclasscheck__(self, cls):
+        if not self._is_protocol:
+            # No structural checks since this isn't a protocol.
+            return NotImplemented
+
+        if self is _Protocol:
+            # Every class is a subclass of the empty protocol.
+            return True
+
+        # Find all attributes defined in the protocol.
+        attrs = self._get_protocol_attrs()
+
+        for attr in attrs:
+            if not any(attr in d.__dict__ for d in cls.__mro__):
+                return False
+        return True
+
+    def _get_protocol_attrs(self):
+        # Get all Protocol base classes.
+        protocol_bases = []
+        for c in self.__mro__:
+            if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
+                protocol_bases.append(c)
+
+        # Get attributes included in protocol.
+        attrs = set()
+        for base in protocol_bases:
+            for attr in base.__dict__.keys():
+                # Include attributes not defined in any non-protocol bases.
+                for c in self.__mro__:
+                    if (c is not base and attr in c.__dict__ and
+                            not getattr(c, '_is_protocol', False)):
+                        break
+                else:
+                    if (not attr.startswith('_abc_') and
+                        attr != '__abstractmethods__' and
+                        attr != '_is_protocol' and
+                        attr != '__dict__' and
+                        attr != '_get_protocol_attrs' and
+                        attr != '__parameters__' and
+                        attr != '__origin__' and
+                        attr != '__module__'):
+                        attrs.add(attr)
+
+        return attrs
+
+
+class _Protocol(metaclass=_ProtocolMeta):
+    """Internal base class for protocol classes.
+
+    This implements a simple-minded structural isinstance check
+    (similar but more general than the one-offs in collections.abc
+    such as Hashable).
+    """
+
+    _is_protocol = True
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+Hashable = collections_abc.Hashable  # Not generic.
+
+
+class Iterable(Generic[T_co], extra=collections_abc.Iterable):
+    pass
+
+
+class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
+    pass
+
+
+class SupportsInt(_Protocol):
+
+    @abstractmethod
+    def __int__(self) -> int:
+        pass
+
+
+class SupportsFloat(_Protocol):
+
+    @abstractmethod
+    def __float__(self) -> float:
+        pass
+
+
+class SupportsComplex(_Protocol):
+
+    @abstractmethod
+    def __complex__(self) -> complex:
+        pass
+
+
+class SupportsBytes(_Protocol):
+
+    @abstractmethod
+    def __bytes__(self) -> bytes:
+        pass
+
+
+class SupportsAbs(_Protocol[T]):
+
+    @abstractmethod
+    def __abs__(self) -> T:
+        pass
+
+
+class SupportsRound(_Protocol[T]):
+
+    @abstractmethod
+    def __round__(self, ndigits: int = 0) -> T:
+        pass
+
+
+class Reversible(_Protocol[T]):
+
+    @abstractmethod
+    def __reversed__(self) -> 'Iterator[T]':
+        pass
+
+
+Sized = collections_abc.Sized  # Not generic.
+
+
+class Container(Generic[T_co], extra=collections_abc.Container):
+    pass
+
+
+# Callable was defined earlier.
+
+
+class AbstractSet(Sized, Iterable[T_co], Container[T_co],
+                  extra=collections_abc.Set):
+    pass
+
+
+class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
+    pass
+
+
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
+              extra=collections_abc.Mapping):
+    pass
+
+
+class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
+    pass
+
+
+class Sequence(Sized, Iterable[T_co], Container[T_co],
+               extra=collections_abc.Sequence):
+    pass
+
+
+class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
+    pass
+
+
+class ByteString(Sequence[int], extra=collections_abc.ByteString):
+    pass
+
+
+ByteString.register(type(memoryview(b'')))
+
+
+class _ListMeta(GenericMeta):
+
+    def __instancecheck__(self, obj):
+        if not super().__instancecheck__(obj):
+            return False
+        itemtype = self.__parameters__[0]
+        for x in obj:
+            if not isinstance(x, itemtype):
+                return False
+        return True
+
+
+class List(list, MutableSequence[T], metaclass=_ListMeta):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, List):
+            raise TypeError("Type List cannot be instantiated; "
+                            "use list() instead")
+        return list.__new__(cls, *args, **kwds)
+
+
+class _SetMeta(GenericMeta):
+
+    def __instancecheck__(self, obj):
+        if not super().__instancecheck__(obj):
+            return False
+        itemtype = self.__parameters__[0]
+        for x in obj:
+            if not isinstance(x, itemtype):
+                return False
+        return True
+
+
+class Set(set, MutableSet[T], metaclass=_SetMeta):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Set):
+            raise TypeError("Type Set cannot be instantiated; "
+                            "use set() instead")
+        return set.__new__(cls, *args, **kwds)
+
+
+class _FrozenSetMeta(_SetMeta):
+    """This metaclass ensures set is not a subclass of FrozenSet.
+
+    Without this metaclass, set would be considered a subclass of
+    FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and
+    set is a subclass of that.
+    """
+
+    def __subclasscheck__(self, cls):
+        if issubclass(cls, Set):
+            return False
+        return super().__subclasscheck__(cls)
+
+    def __instancecheck__(self, obj):
+        if issubclass(obj.__class__, Set):
+            return False
+        return super().__instancecheck__(obj)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, FrozenSet):
+            raise TypeError("Type FrozenSet cannot be instantiated; "
+                            "use frozenset() instead")
+        return frozenset.__new__(cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
+    pass
+
+
+class KeysView(MappingView[KT_co], AbstractSet[KT_co],
+               extra=collections_abc.KeysView):
+    pass
+
+
+# TODO: Enable Set[Tuple[KT_co, VT_co]] instead of Generic[KT_co, VT_co].
+class ItemsView(MappingView, Generic[KT_co, VT_co],
+                extra=collections_abc.ItemsView):
+    pass
+
+
+class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
+    pass
+
+
+class _DictMeta(GenericMeta):
+
+    def __instancecheck__(self, obj):
+        if not super().__instancecheck__(obj):
+            return False
+        keytype, valuetype = self.__parameters__
+        for key, value in obj.items():
+            if not (isinstance(key, keytype) and
+                    isinstance(value, valuetype)):
+                return False
+        return True
+
+
+class Dict(dict, MutableMapping[KT, VT], metaclass=_DictMeta):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Dict):
+            raise TypeError("Type Dict cannot be instantiated; "
+                            "use dict() instead")
+        return dict.__new__(cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+    # Sufficiently recent versions of 3.5 have a Generator ABC.
+    _G_base = collections_abc.Generator
+else:
+    # Fall back on the exact type.
+    _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
+                extra=_G_base):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Generator):
+            raise TypeError("Type Generator cannot be instantiated; "
+                            "create a subclass instead")
+        return super().__new__(cls, *args, **kwds)
+
+
+def NamedTuple(typename, fields):
+    """Typed version of namedtuple.
+
+    Usage::
+
+        Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
+
+    This is equivalent to::
+
+        Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+    The resulting class has one extra attribute: _field_types,
+    giving a dict mapping field names to types.  (The field names
+    are in the _fields attribute, which is part of the namedtuple
+    API.)
+    """
+    fields = [(n, t) for n, t in fields]
+    cls = collections.namedtuple(typename, [n for n, t in fields])
+    cls._field_types = dict(fields)
+    return cls
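+
+
+# Editor's note -- a brief, hedged usage example (not part of the upstream
+# file); `_Employee` is a hypothetical record type.
+_Employee = NamedTuple('_Employee', [('name', str), ('id', int)])
+
+assert _Employee('Guido', 1).name == 'Guido'
+assert _Employee._field_types == {'name': str, 'id': int}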
+
+
+class IO(Generic[AnyStr]):
+    """Generic base class for TextIO and BinaryIO.
+
+    This is an abstract, generic version of the return of open().
+
+    NOTE: This does not distinguish between the different possible
+    classes (text vs. binary, read vs. write vs. read/write,
+    append-only, unbuffered).  The TextIO and BinaryIO subclasses
+    below capture the distinction between text and binary, which is
+    pervasive in the interface; however, we currently do not offer a
+    way to track the other distinctions in the type system.
+    """
+
+    @abstractproperty
+    def mode(self) -> str:
+        pass
+
+    @abstractproperty
+    def name(self) -> str:
+        pass
+
+    @abstractmethod
+    def close(self) -> None:
+        pass
+
+    @abstractmethod
+    def closed(self) -> bool:
+        pass
+
+    @abstractmethod
+    def fileno(self) -> int:
+        pass
+
+    @abstractmethod
+    def flush(self) -> None:
+        pass
+
+    @abstractmethod
+    def isatty(self) -> bool:
+        pass
+
+    @abstractmethod
+    def read(self, n: int = -1) -> AnyStr:
+        pass
+
+    @abstractmethod
+    def readable(self) -> bool:
+        pass
+
+    @abstractmethod
+    def readline(self, limit: int = -1) -> AnyStr:
+        pass
+
+    @abstractmethod
+    def readlines(self, hint: int = -1) -> List[AnyStr]:
+        pass
+
+    @abstractmethod
+    def seek(self, offset: int, whence: int = 0) -> int:
+        pass
+
+    @abstractmethod
+    def seekable(self) -> bool:
+        pass
+
+    @abstractmethod
+    def tell(self) -> int:
+        pass
+
+    @abstractmethod
+    def truncate(self, size: int = None) -> int:
+        pass
+
+    @abstractmethod
+    def writable(self) -> bool:
+        pass
+
+    @abstractmethod
+    def write(self, s: AnyStr) -> int:
+        pass
+
+    @abstractmethod
+    def writelines(self, lines: List[AnyStr]) -> None:
+        pass
+
+    @abstractmethod
+    def __enter__(self) -> 'IO[AnyStr]':
+        pass
+
+    @abstractmethod
+    def __exit__(self, type, value, traceback) -> None:
+        pass
+
+
+class BinaryIO(IO[bytes]):
+    """Typed version of the return of open() in binary mode."""
+
+    @abstractmethod
+    def write(self, s: Union[bytes, bytearray]) -> int:
+        pass
+
+    @abstractmethod
+    def __enter__(self) -> 'BinaryIO':
+        pass
+
+
+class TextIO(IO[str]):
+    """Typed version of the return of open() in text mode."""
+
+    @abstractproperty
+    def buffer(self) -> BinaryIO:
+        pass
+
+    @abstractproperty
+    def encoding(self) -> str:
+        pass
+
+    @abstractproperty
+    def errors(self) -> str:
+        pass
+
+    @abstractproperty
+    def line_buffering(self) -> bool:
+        pass
+
+    @abstractproperty
+    def newlines(self) -> Any:
+        pass
+
+    @abstractmethod
+    def __enter__(self) -> 'TextIO':
+        pass
+
+
+class io:
+    """Wrapper namespace for IO generic classes."""
+
+    __all__ = ['IO', 'TextIO', 'BinaryIO']
+    IO = IO
+    TextIO = TextIO
+    BinaryIO = BinaryIO
+
+io.__name__ = __name__ + '.io'
+sys.modules[io.__name__] = io
+
+
+Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
+                     lambda p: p.pattern)
+Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
+                   lambda m: m.re.pattern)
+
+
+class re:
+    """Wrapper namespace for re type aliases."""
+
+    __all__ = ['Pattern', 'Match']
+    Pattern = Pattern
+    Match = Match
+
+re.__name__ = __name__ + '.re'
+sys.modules[re.__name__] = re
diff --git a/mypy/__init__.py b/mypy/__init__.py
new file mode 100644
index 0000000..fa40002
--- /dev/null
+++ b/mypy/__init__.py
@@ -0,0 +1 @@
+# This page intentionally left blank
diff --git a/mypy/__main__.py b/mypy/__main__.py
new file mode 100644
index 0000000..0a6f792
--- /dev/null
+++ b/mypy/__main__.py
@@ -0,0 +1,5 @@
+"""Mypy type checker command line tool."""
+
+from mypy.main import main
+
+main(None)
diff --git a/mypy/applytype.py b/mypy/applytype.py
new file mode 100644
index 0000000..439c6c5
--- /dev/null
+++ b/mypy/applytype.py
@@ -0,0 +1,61 @@
+from typing import List, Dict
+
+import mypy.subtypes
+from mypy.expandtype import expand_type
+from mypy.types import Type, CallableType, AnyType
+from mypy.messages import MessageBuilder
+from mypy.nodes import Context
+
+
+def apply_generic_arguments(callable: CallableType, types: List[Type],
+                            msg: MessageBuilder, context: Context) -> Type:
+    """Apply generic type arguments to a callable type.
+
+    For example, applying [int] to 'def [T] (T) -> T' results in
+    'def [-1:int] (int) -> int'. Here '[-1:int]' is an implicit bound type
+    variable.
+
+    Note that each type can be None; in this case, it will not be applied.
+    """
+    tvars = callable.variables
+    if len(tvars) != len(types):
+        msg.incompatible_type_application(len(tvars), len(types), context)
+        return AnyType()
+
+    # Check that inferred type variable values are compatible with allowed
+    # values.  Also, promote subtype values to allowed values.
+    types = types[:]
+    for i, type in enumerate(types):
+        values = callable.variables[i].values
+        if values and type:
+            if isinstance(type, AnyType):
+                continue
+            for value in values:
+                if mypy.subtypes.is_subtype(type, value):
+                    types[i] = value
+                    break
+            else:
+                msg.incompatible_typevar_value(callable, i + 1, type, context)
+
+    # Create a map from type variable id to target type.
+    id_to_type = {}  # type: Dict[int, Type]
+    for i, tv in enumerate(tvars):
+        if types[i]:
+            id_to_type[tv.id] = types[i]
+
+    # Apply arguments to argument types.
+    arg_types = [expand_type(at, id_to_type) for at in callable.arg_types]
+
+    bound_vars = [(tv.id, id_to_type[tv.id])
+                  for tv in tvars
+                  if tv.id in id_to_type]
+
+    # The callable may retain some type vars if only some were applied.
+    remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type]
+
+    return callable.copy_modified(
+        arg_types=arg_types,
+        ret_type=expand_type(callable.ret_type, id_to_type),
+        variables=remaining_tvars,
+        bound_vars=callable.bound_vars + bound_vars,
+    )
diff --git a/mypy/build.py b/mypy/build.py
new file mode 100644
index 0000000..4d01c5d
--- /dev/null
+++ b/mypy/build.py
@@ -0,0 +1,1103 @@
+"""Facilities to analyze entire programs, including imported modules.
+
+Parse and analyze the source files of a program in the correct order
+(based on file dependencies), and collect the results.
+
+This module only directs a build, which is performed in multiple passes per
+file.  The individual passes are implemented in separate modules.
+
+The function build() is the main interface to this module.
+"""
+
+import os
+import os.path
+import shlex
+import subprocess
+import sys
+import re
+from os.path import dirname, basename
+
+from typing import Dict, List, Tuple, Iterable, cast, Set, Union, Optional
+
+from mypy.types import Type
+from mypy.nodes import MypyFile, Node, Import, ImportFrom, ImportAll
+from mypy.nodes import SymbolTableNode, MODULE_REF
+from mypy.semanal import SemanticAnalyzer, FirstPass, ThirdPass
+from mypy.checker import TypeChecker
+from mypy.errors import Errors, CompileError
+from mypy import parse
+from mypy import stats
+from mypy.report import Reports
+from mypy import defaults
+from mypy import moduleinfo
+from mypy import util
+
+
+# We need to know the location of this file to load data, but
+# until Python 3.4, __file__ is relative.
+__file__ = os.path.realpath(__file__)
+
+
+# Build targets (for selecting compiler passes)
+SEMANTIC_ANALYSIS = 0   # Semantic analysis only
+TYPE_CHECK = 1          # Type check
+
+
+# Build flags
+VERBOSE = 'verbose'              # More verbose messages (for troubleshooting)
+MODULE = 'module'                # Build module as a script
+PROGRAM_TEXT = 'program-text'    # Build command-line argument as a script
+TEST_BUILTINS = 'test-builtins'  # Use stub builtins to speed up tests
+DUMP_TYPE_STATS = 'dump-type-stats'
+DUMP_INFER_STATS = 'dump-infer-stats'
+SILENT_IMPORTS = 'silent-imports'  # Silence imports of .py files
+
+# State ids. These describe the states a source file / module can be in a
+# build.
+
+# We aren't processing this source file yet (no associated state object).
+UNSEEN_STATE = 0
+# The source file has a state object, but we haven't done anything with it yet.
+UNPROCESSED_STATE = 1
+# We've parsed the source file.
+PARSED_STATE = 2
+# We've done the first two passes of semantic analysis.
+PARTIAL_SEMANTIC_ANALYSIS_STATE = 3
+# We've semantically analyzed the source file.
+SEMANTICALLY_ANALYSED_STATE = 4
+# We've type checked the source file (and all its dependencies).
+TYPE_CHECKED_STATE = 5
+
+PYTHON_EXTENSIONS = ['.pyi', '.py']
+
+final_state = TYPE_CHECKED_STATE
+
+
+def earlier_state(s: int, t: int) -> bool:
+    return s < t
+
+
+class BuildResult:
+    """The result of a successful build.
+
+    Attributes:
+      files:  Dictionary from module name to related AST node.
+      types:  Dictionary from parse tree node to its inferred type.
+    """
+
+    def __init__(self, files: Dict[str, MypyFile],
+                 types: Dict[Node, Type]) -> None:
+        self.files = files
+        self.types = types
+
+
+class BuildSource:
+    def __init__(self, path: Optional[str], module: Optional[str],
+            text: Optional[str]) -> None:
+        self.path = path
+        self.module = module or '__main__'
+        self.text = text
+
+    def load(self, lib_path, pyversion: Tuple[int, int]) -> str:
+        """Load the module if needed. This also has the side effect
+        of calculating the effective path for modules."""
+        if self.text is not None:
+            return self.text
+
+        self.path = self.path or lookup_program(self.module, lib_path)
+        return read_program(self.path, pyversion)
+
+    @property
+    def effective_path(self) -> str:
+        """Return the effective path (ie, <string> if its from in memory)"""
+        return self.path or '<string>'
+
+
+def build(sources: List[BuildSource],
+          target: int,
+          alt_lib_path: str = None,
+          bin_dir: str = None,
+          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+          custom_typing_module: str = None,
+          implicit_any: bool = False,
+          report_dirs: Dict[str, str] = {},
+          flags: List[str] = None,
+          python_path: bool = False) -> BuildResult:
+    """Analyze a program.
+
+    A single call to build performs parsing, semantic analysis and optionally
+    type checking for the program *and* all imported modules, recursively.
+
+    Return BuildResult if successful; otherwise raise CompileError.
+
+    Args:
+      target: select passes to perform (a build target constant, e.g. TYPE_CHECK)
+      sources: list of sources to build
+      alt_lib_path: an additional directory for looking up library modules
+        (takes precedence over other directories)
+      bin_dir: directory containing the mypy script, used for finding data
+        directories; if omitted, use '.' as the data directory
+      pyversion: Python version (major, minor)
+      custom_typing_module: if not None, use this module id as an alias for typing
+      implicit_any: if True, add implicit Any signatures to all functions
+      flags: list of build options (e.g. VERBOSE)
+    """
+    flags = flags or []
+
+    data_dir = default_data_dir(bin_dir)
+
+    find_module_clear_caches()
+
+    # Determine the default module search path.
+    lib_path = default_lib_path(data_dir, pyversion, python_path)
+
+    if TEST_BUILTINS in flags:
+        # Use stub builtins (to speed up test cases and to make them easier to
+        # debug).
+        lib_path.insert(0, os.path.join(os.path.dirname(__file__), 'test', 'data', 'lib-stub'))
+    else:
+        for source in sources:
+            if source.path:
+                # Include directory of the program file in the module search path.
+                lib_path.insert(
+                    0, remove_cwd_prefix_from_path(dirname(source.path)))
+
+        # Do this even if running as a file, for sanity (mainly because with
+        # multiple builds, there could be a mix of files/modules, so it's
+        # easier to just define the semantics that we always add the current
+        # directory to the lib_path).
+        lib_path.insert(0, os.getcwd())
+
+    # Add MYPYPATH environment variable to front of library path, if defined.
+    lib_path[:0] = mypy_path()
+
+    # If provided, insert the caller-supplied extra module path to the
+    # beginning (highest priority) of the search path.
+    if alt_lib_path:
+        lib_path.insert(0, alt_lib_path)
+
+    # TODO Reports is global to a build manager but only supports a single "main file"
+    # Fix this.
+    reports = Reports(sources[0].effective_path, data_dir, report_dirs)
+
+    # Construct a build manager object that performs all the stages of the
+    # build in the correct order.
+    #
+    # Ignore current directory prefix in error messages.
+    manager = BuildManager(data_dir, lib_path, target,
+                           pyversion=pyversion, flags=flags,
+                           ignore_prefix=os.getcwd(),
+                           custom_typing_module=custom_typing_module,
+                           implicit_any=implicit_any,
+                           reports=reports)
+
+    # Construct information that describes the initial files. __main__ is the
+    # implicit module id and the import context is empty initially ([]).
+    initial_states = []  # type: List[UnprocessedFile]
+    for source in sources:
+        content = source.load(lib_path, pyversion)
+        info = StateInfo(source.effective_path, source.module, [], manager)
+        initial_state = UnprocessedFile(info, content)
+        initial_states += [initial_state]
+
+    # Perform the build by sending the files as new files (UnprocessedFile is
+    # the initial state of all files) to the manager. The manager will process
+    # the files and all dependent modules recursively.
+    result = manager.process(initial_states)
+    reports.finish()
+    return result
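+
+
+# Editor's sketch (hypothetical, not called anywhere): how a caller might
+# type check a single file with the default options.  'example.py' is an
+# assumed path used only for illustration.
+def _example_build() -> BuildResult:
+    source = BuildSource('example.py', None, None)
+    return build([source], TYPE_CHECK)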
+
+
+def default_data_dir(bin_dir: str) -> str:
+    # TODO fix this logic
+    if not bin_dir:
+        mypy_package = os.path.dirname(__file__)
+        parent = os.path.dirname(mypy_package)
+        if os.path.basename(parent) == 'site-packages':
+            # Installed in site-packages, but invoked with python3 -m mypy;
+            # __file__ is .../blah/lib/python3.N/site-packages/mypy/__init__.py;
+            # blah may be a virtualenv or /usr/local.  We want .../blah/lib/mypy.
+            lib = os.path.dirname(os.path.dirname(parent))
+            if os.path.basename(lib) == 'lib':
+                return os.path.join(lib, 'mypy')
+        # Default to directory containing this file's parent.
+        return parent
+    base = os.path.basename(bin_dir)
+    dir = os.path.dirname(bin_dir)
+    if (sys.platform == 'win32' and base.lower() == 'scripts'
+            and not os.path.isdir(os.path.join(dir, 'typeshed'))):
+        # Installed, on Windows.
+        return os.path.join(dir, 'Lib', 'mypy')
+    elif base == 'scripts':
+        # Assume that we have a repo check out or unpacked source tarball.
+        return os.path.dirname(bin_dir)
+    elif base == 'bin':
+        # Installed to somewhere (can be under /usr/local or anywhere).
+        return os.path.join(dir, 'lib', 'mypy')
+    elif base == 'python3':
+        # Assume python3 was installed with Homebrew on OS X.
+        return os.path.join(os.path.dirname(dir), 'lib', 'mypy')
+    else:
+        # Don't know where to find the data files!
+        raise RuntimeError("Broken installation: can't determine base dir")
+
+
+def mypy_path() -> List[str]:
+    path_env = os.getenv('MYPYPATH')
+    if not path_env:
+        return []
+    return path_env.split(os.pathsep)
+
+
+def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
+        python_path: bool) -> List[str]:
+    """Return default standard library search paths."""
+    # IDEA: Make this more portable.
+    path = []  # type: List[str]
+
+    auto = os.path.join(data_dir, 'stubs-auto')
+    if os.path.isdir(auto):
+        data_dir = auto
+
+    # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
+    # is that a module added with 3.4 will still be present in Python 3.5.
+    versions = ["%d.%d" % (pyversion[0], minor)
+                for minor in reversed(range(pyversion[1] + 1))]
+    # E.g. for Python 3.2, try 3.2/, 3.1/, 3.0/, 3/, 2and3/.
+    # (Note that 3.1 and 3.0 aren't really supported, but we don't care.)
+    for v in versions + [str(pyversion[0]), '2and3']:
+        for lib_type in ['stdlib', 'third_party']:
+            stubdir = os.path.join(data_dir, 'typeshed', lib_type, v)
+            if os.path.isdir(stubdir):
+                path.append(stubdir)
+
+    # Add fallback path that can be used if we have a broken installation.
+    if sys.platform != 'win32':
+        path.append('/usr/local/lib/mypy')
+
+    # Contents of Python's sys.path go last, to prefer the stubs
+    # TODO: To more closely model what Python actually does, builtins should
+    #       go first, then sys.path, then anything in stdlib and third_party.
+    if python_path:
+        path.extend(sys.path)
+
+    return path
+
+
+def lookup_program(module: str, lib_path: List[str]) -> str:
+    # Modules are .py or .pyi
+    path = find_module(module, lib_path)
+    if path:
+        return path
+    else:
+        raise CompileError([
+            "mypy: can't find module '{}'".format(module)])
+
+
+def read_program(path: str, pyversion: Tuple[int, int]) -> str:
+    try:
+        text = read_with_python_encoding(path, pyversion)
+    except IOError as ioerr:
+        raise CompileError([
+            "mypy: can't read file '{}': {}".format(path, ioerr.strerror)])
+    except UnicodeDecodeError as decodeerr:
+        raise CompileError([
+            "mypy: can't decode file '{}': {}".format(path, str(decodeerr))])
+    return text
+
+
+class BuildManager:
+    """This is the central class for building a mypy program.
+
+    It coordinates parsing, import processing, semantic analysis and
+    type checking. It manages state objects that actually perform the
+    build steps.
+
+    Attributes:
+      data_dir:        Mypy data directory (contains stubs)
+      target:          Build target; selects which passes to perform
+      lib_path:        Library path for looking up modules
+      semantic_analyzer:
+                       Semantic analyzer, pass 2
+      semantic_analyzer_pass3:
+                       Semantic analyzer, pass 3
+      type_checker:    Type checker
+      errors:          Used for reporting all errors
+      pyversion:       Python version (major, minor)
+      flags:           Build options
+      states:          States of all individual files that are being
+                       processed. Each file in a build is always represented
+                       by a single state object (after it has been encountered
+                       for the first time). This is the only place where
+                       states are stored.
+      module_files:    Map from module name to source file path. There is a
+                       1:1 mapping between modules and source files.
+      module_deps:     Cache for module dependencies (direct or indirect).
+                       Item (m, n) indicates whether m depends on n (directly
+                       or indirectly).
+      missing_modules: Set of modules encountered so far that could not be imported
+    """
+
+    def __init__(self, data_dir: str,
+                 lib_path: List[str],
+                 target: int,
+                 pyversion: Tuple[int, int],
+                 flags: List[str],
+                 ignore_prefix: str,
+                 custom_typing_module: str,
+                 implicit_any: bool,
+                 reports: Reports) -> None:
+        self.data_dir = data_dir
+        self.errors = Errors()
+        self.errors.set_ignore_prefix(ignore_prefix)
+        self.lib_path = tuple(lib_path)
+        self.target = target
+        self.pyversion = pyversion
+        self.flags = flags
+        self.custom_typing_module = custom_typing_module
+        self.implicit_any = implicit_any
+        self.reports = reports
+        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
+                                                  pyversion=pyversion)
+        modules = self.semantic_analyzer.modules
+        self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
+        self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
+        self.states = []  # type: List[State]
+        self.module_files = {}  # type: Dict[str, str]
+        self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
+        self.missing_modules = set()  # type: Set[str]
+
+    def process(self, initial_states: List['UnprocessedFile']) -> BuildResult:
+        """Perform a build.
+
+        The argument is a list of states that represent the initial
+        program files. This method should only be called once per
+        build manager object.  The return values are identical to the
+        return values of the build function.
+        """
+        self.states += initial_states
+        for initial_state in initial_states:
+            self.module_files[initial_state.id] = initial_state.path
+        for initial_state in initial_states:
+            initial_state.load_dependencies()
+
+        # Process states in a loop until all files (states) have been
+        # semantically analyzed or type checked (depending on target).
+        #
+        # We type check all files before the rest of the passes so that we can
+        # report errors and fail as quickly as possible.
+        while True:
+            # Find the next state that has all its dependencies met.
+            next = self.next_available_state()
+            if not next:
+                self.trace('done')
+                break
+
+            # Potentially output some debug information.
+            self.trace('next {} ({})'.format(next.path, next.state()))
+
+            # Set the import context for reporting error messages correctly.
+            self.errors.set_import_context(next.import_context)
+            # Process the state. The process method is responsible for adding a
+            # new state object representing the new state of the file.
+            next.process()
+
+            # Raise exception if the build failed. The build can fail for
+            # various reasons, such as parse error, semantic analysis error,
+            # etc.
+            if self.errors.is_blockers():
+                self.errors.raise_error()
+
+        # If there were no errors, all files should have been fully processed.
+        for s in self.states:
+            assert s.state() == final_state, (
+                '{} still unprocessed in state {}'.format(s.path, s.state()))
+
+        if self.errors.is_errors():
+            self.errors.raise_error()
+
+        # Collect a list of all files.
+        trees = []  # type: List[MypyFile]
+        for state in self.states:
+            trees.append(cast(ParsedFile, state).tree)
+
+        # Perform any additional passes after type checking for all the files.
+        self.final_passes(trees, self.type_checker.type_map)
+
+        return BuildResult(self.semantic_analyzer.modules,
+                           self.type_checker.type_map)
+
+    def next_available_state(self) -> 'State':
+        """Find a ready state (one that has all its dependencies met)."""
+        i = len(self.states) - 1
+        while i >= 0:
+            if self.states[i].is_ready():
+                num_incomplete = self.states[i].num_incomplete_deps()
+                if num_incomplete == 0:
+                    # This is perfect; no need to look for the best match.
+                    return self.states[i]
+            i -= 1
+        return None
+
+    def has_module(self, name: str) -> bool:
+        """Have we seen a module yet?"""
+        return name in self.module_files
+
+    def file_state(self, path: str) -> int:
+        """Return the state of a source file.
+
+        In particular, return UNSEEN_STATE if the file has no associated
+        state.
+
+        This function does not consider any dependencies.
+        """
+        for s in self.states:
+            if s.path == path:
+                return s.state()
+        return UNSEEN_STATE
+
+    def module_state(self, name: str) -> int:
+        """Return the state of a module.
+
+        In particular, return UNSEEN_STATE if the module has no associated
+        state.
+
+        This also considers module dependencies.
+        """
+        if not self.has_module(name):
+            return UNSEEN_STATE
+        state = final_state
+        fs = self.file_state(self.module_files[name])
+        if earlier_state(fs, state):
+            state = fs
+        return state
+
+    def is_dep(self, m1: str, m2: str, done: Set[str] = None) -> bool:
+        """Does m1 import m2 directly or indirectly?"""
+        # Have we computed this previously?
+        dep = self.module_deps.get((m1, m2))
+        if dep is not None:
+            return dep
+
+        if not done:
+            done = set([m1])
+
+        # m1 depends on m2 iff one of the deps of m1 depends on m2.
+        st = self.lookup_state(m1)
+        for m in st.dependencies:
+            if m in done:
+                continue
+            done.add(m)
+            # Cache this dependency.
+            self.module_deps[m1, m] = True
+            # Search recursively.
+            if m == m2 or self.is_dep(m, m2, done):
+                # Yes! Mark it in the cache.
+                self.module_deps[m1, m2] = True
+                return True
+        # No dependency. Mark it in the cache.
+        self.module_deps[m1, m2] = False
+        return False
+
+    def lookup_state(self, module: str) -> 'State':
+        for state in self.states:
+            if state.id == module:
+                return state
+        raise RuntimeError('%s not found' % module)
+
+    def all_imported_modules_in_file(self,
+                                     file: MypyFile) -> List[Tuple[str, int]]:
+        """Find all reachable import statements in a file.
+
+        Return list of tuples (module id, import line number) for all modules
+        imported in file.
+        """
+        def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
+            """Function to correct for relative imports."""
+            file_id = file.fullname()
+            rel = imp.relative
+            if rel == 0:
+                return imp.id
+            if os.path.basename(file.path).startswith('__init__.'):
+                rel -= 1
+            if rel != 0:
+                file_id = ".".join(file_id.split(".")[:-rel])
+            new_id = file_id + "." + imp.id if imp.id else file_id
+
+            return new_id
+
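+        # Illustrative examples for correct_rel_imp (module names are hypothetical):
+        #   in module 'pkg.sub.mod', 'from .. import x' (relative=2) resolves to 'pkg';
+        #   in 'pkg/sub/__init__.py', 'from . import y' (relative=1) resolves to 'pkg.sub'.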
+        res = []  # type: List[Tuple[str, int]]
+        for imp in file.imports:
+            if not imp.is_unreachable:
+                if isinstance(imp, Import):
+                    for id, _ in imp.ids:
+                        res.append((id, imp.line))
+                elif isinstance(imp, ImportFrom):
+                    cur_id = correct_rel_imp(imp)
+                    res.append((cur_id, imp.line))
+                    # Also add any imported names that are submodules.
+                    for name, __ in imp.names:
+                        sub_id = cur_id + '.' + name
+                        if self.is_module(sub_id):
+                            res.append((sub_id, imp.line))
+                elif isinstance(imp, ImportAll):
+                    res.append((correct_rel_imp(imp), imp.line))
+        return res
+
+    def is_module(self, id: str) -> bool:
+        """Is there a file in the file system corresponding to module id?"""
+        return find_module(id, self.lib_path) is not None
+
+    def final_passes(self, files: List[MypyFile],
+                     types: Dict[Node, Type]) -> None:
+        """Perform the code generation passes for type checked files."""
+        if self.target in [SEMANTIC_ANALYSIS, TYPE_CHECK]:
+            pass  # Nothing to do.
+        else:
+            raise RuntimeError('Unsupported target %d' % self.target)
+
+    def log(self, message: str) -> None:
+        if VERBOSE in self.flags:
+            print('LOG:', message, file=sys.stderr)
+
+    def trace(self, message: str) -> None:
+        if self.flags.count(VERBOSE) >= 2:
+            print('TRACE:', message, file=sys.stderr)
+
+
+def remove_cwd_prefix_from_path(p: str) -> str:
+    """Remove current working directory prefix from p, if present.
+
+    Also crawl up until a directory without __init__.py is found.
+
+    If the result would be empty, return '.' instead.
+    """
+    cur = os.getcwd()
+    # Add separator to the end of the path, unless one is already present.
+    if basename(cur) != '':
+        cur += os.sep
+    # Compute root path.
+    while p and os.path.isfile(os.path.join(p, '__init__.py')):
+        dir, base = os.path.split(p)
+        if not base:
+            break
+        p = dir
+    # Remove current directory prefix from the path, if present.
+    if p.startswith(cur):
+        p = p[len(cur):]
+    # Avoid returning an empty path; replace that with '.'.
+    if p == '':
+        p = '.'
+    return p
+
+
+class StateInfo:
+    """Description of a source file that is being built."""
+
+    def __init__(self, path: str, id: str,
+                 import_context: List[Tuple[str, int]],
+                 manager: BuildManager) -> None:
+        """Initialize state information.
+
+        Arguments:
+          path:    Path to the file
+          id:      Module id, such as 'os.path' or '__main__' (for the main
+                   program file)
+          import_context:
+                   The import trail that caused this module to be
+                   imported (path, line) tuples
+          manager: The manager that manages this build
+        """
+        self.path = path
+        self.id = id
+        self.import_context = import_context
+        self.manager = manager
+
+
+class State:
+    """Abstract base class for build states.
+
+    There is always at most one state per source file.
+    """
+
+    # The StateInfo attributes are duplicated here for convenience.
+    path = ''
+    id = ''
+    import_context = None  # type: List[Tuple[str, int]]
+    manager = None  # type: BuildManager
+    # Modules that this file directly depends on (in no particular order).
+    dependencies = None  # type: List[str]
+
+    def __init__(self, info: StateInfo) -> None:
+        self.path = info.path
+        self.id = info.id
+        self.import_context = info.import_context
+        self.manager = info.manager
+        self.dependencies = []
+
+    def info(self) -> StateInfo:
+        return StateInfo(self.path, self.id, self.import_context, self.manager)
+
+    def process(self) -> None:
+        raise RuntimeError('Not implemented')
+
+    def is_ready(self) -> bool:
+        """Return True if all dependencies are at least in the same state
+        as this object (but not in the initial state).
+        """
+        for module in self.dependencies:
+            state = self.manager.module_state(module)
+            if earlier_state(state,
+                             self.state()) or state == UNPROCESSED_STATE:
+                return False
+        return True
+
+    def num_incomplete_deps(self) -> int:
+        """Return the number of dependencies that are ready but incomplete."""
+        return 0  # Does not matter in this state
+
+    def state(self) -> int:
+        raise RuntimeError('Not implemented')
+
+    def switch_state(self, state_object: 'State') -> None:
+        """Called by state objects to replace the state of the file.
+
+        Also notify the manager.
+        """
+        for i in range(len(self.manager.states)):
+            if self.manager.states[i].path == state_object.path:
+                self.manager.states[i] = state_object
+                return
+        raise RuntimeError('State for {} not found'.format(state_object.path))
+
+    def errors(self) -> Errors:
+        return self.manager.errors
+
+    def semantic_analyzer(self) -> SemanticAnalyzer:
+        return self.manager.semantic_analyzer
+
+    def semantic_analyzer_pass3(self) -> ThirdPass:
+        return self.manager.semantic_analyzer_pass3
+
+    def type_checker(self) -> TypeChecker:
+        return self.manager.type_checker
+
+    def fail(self, path: str, line: int, msg: str, blocker: bool = True) -> None:
+        """Report an error in the build (e.g. if could not find a module)."""
+        self.errors().set_file(path)
+        self.errors().report(line, msg, blocker=blocker)
+
+    def module_not_found(self, path: str, line: int, id: str) -> None:
+        self.errors().set_file(path)
+        stub_msg = "(Stub files are from https://github.com/python/typeshed)"
+        if ((self.manager.pyversion[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
+                (self.manager.pyversion[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
+            self.errors().report(
+                line, "No library stub file for standard library module '{}'".format(id))
+            self.errors().report(line, stub_msg, severity='note', only_once=True)
+        elif moduleinfo.is_third_party_module(id):
+            self.errors().report(line, "No library stub file for module '{}'".format(id))
+            self.errors().report(line, stub_msg, severity='note', only_once=True)
+        else:
+            self.errors().report(line, "Cannot find module named '{}'".format(id))
+            self.errors().report(line, "(Perhaps setting MYPYPATH would help)", severity='note',
+                                 only_once=True)
+
+
+class UnprocessedFile(State):
+    def __init__(self, info: StateInfo, program_text: str) -> None:
+        super().__init__(info)
+        self.program_text = program_text
+        self.silent = SILENT_IMPORTS in self.manager.flags
+
+    def load_dependencies(self):
+        # Add surrounding package(s) as dependencies.
+        for p in super_packages(self.id):
+            if p in self.manager.missing_modules:
+                continue
+            if not self.import_module(p):
+                # Could not find a module. Typically the reason is a
+                # misspelled module name, missing stub, module not in
+                # search path or the module has not been installed.
+                if self.silent:
+                    self.manager.missing_modules.add(p)
+                else:
+                    self.module_not_found(self.path, 1, p)
+            else:
+                self.dependencies.append(p)
+
+    def process(self) -> None:
+        """Parse the file, store global names and advance to the next state."""
+        if self.id in self.manager.semantic_analyzer.modules:
+            self.fail(self.path, 1, "Duplicate module named '{}'".format(self.id))
+            return
+
+        tree = self.parse(self.program_text, self.path)
+
+        # Store the parsed module in the shared module symbol table.
+        self.manager.semantic_analyzer.modules[self.id] = tree
+
+        if '.' in self.id:
+            # Include module in the symbol table of the enclosing package.
+            c = self.id.split('.')
+            p = '.'.join(c[:-1])
+            sem_anal = self.manager.semantic_analyzer
+            if p in sem_anal.modules:
+                sem_anal.modules[p].names[c[-1]] = SymbolTableNode(
+                    MODULE_REF, tree, p)
+
+        if self.id != 'builtins':
+            # The builtins module is imported implicitly in every program (it
+            # contains definitions of int, print etc.).
+            self.manager.trace('import builtins')
+            if not self.import_module('builtins'):
+                self.fail(self.path, 1, 'Could not find builtins')
+
+        # Do the first pass of semantic analysis: add top-level definitions in
+        # the file to the symbol table. We must do this before processing imports,
+        # since this may mark some import statements as unreachable.
+        first = FirstPass(self.semantic_analyzer())
+        first.analyze(tree, self.path, self.id)
+
+        # Add all directly imported modules to be processed (however they are
+        # not processed yet, just waiting to be processed).
+        for id, line in self.manager.all_imported_modules_in_file(tree):
+            self.errors().push_import_context(self.path, line)
+            try:
+                res = self.import_module(id)
+            finally:
+                self.errors().pop_import_context()
+            if not res:
+                if id == '':
+                    # Must be from a relative import.
+                    self.fail(self.path, line,
+                              "No parent module -- cannot perform relative import".format(id),
+                              blocker=True)
+                else:
+                    if (line not in tree.ignored_lines and
+                            'import' not in tree.weak_opts and
+                            not self.silent):
+                        self.module_not_found(self.path, line, id)
+                self.manager.missing_modules.add(id)
+
+        # Initialize module symbol table, which was populated by the semantic
+        # analyzer.
+        tree.names = self.semantic_analyzer().globals
+
+        # Replace this state object with a parsed state in BuildManager.
+        self.switch_state(ParsedFile(self.info(), tree))
+
+    def import_module(self, id: str) -> bool:
+        """Schedule a module to be processed.
+
+        Add an unprocessed state object corresponding to the module to the
+        manager, or do nothing if the module already has a state object.
+        """
+        if self.manager.has_module(id):
+            # Do nothing: already being compiled.
+            return True
+
+        if id == 'builtins' and self.manager.pyversion[0] == 2:
+            # The __builtin__ module is referred to internally by mypy as 'builtins' in Python 2
+            # mode (as in Python 3), but the stub file is __builtin__.pyi. The reason is that
+            # a lot of code hard-codes 'builtins.x' and it's easier to work around it like
+            # this. It also means that the implementation can mostly ignore the difference and
+            # just assume 'builtins' everywhere, which simplifies code.
+            file_id = '__builtin__'
+        else:
+            file_id = id
+        path, text = read_module_source_from_file(file_id, self.manager.lib_path,
+                                                  self.manager.pyversion, self.silent)
+        if text is not None:
+            info = StateInfo(path, id, self.errors().import_context(),
+                             self.manager)
+            new_file = UnprocessedFile(info, text)
+            self.manager.states.append(new_file)
+            self.manager.module_files[id] = path
+            new_file.load_dependencies()
+            return True
+        else:
+            return False
+
+    def parse(self, source_text: Union[str, bytes], fnam: str) -> MypyFile:
+        """Parse the source of a file with the given name.
+
+        Raise CompileError if there is a parse error.
+        """
+        num_errs = self.errors().num_messages()
+        tree = parse.parse(source_text, fnam, self.errors(),
+                           pyversion=self.manager.pyversion,
+                           custom_typing_module=self.manager.custom_typing_module,
+                           implicit_any=self.manager.implicit_any)
+        tree._fullname = self.id
+        if self.errors().num_messages() != num_errs:
+            self.errors().raise_error()
+        return tree
+
+    def state(self) -> int:
+        return UNPROCESSED_STATE
+
+
+class ParsedFile(State):
+    tree = None  # type: MypyFile
+
+    def __init__(self, info: StateInfo, tree: MypyFile) -> None:
+        super().__init__(info)
+        self.tree = tree
+
+        # Build a list of all directly imported modules (dependencies).
+        imp = []  # type: List[str]
+        for id, line in self.manager.all_imported_modules_in_file(tree):
+            # Omit missing modules, as otherwise we could not type check
+            # programs with missing modules.
+            if id not in self.manager.missing_modules and id != self.id:
+                imp.append(id)
+        if self.id != 'builtins':
+            imp.append('builtins')
+
+        if imp != []:
+            self.manager.trace('{} dependencies: {}'.format(info.path, imp))
+
+        # Record the dependencies. Note that the dependencies list also
+        # contains any superpackages and we must preserve them (e.g. os for
+        # os.path).
+        self.dependencies.extend(imp)
+
+    def process(self) -> None:
+        """Semantically analyze file and advance to the next state."""
+        self.semantic_analyzer().visit_file(self.tree, self.tree.path)
+        self.switch_state(PartiallySemanticallyAnalyzedFile(self.info(),
+                                                            self.tree))
+
+    def num_incomplete_deps(self) -> int:
+        """Return the number of dependencies that are incomplete.
+
+        Here complete means that their state is *later* than this module's state.
+        Cyclic dependencies are omitted to break cycles forcibly (and somewhat
+        arbitrarily).
+        """
+        incomplete = 0
+        for module in self.dependencies:
+            state = self.manager.module_state(module)
+            if (not earlier_state(self.state(), state) and
+                    not self.manager.is_dep(module, self.id)):
+                incomplete += 1
+        return incomplete
+
+    def state(self) -> int:
+        return PARSED_STATE
+
+
+class PartiallySemanticallyAnalyzedFile(ParsedFile):
+    def process(self) -> None:
+        """Perform final pass of semantic analysis and advance state."""
+        self.semantic_analyzer_pass3().visit_file(self.tree, self.tree.path)
+        if DUMP_TYPE_STATS in self.manager.flags:
+            stats.dump_type_stats(self.tree, self.tree.path)
+        self.switch_state(SemanticallyAnalyzedFile(self.info(), self.tree))
+
+    def state(self) -> int:
+        return PARTIAL_SEMANTIC_ANALYSIS_STATE
+
+
+class SemanticallyAnalyzedFile(ParsedFile):
+    def process(self) -> None:
+        """Type check file and advance to the next state."""
+        if self.manager.target >= TYPE_CHECK:
+            self.type_checker().visit_file(self.tree, self.tree.path)
+            if DUMP_INFER_STATS in self.manager.flags:
+                stats.dump_type_stats(self.tree, self.tree.path, inferred=True,
+                                      typemap=self.manager.type_checker.type_map)
+            self.manager.reports.file(self.tree, type_map=self.manager.type_checker.type_map)
+
+        # FIX remove from active state list to speed up processing
+
+        self.switch_state(TypeCheckedFile(self.info(), self.tree))
+
+    def state(self) -> int:
+        return SEMANTICALLY_ANALYSED_STATE
+
+
+class TypeCheckedFile(SemanticallyAnalyzedFile):
+    def process(self) -> None:
+        """Finished, so cannot process."""
+        raise RuntimeError('Cannot process TypeCheckedFile')
+
+    def is_ready(self) -> bool:
+        """Finished, so cannot ever become ready."""
+        return False
+
+    def state(self) -> int:
+        return TYPE_CHECKED_STATE
+
+
+def read_module_source_from_file(id: str,
+                                 lib_path: Iterable[str],
+                                 pyversion: Tuple[int, int],
+                                 silent: bool) -> Tuple[Optional[str], Optional[str]]:
+    """Find and read the source file of a module.
+
+    Return a pair (path, file contents). Return (None, None) if the module
+    could not be found or read.
+
+    Args:
+      id:       module name, a string of form 'foo' or 'foo.bar'
+      lib_path: library search path
+      pyversion: Python version as a (major, minor) tuple
+      silent:   if set, don't import .py files (only .pyi files)
+    """
+    path = find_module(id, lib_path)
+    if path is not None:
+        if silent and not path.endswith('.pyi'):
+            return None, None
+        try:
+            text = read_with_python_encoding(path, pyversion)
+        except IOError:
+            return None, None
+        return path, text
+    else:
+        return None, None
+
+
+# Cache find_module: (id, lib_path) -> result.
+find_module_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], str]
+
+# Cache some repeated work within distinct find_module calls: finding which
+# elements of lib_path have even the subdirectory they'd need for the module
+# to exist.  This is shared among different module ids when they differ only
+# in the last component.
+find_module_dir_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], List[str]]
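+# Illustrative example (module names are hypothetical): when resolving both
+# 'foo.bar.baz' and 'foo.bar.quux' against the same lib_path, the list of
+# lib_path entries that actually contain a 'foo/bar' subdirectory is computed
+# once and then reused via the ('foo/bar', lib_path) cache key.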
+
+
+def find_module_clear_caches():
+    find_module_cache.clear()
+    find_module_dir_cache.clear()
+
+
+def find_module(id: str, lib_path: Iterable[str]) -> str:
+    """Return the path of the module source file, or None if not found."""
+    if not isinstance(lib_path, tuple):
+        lib_path = tuple(lib_path)
+
+    def find():
+        # If we're looking for a module like 'foo.bar.baz', it's likely that most of the
+        # many elements of lib_path don't even have a subdirectory 'foo/bar'.  Discover
+        # that only once and cache it for when we look for modules like 'foo.bar.blah'
+        # that will require the same subdirectory.
+        components = id.split('.')
+        dir_chain = os.sep.join(components[:-1])  # e.g., 'foo/bar'
+        if (dir_chain, lib_path) not in find_module_dir_cache:
+            dirs = []
+            for pathitem in lib_path:
+                # e.g., '/usr/lib/python3.4/foo/bar'
+                dir = os.path.normpath(os.path.join(pathitem, dir_chain))
+                if os.path.isdir(dir):
+                    dirs.append(dir)
+            find_module_dir_cache[dir_chain, lib_path] = dirs
+        candidate_base_dirs = find_module_dir_cache[dir_chain, lib_path]
+
+        # If we're looking for a module like 'foo.bar.baz', then candidate_base_dirs now
+        # contains just the subdirectories 'foo/bar' that actually exist under the
+        # elements of lib_path.  This is probably much shorter than lib_path itself.
+        # Now just look for 'baz.pyi', 'baz/__init__.py', etc., inside those directories.
+        seplast = os.sep + components[-1]  # so e.g. '/baz'
+        sepinit = os.sep + '__init__'
+        for base_dir in candidate_base_dirs:
+            base_path = base_dir + seplast  # so e.g. '/usr/lib/python3.4/foo/bar/baz'
+            for extension in PYTHON_EXTENSIONS:
+                path = base_path + extension
+                if not os.path.isfile(path):
+                    path = base_path + sepinit + extension
+                if os.path.isfile(path) and verify_module(id, path):
+                    return path
+        return None
+
+    key = (id, lib_path)
+    if key not in find_module_cache:
+        find_module_cache[key] = find()
+    return find_module_cache[key]
+
+
+def find_modules_recursive(module: str, lib_path: List[str]) -> List[BuildSource]:
+    module_path = find_module(module, lib_path)
+    if not module_path:
+        return []
+    result = [BuildSource(module_path, module, None)]
+    if module_path.endswith(('__init__.py', '__init__.pyi')):
+        # Subtle: this code prefers the .pyi over the .py if both
+        # exist, and also prefers packages over modules if both x/
+        # and x.py* exist.  How?  We sort the directory items, so x
+        # comes before x.py and x.pyi.  But the preference for .pyi
+        # over .py is encoded in find_module(); even though we see
+        # x.py before x.pyi, find_module() will find x.pyi first.  We
+        # use hits to avoid adding it a second time when we see x.pyi.
+        # This also avoids both x.py and x.pyi when x/ was seen first.
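+        # Illustrative example (paths are hypothetical): for a lib_path entry
+        # containing pkg/__init__.pyi, pkg/a.pyi and pkg/b/__init__.pyi,
+        # find_modules_recursive('pkg', lib_path) yields sources for
+        # 'pkg', 'pkg.a' and 'pkg.b'.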
+        hits = set()  # type: Set[str]
+        for item in sorted(os.listdir(os.path.dirname(module_path))):
+            abs_path = os.path.join(os.path.dirname(module_path), item)
+            if os.path.isdir(abs_path) and \
+                    (os.path.isfile(os.path.join(abs_path, '__init__.py')) or
+                    os.path.isfile(os.path.join(abs_path, '__init__.pyi'))):
+                hits.add(item)
+                result += find_modules_recursive(module + '.' + item, lib_path)
+            elif item != '__init__.py' and item != '__init__.pyi' and \
+                    item.endswith(('.py', '.pyi')):
+                mod = item.split('.')[0]
+                if mod not in hits:
+                    hits.add(mod)
+                    result += find_modules_recursive(
+                        module + '.' + mod, lib_path)
+    return result
+
+
+def verify_module(id: str, path: str) -> bool:
+    """Check that all packages containing id have a __init__ file."""
+    if path.endswith(('__init__.py', '__init__.pyi')):
+        path = dirname(path)
+    for i in range(id.count('.')):
+        path = dirname(path)
+        if not any(os.path.isfile(os.path.join(path, '__init__{}'.format(extension)))
+                   for extension in PYTHON_EXTENSIONS):
+            return False
+    return True
+
+
+def super_packages(id: str) -> List[str]:
+    """Return the surrounding packages of a module, e.g. ['os'] for os.path."""
+    c = id.split('.')
+    res = []  # type: List[str]
+    for i in range(1, len(c)):
+        res.append('.'.join(c[:i]))
+    return res
+
+
+def make_parent_dirs(path: str) -> None:
+    parent = os.path.dirname(path)
+    try:
+        os.makedirs(parent)
+    except OSError:
+        pass
+
+
+def read_with_python_encoding(path: str, pyversion: Tuple[int, int]) -> str:
+    """Read the Python file with while obeying PEP-263 encoding detection"""
+    source_bytearray = bytearray()
+    encoding = 'utf8' if pyversion[0] >= 3 else 'ascii'
+
+    with open(path, 'rb') as f:
+        # read first two lines and check if PEP-263 coding is present
+        source_bytearray.extend(f.readline())
+        source_bytearray.extend(f.readline())
+
+        # check for BOM UTF-8 encoding and strip it out if present
+        if source_bytearray.startswith(b'\xef\xbb\xbf'):
+            encoding = 'utf8'
+            source_bytearray = source_bytearray[3:]
+        else:
+            _encoding, _ = util.find_python_encoding(source_bytearray, pyversion)
+            # Check that the codec isn't 'mypy'. We skip it since codec
+            # registration may not have happened yet.
+            if _encoding != 'mypy':
+                encoding = _encoding
+
+        source_bytearray.extend(f.read())
+        return source_bytearray.decode(encoding)
diff --git a/mypy/checker.py b/mypy/checker.py
new file mode 100644
index 0000000..6fdc775
--- /dev/null
+++ b/mypy/checker.py
@@ -0,0 +1,2539 @@
+"""Mypy type checker."""
+
+import itertools
+
+from typing import (
+    Any, Dict, Set, List, cast, Tuple, Callable, TypeVar, Union, Optional, NamedTuple
+)
+
+from mypy.errors import Errors, report_internal_error
+from mypy.nodes import (
+    SymbolTable, Node, MypyFile, LDEF, Var,
+    OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo,
+    ClassDef, GDEF, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr,
+    TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt,
+    WhileStmt, OperatorAssignmentStmt, WithStmt, AssertStmt,
+    RaiseStmt, TryStmt, ForStmt, DelStmt, CallExpr, IntExpr, StrExpr,
+    BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, CastExpr, SuperExpr,
+    TypeApplication, DictExpr, SliceExpr, FuncExpr, TempNode, SymbolTableNode,
+    Context, ListComprehension, ConditionalExpr, GeneratorExpr,
+    Decorator, SetExpr, PassStmt, TypeVarExpr, PrintStmt,
+    LITERAL_TYPE, BreakStmt, ContinueStmt, ComparisonExpr, StarExpr,
+    YieldFromExpr, NamedTupleExpr, SetComprehension,
+    DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr,
+    RefExpr, YieldExpr, BackquoteExpr, ImportFrom, ImportAll, ImportBase,
+    CONTRAVARIANT, COVARIANT
+)
+from mypy.nodes import function_type, method_type, method_type_with_fallback
+from mypy import nodes
+from mypy.types import (
+    Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType,
+    Instance, NoneTyp, UnboundType, ErrorType, TypeTranslator, strip_type,
+    UnionType, TypeVarType, PartialType, DeletedType
+)
+from mypy.sametypes import is_same_type
+from mypy.messages import MessageBuilder
+import mypy.checkexpr
+from mypy import defaults
+from mypy import messages
+from mypy.subtypes import (
+    is_subtype, is_equivalent, is_proper_subtype,
+    is_more_precise, restrict_subtype_away
+)
+from mypy.maptype import map_instance_to_supertype
+from mypy.semanal import self_type, set_callable_name, refers_to_fullname
+from mypy.erasetype import erase_typevars
+from mypy.expandtype import expand_type_by_instance, expand_type
+from mypy.visitor import NodeVisitor
+from mypy.join import join_simple, join_types
+from mypy.treetransform import TransformVisitor
+from mypy.meet import meet_simple, nearest_builtin_ancestor, is_overlapping_types
+
+
+T = TypeVar('T')
+
+
+def min_with_None_large(x: T, y: T) -> T:
+    """Return min(x, y) but with  a < None for all variables a that are not None"""
+    if x is None:
+        return y
+    return min(x, x if y is None else y)
+
+
+class Frame(Dict[Any, Type]):
+    pass
+
+
+class Key(AnyType):
+    pass
+
+
+class ConditionalTypeBinder:
+    """Keep track of conditional types of variables."""
+
+    def __init__(self) -> None:
+        self.frames = []  # type: List[Frame]
+        # The first frame is special: it's the declared types of variables.
+        self.frames.append(Frame())
+        # Set of other keys to invalidate if a key is changed.
+        self.dependencies = {}  # type: Dict[Key, Set[Key]]
+        # Set of keys with dependencies added already.
+        self._added_dependencies = set()  # type: Set[Key]
+
+        self.frames_on_escape = {}  # type: Dict[int, List[Frame]]
+
+        self.try_frames = set()  # type: Set[int]
+        self.loop_frames = []  # type: List[int]
+
+    def _add_dependencies(self, key: Key, value: Key = None) -> None:
+        if value is None:
+            value = key
+            if value in self._added_dependencies:
+                return
+            self._added_dependencies.add(value)
+        if isinstance(key, tuple):
+            key = cast(Any, key)   # XXX sad
+            if key != value:
+                self.dependencies[key] = set()
+                self.dependencies.setdefault(key, set()).add(value)
+            for elt in cast(Any, key):
+                self._add_dependencies(elt, value)
+
+    def push_frame(self) -> Frame:
+        d = Frame()
+        self.frames.append(d)
+        return d
+
+    def _push(self, key: Key, type: Type, index: int=-1) -> None:
+        self._add_dependencies(key)
+        self.frames[index][key] = type
+
+    def _get(self, key: Key, index: int=-1) -> Type:
+        if index < 0:
+            index += len(self.frames)
+        for i in range(index, -1, -1):
+            if key in self.frames[i]:
+                return self.frames[i][key]
+        return None
+
+    def push(self, expr: Node, typ: Type) -> None:
+        if not expr.literal:
+            return
+        key = expr.literal_hash
+        self.frames[0][key] = self.get_declaration(expr)
+        self._push(key, typ)
+
+    def get(self, expr: Node) -> Type:
+        return self._get(expr.literal_hash)
+
+    def cleanse(self, expr: Node) -> None:
+        """Remove all references to a Node from the binder."""
+        key = expr.literal_hash
+        for frame in self.frames:
+            if key in frame:
+                del frame[key]
+
+    def update_from_options(self, frames: List[Frame]) -> bool:
+        """Update the frame to reflect that each key will be updated
+        as in one of the frames.  Return whether any item changes.
+
+        If a key is declared as AnyType, only update it if all the
+        options are the same.
+        """
+
+        changed = False
+        keys = set(key for f in frames for key in f)
+
+        for key in keys:
+            current_value = self._get(key)
+            resulting_values = [f.get(key, current_value) for f in frames]
+            if any(x is None for x in resulting_values):
+                continue
+
+            if isinstance(self.frames[0].get(key), AnyType):
+                type = resulting_values[0]
+                if not all(is_same_type(type, t) for t in resulting_values[1:]):
+                    type = AnyType()
+            else:
+                type = resulting_values[0]
+                for other in resulting_values[1:]:
+                    type = join_simple(self.frames[0][key], type, other)
+            if not is_same_type(type, current_value):
+                self._push(key, type)
+                changed = True
+
+        return changed
+
+    def update_expand(self, frame: Frame, index: int = -1) -> bool:
+        """Update frame to include another one, if that other one is larger than the current value.
+
+        Return whether anything changed."""
+        result = False
+
+        for key in frame:
+            old_type = self._get(key, index)
+            if old_type is None:
+                continue
+            replacement = join_simple(self.frames[0][key], old_type, frame[key])
+
+            if not is_same_type(replacement, old_type):
+                self._push(key, replacement, index)
+                result = True
+        return result
+
+    def pop_frame(self, canskip=True, fallthrough=False) -> Tuple[bool, Frame]:
+        """Pop a frame.
+
+        If canskip, then allow types to skip all the inner frame
+        blocks.  That is, changes that happened in the inner frames
+        are not necessarily reflected in the outer frame (for example,
+        an if block that may be skipped).
+
+        If fallthrough, then allow types to escape from the inner
+        frame to the resulting frame.  That is, the state of types at
+        the end of the last frame are allowed to fall through into the
+        enclosing frame.
+
+        Return whether the newly innermost frame was modified since it
+        was last on top, and what it would be if the block had run to
+        completion.
+        """
+        result = self.frames.pop()
+
+        options = self.frames_on_escape.pop(len(self.frames) - 1, [])
+        if canskip:
+            options.append(self.frames[-1])
+        if fallthrough:
+            options.append(result)
+
+        changed = self.update_from_options(options)
+
+        return (changed, result)
+
+    def get_declaration(self, expr: Any) -> Type:
+        if hasattr(expr, 'node') and isinstance(expr.node, Var):
+            type = expr.node.type
+            if isinstance(type, PartialType):
+                return None
+            return type
+        else:
+            return self.frames[0].get(expr.literal_hash)
+
+    def assign_type(self, expr: Node, type: Type,
+                    restrict_any: bool = False) -> None:
+        if not expr.literal:
+            return
+        self.invalidate_dependencies(expr)
+
+        declared_type = self.get_declaration(expr)
+
+        if declared_type is None:
+            # Not sure why this happens.  It seems to mainly happen in
+            # member initialization.
+            return
+        if not is_subtype(type, declared_type):
+            # Pretty sure this only happens when there's a type error.
+
+            # Ideally this function wouldn't be called if the
+            # expression has a type error, though -- do other kinds of
+            # errors cause this function to get called at invalid
+            # times?
+            return
+
+        # If x is Any and y is int, after x = y we do not infer that x is int.
+        # This could be changed.
+        # Eric: I'm changing it in weak typing mode, since Any is so common.
+
+        if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
+                and not restrict_any):
+            pass
+        elif isinstance(type, AnyType):
+            self.push(expr, declared_type)
+        else:
+            self.push(expr, type)
+
+        for i in self.try_frames:
+            # XXX This should probably not copy the entire frame, but
+            # just copy this variable into a single stored frame.
+            self.allow_jump(i)
+
+    def invalidate_dependencies(self, expr: Node) -> None:
+        """Invalidate knowledge of types that include expr, but not expr itself.
+
+        For example, when expr is foo.bar, invalidate foo.bar.baz and
+        foo.bar[0].
+
+        It is overly conservative: it invalidates globally, including
+        in code paths unreachable from here.
+        """
+        for dep in self.dependencies.get(expr.literal_hash, set()):
+            for f in self.frames:
+                if dep in f:
+                    del f[dep]
+
+    def most_recent_enclosing_type(self, expr: Node, type: Type) -> Type:
+        if isinstance(type, AnyType):
+            return self.get_declaration(expr)
+        key = expr.literal_hash
+        enclosers = ([self.get_declaration(expr)] +
+                     [f[key] for f in self.frames
+                      if key in f and is_subtype(type, f[key])])
+        return enclosers[-1]
+
+    def allow_jump(self, index: int) -> None:
+        new_frame = Frame()
+        for f in self.frames[index + 1:]:
+            for k in f:
+                new_frame[k] = f[k]
+
+        self.frames_on_escape.setdefault(index, []).append(new_frame)
+
+    def push_loop_frame(self):
+        self.loop_frames.append(len(self.frames) - 1)
+
+    def pop_loop_frame(self):
+        self.loop_frames.pop()
+
+
+def meet_frames(*frames: Frame) -> Frame:
+    answer = Frame()
+    for f in frames:
+        for key in f:
+            if key in answer:
+                answer[key] = meet_simple(answer[key], f[key])
+            else:
+                answer[key] = f[key]
+    return answer
+
+
+# A node which is postponed to be type checked during the next pass.
+DeferredNode = NamedTuple(
+    'DeferredNode',
+    [
+        ('node', Node),
+        ('context_type_name', Optional[str]),  # Name of the surrounding class (for error messages)
+    ])
+
+
+class TypeChecker(NodeVisitor[Type]):
+    """Mypy type checker.
+
+    Type check mypy source files that have been semantically analyzed.
+    """
+
+    # Target Python version
+    pyversion = defaults.PYTHON3_VERSION
+    # Are we type checking a stub?
+    is_stub = False
+    # Error message reporter
+    errors = None  # type: Errors
+    # SymbolNode table for the whole program
+    symtable = None  # type: SymbolTable
+    # Utility for generating messages
+    msg = None  # type: MessageBuilder
+    # Types of type checked nodes
+    type_map = None  # type: Dict[Node, Type]
+
+    # Helper for managing conditional types
+    binder = None  # type: ConditionalTypeBinder
+    # Helper for type checking expressions
+    expr_checker = None  # type: mypy.checkexpr.ExpressionChecker
+
+    # Stack of function return types
+    return_types = None  # type: List[Type]
+    # Type context for type inference
+    type_context = None  # type: List[Type]
+    # Flags; true for dynamically typed functions
+    dynamic_funcs = None  # type: List[bool]
+    # Stack of functions being type checked
+    function_stack = None  # type: List[FuncItem]
+    # Set to True on return/break/raise, False on blocks that can block any of them
+    breaking_out = False
+    # Do weak type checking in this file
+    weak_opts = set()        # type: Set[str]
+    # Stack of collections of variables with partial types
+    partial_types = None  # type: List[Dict[Var, Context]]
+    globals = None  # type: SymbolTable
+    locals = None  # type: SymbolTable
+    modules = None  # type: Dict[str, MypyFile]
+    # Nodes that couldn't be checked because some types weren't available. We'll run
+    # another pass and try these again.
+    deferred_nodes = None  # type: List[DeferredNode]
+    # Type checking pass number (0 = first pass)
+    pass_num = 0
+    # Have we deferred the current function? If yes, don't infer additional
+    # types during this pass within the function.
+    current_node_deferred = False
+
+    def __init__(self, errors: Errors, modules: Dict[str, MypyFile],
+                 pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+        """Construct a type checker.
+
+        Use errors to report type check errors. Assume symtable has been
+        populated by the semantic analyzer.
+        """
+        self.errors = errors
+        self.modules = modules
+        self.pyversion = pyversion
+        self.msg = MessageBuilder(errors, modules)
+        self.type_map = {}
+        self.binder = ConditionalTypeBinder()
+        self.binder.push_frame()
+        self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg)
+        self.return_types = []
+        self.type_context = []
+        self.dynamic_funcs = []
+        self.function_stack = []
+        self.weak_opts = set()  # type: Set[str]
+        self.partial_types = []
+        self.deferred_nodes = []
+        self.pass_num = 0
+        self.current_node_deferred = False
+
+    def visit_file(self, file_node: MypyFile, path: str) -> None:
+        """Type check a mypy file with the given path."""
+        self.pass_num = 0
+        self.is_stub = file_node.is_stub
+        self.errors.set_file(path)
+        self.errors.set_ignored_lines(file_node.ignored_lines)
+        self.globals = file_node.names
+        self.locals = None
+        self.weak_opts = file_node.weak_opts
+        self.enter_partial_types()
+
+        for d in file_node.defs:
+            self.accept(d)
+
+        self.leave_partial_types()
+
+        if self.deferred_nodes:
+            self.check_second_pass()
+
+        self.errors.set_ignored_lines(set())
+        self.current_node_deferred = False
+
+    def check_second_pass(self):
+        """Run second pass of type checking which goes through deferred nodes."""
+        self.pass_num = 1
+        for node, type_name in self.deferred_nodes:
+            if type_name:
+                self.errors.push_type(type_name)
+            self.accept(node)
+            if type_name:
+                self.errors.pop_type()
+        self.deferred_nodes = []
+
+    def handle_cannot_determine_type(self, name: str, context: Context) -> None:
+        if self.pass_num == 0 and self.function_stack:
+            # Don't report an error yet. Just defer.
+            node = self.function_stack[-1]
+            if self.errors.type_name:
+                type_name = self.errors.type_name[-1]
+            else:
+                type_name = None
+            self.deferred_nodes.append(DeferredNode(node, type_name))
+            # Set a marker so that we won't infer additional types in this
+            # function. Any inferred types could be bogus, because there's at
+            # least one type that we don't know.
+            self.current_node_deferred = True
+        else:
+            self.msg.cannot_determine_type(name, context)
+
+    def accept(self, node: Node, type_context: Type = None) -> Type:
+        """Type check a node in the given type context."""
+        self.type_context.append(type_context)
+        try:
+            typ = node.accept(self)
+        except Exception as err:
+            report_internal_error(err, self.errors.file, node.line)
+        self.type_context.pop()
+        self.store_type(node, typ)
+        if self.typing_mode_none():
+            return AnyType()
+        else:
+            return typ
+
+    def accept_in_frame(self, node: Node, type_context: Type = None,
+                        repeat_till_fixed: bool = False) -> Type:
+        """Type check a node in the given type context in a new frame of inferred types."""
+        while True:
+            self.binder.push_frame()
+            answer = self.accept(node, type_context)
+            changed, _ = self.binder.pop_frame(True, True)
+            self.breaking_out = False
+            if not repeat_till_fixed or not changed:
+                break
+
+        return answer
+
+    #
+    # Definitions
+    #
+
+    def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> Type:
+        num_abstract = 0
+        if defn.is_property:
+            # HACK: Infer the type of the property.
+            self.visit_decorator(defn.items[0])
+        for fdef in defn.items:
+            self.check_func_item(fdef.func, name=fdef.func.name())
+            if fdef.func.is_abstract:
+                num_abstract += 1
+        if num_abstract not in (0, len(defn.items)):
+            self.fail(messages.INCONSISTENT_ABSTRACT_OVERLOAD, defn)
+        if defn.info:
+            self.check_method_override(defn)
+            self.check_inplace_operator_method(defn)
+        self.check_overlapping_overloads(defn)
+
+    def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
+        for i, item in enumerate(defn.items):
+            for j, item2 in enumerate(defn.items[i + 1:]):
+                # TODO overloads involving decorators
+                sig1 = self.function_type(item.func)
+                sig2 = self.function_type(item2.func)
+                if is_unsafe_overlapping_signatures(sig1, sig2):
+                    self.msg.overloaded_signatures_overlap(i + 1, j + 2,
+                                                           item.func)
+
+    def is_generator_return_type(self, typ: Type) -> bool:
+        return is_subtype(self.named_generic_type('typing.Generator',
+                                                  [AnyType(), AnyType(), AnyType()]),
+                          typ)
+
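+    # The three helpers below read the type arguments of typing.Generator:
+    # for a function declared to return Generator[int, str, bool] (an
+    # illustrative example), the yield type is int, the receive (send) type
+    # is str, and the 'yield from' return type is bool.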
+    def get_generator_yield_type(self, return_type: Type) -> Type:
+        if isinstance(return_type, AnyType):
+            return AnyType()
+        elif not self.is_generator_return_type(return_type):
+            # If the function doesn't have a proper Generator (or superclass) return type, anything
+            # is permissible.
+            return AnyType()
+        elif not isinstance(return_type, Instance):
+            # Same as above, but written as a separate branch so the typechecker can understand.
+            return AnyType()
+        elif return_type.args:
+            return return_type.args[0]
+        else:
+            # If the function's declared supertype of Generator has no type
+            # parameters (i.e. is `object`), then the yielded values can't
+            # be accessed so any type is acceptable.
+            return AnyType()
+
+    def get_generator_receive_type(self, return_type: Type) -> Type:
+        if isinstance(return_type, AnyType):
+            return AnyType()
+        elif not self.is_generator_return_type(return_type):
+            # If the function doesn't have a proper Generator (or superclass) return type, anything
+            # is permissible.
+            return AnyType()
+        elif not isinstance(return_type, Instance):
+            # Same as above, but written as a separate branch so the typechecker can understand.
+            return AnyType()
+        elif return_type.type.fullname() == 'typing.Generator':
+            # Generator is the only type which specifies the type of values it can receive.
+            return return_type.args[1]
+        else:
+            # `return_type` is a supertype of Generator, so callers won't be able to send it
+            # values.
+            return Void()
+
+    def get_generator_return_type(self, return_type: Type) -> Type:
+        if isinstance(return_type, AnyType):
+            return AnyType()
+        elif not self.is_generator_return_type(return_type):
+            # If the function doesn't have a proper Generator (or superclass) return type, anything
+            # is permissible.
+            return AnyType()
+        elif not isinstance(return_type, Instance):
+            # Same as above, but written as a separate branch so the typechecker can understand.
+            return AnyType()
+        elif return_type.type.fullname() == 'typing.Generator':
+            # Generator is the only type which specifies the type of values it returns into
+            # `yield from` expressions.
+            return return_type.args[2]
+        else:
+            # `return_type` is supertype of Generator, so callers won't be able to see the return
+            # type when used in a `yield from` expression.
+            return AnyType()
+
+    def visit_func_def(self, defn: FuncDef) -> Type:
+        """Type check a function definition."""
+        self.check_func_item(defn, name=defn.name())
+        if defn.info:
+            if not defn.is_dynamic():
+                self.check_method_override(defn)
+            self.check_inplace_operator_method(defn)
+        if defn.original_def:
+            # Override previous definition.
+            new_type = self.function_type(defn)
+            if isinstance(defn.original_def, FuncDef):
+                # Function definition overrides function definition.
+                if not is_same_type(new_type, self.function_type(defn.original_def)):
+                    self.msg.incompatible_conditional_function_def(defn)
+            else:
+                # Function definition overrides a variable initialized via assignment.
+                orig_type = defn.original_def.type
+                if isinstance(orig_type, PartialType):
+                    if orig_type.type is None:
+                        # Ah this is a partial type. Give it the type of the function.
+                        var = defn.original_def
+                        partial_types = self.find_partial_types(var)
+                        if partial_types is not None:
+                            var.type = new_type
+                            del partial_types[var]
+                    else:
+                        # Trying to redefine something like partial empty list as function.
+                        self.fail(messages.INCOMPATIBLE_REDEFINITION, defn)
+                else:
+                    # TODO: Update conditional type binder.
+                    self.check_subtype(orig_type, new_type, defn,
+                                       messages.INCOMPATIBLE_REDEFINITION,
+                                       'original type',
+                                       'redefinition with type')
+
+    def check_func_item(self, defn: FuncItem,
+                        type_override: CallableType = None,
+                        name: str = None) -> Type:
+        """Type check a function.
+
+        If type_override is provided, use it as the function type.
+        """
+        # We may be checking a function definition or an anonymous function. In
+        # the first case, set up another reference with the precise type.
+        fdef = None  # type: FuncDef
+        if isinstance(defn, FuncDef):
+            fdef = defn
+
+        self.function_stack.append(defn)
+        self.dynamic_funcs.append(defn.is_dynamic() and not type_override)
+
+        if fdef:
+            self.errors.push_function(fdef.name())
+
+        self.enter_partial_types()
+
+        typ = self.function_type(defn)
+        if type_override:
+            typ = type_override
+        if isinstance(typ, CallableType):
+            self.check_func_def(defn, typ, name)
+        else:
+            raise RuntimeError('Not supported')
+
+        self.leave_partial_types()
+
+        if fdef:
+            self.errors.pop_function()
+
+        self.dynamic_funcs.pop()
+        self.function_stack.pop()
+        self.current_node_deferred = False
+
+    def check_func_def(self, defn: FuncItem, typ: CallableType, name: str) -> None:
+        """Type check a function definition."""
+        # Expand type variables with value restrictions to ordinary types.
+        for item, typ in self.expand_typevars(defn, typ):
+            old_binder = self.binder
+            self.binder = ConditionalTypeBinder()
+            self.binder.push_frame()
+            defn.expanded.append(item)
+
+            # We may be checking a function definition or an anonymous
+            # function. In the first case, set up another reference with the
+            # precise type.
+            if isinstance(item, FuncDef):
+                fdef = item
+            else:
+                fdef = None
+
+            self.enter()
+
+            if fdef:
+                # Check if __init__ has an invalid, non-None return type.
+                if (fdef.info and fdef.name() == '__init__' and
+                        not isinstance(typ.ret_type, Void) and
+                        not self.dynamic_funcs[-1]):
+                    self.fail(messages.INIT_MUST_HAVE_NONE_RETURN_TYPE,
+                              item.type)
+
+            if name in nodes.reverse_op_method_set:
+                self.check_reverse_op_method(item, typ, name)
+            elif name == '__getattr__':
+                self.check_getattr_method(typ, defn)
+
+            # Refuse contravariant return type variable
+            if isinstance(typ.ret_type, TypeVarType):
+                if typ.ret_type.variance == CONTRAVARIANT:
+                    self.fail(messages.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT,
+                              typ.ret_type)
+
+            # Check that Generator functions have the appropriate return type.
+            if defn.is_generator:
+                if not self.is_generator_return_type(typ.ret_type):
+                    self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ)
+
+                # Python 2 generators aren't allowed to return values.
+                if (self.pyversion[0] == 2 and
+                        isinstance(typ.ret_type, Instance) and
+                        typ.ret_type.type.fullname() == 'typing.Generator'):
+                    if not (isinstance(typ.ret_type.args[2], Void)
+                            or isinstance(typ.ret_type.args[2], AnyType)):
+                        self.fail(messages.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ)
+
+            # Push return type.
+            self.return_types.append(typ.ret_type)
+
+            # Store argument types.
+            for i in range(len(typ.arg_types)):
+                arg_type = typ.arg_types[i]
+
+                # Refuse covariant parameter type variables
+                if isinstance(arg_type, TypeVarType):
+                    if arg_type.variance == COVARIANT:
+                        self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT,
+                                  arg_type)
+
+                if typ.arg_kinds[i] == nodes.ARG_STAR:
+                    # builtins.tuple[T] is typing.Tuple[T, ...]
+                    arg_type = self.named_generic_type('builtins.tuple',
+                                                       [arg_type])
+                elif typ.arg_kinds[i] == nodes.ARG_STAR2:
+                    arg_type = self.named_generic_type('builtins.dict',
+                                                       [self.str_type(),
+                                                        arg_type])
+                item.arguments[i].variable.type = arg_type
+
+            # Type check initialization expressions.
+            for arg in item.arguments:
+                init = arg.initialization_statement
+                if init:
+                    self.accept(init)
+
+            # Clear out the default assignments from the binder
+            self.binder.pop_frame()
+            self.binder.push_frame()
+            # Type check body in a new scope.
+            self.accept_in_frame(item.body)
+
+            self.return_types.pop()
+
+            self.leave()
+            self.binder = old_binder
+
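+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Examples of the per-item checks performed above:
+    #
+    #     class A:
+    #         def __init__(self) -> int: ...    # error: __init__ must not return a value
+    #
+    #     def gen() -> int:                     # error: invalid return type for generator
+    #         yield 1
+    #
+    #     def f(*args: int, **kw: str) -> None: ...
+    #     # inside f, 'args' is typed as builtins.tuple[int] and
+    #     # 'kw' as builtins.dict[str, str]
+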
+    def check_reverse_op_method(self, defn: FuncItem, typ: CallableType,
+                                method: str) -> None:
+        """Check a reverse operator method such as __radd__."""
+
+        # This used to check for some very obscure scenario.  It now
+        # just decides whether it's worth calling
+        # check_overlapping_op_methods().
+
+        if method in ('__eq__', '__ne__'):
+            # These are defined for all objects => can't cause trouble.
+            return
+
+        # With 'Any' or 'object' return type we are happy, since any possible
+        # return value is valid.
+        ret_type = typ.ret_type
+        if isinstance(ret_type, AnyType):
+            return
+        if isinstance(ret_type, Instance):
+            if ret_type.type.fullname() == 'builtins.object':
+                return
+        # Plausibly the method could have too few arguments, which would result
+        # in an error elsewhere.
+        if len(typ.arg_types) <= 2:
+            # TODO check self argument kind
+
+            # Check for the issue described above.
+            arg_type = typ.arg_types[1]
+            other_method = nodes.normal_from_reverse_op[method]
+            if isinstance(arg_type, Instance):
+                if not arg_type.type.has_readable_member(other_method):
+                    return
+            elif isinstance(arg_type, AnyType):
+                return
+            elif isinstance(arg_type, UnionType):
+                if not arg_type.has_readable_member(other_method):
+                    return
+            else:
+                return
+
+            typ2 = self.expr_checker.analyze_external_member_access(
+                other_method, arg_type, defn)
+            self.check_overlapping_op_methods(
+                typ, method, defn.info,
+                typ2, other_method, cast(Instance, arg_type),
+                defn)
+
+    def check_overlapping_op_methods(self,
+                                     reverse_type: CallableType,
+                                     reverse_name: str,
+                                     reverse_class: TypeInfo,
+                                     forward_type: Type,
+                                     forward_name: str,
+                                     forward_base: Instance,
+                                     context: Context) -> None:
+        """Check for overlapping method and reverse method signatures.
+
+        Assume reverse method has valid argument count and kinds.
+        """
+
+        # Reverse operator method that overlaps unsafely with the
+        # forward operator method can result in type unsafety. This is
+        # similar to overlapping overload variants.
+        #
+        # This example illustrates the issue:
+        #
+        #   class X: pass
+        #   class A:
+        #       def __add__(self, x: X) -> int:
+        #           if isinstance(x, X):
+        #               return 1
+        #           return NotImplemented
+        #   class B:
+        #       def __radd__(self, x: A) -> str: return 'x'
+        #   class C(X, B): pass
+        #   def f(b: B) -> None:
+        #       A() + b # Result is 1, even though static type seems to be str!
+        #   f(C())
+        #
+        # The reason for the problem is that B and X are overlapping
+        # types, and the return types are different. Also, if the type
+        # of x in __radd__ would not be A, the methods could be
+        # non-overlapping.
+
+        if isinstance(forward_type, CallableType):
+            # TODO check argument kinds
+            if len(forward_type.arg_types) < 1:
+                # Not a valid operator method -- can't succeed anyway.
+                return
+
+            # Construct normalized function signatures corresponding to the
+            # operator methods. The first argument is the left operand and the
+            # second operand is the right argument -- we switch the order of
+            # the arguments of the reverse method.
+            forward_tweaked = CallableType(
+                [forward_base, forward_type.arg_types[0]],
+                [nodes.ARG_POS] * 2,
+                [None] * 2,
+                forward_type.ret_type,
+                forward_type.fallback,
+                name=forward_type.name)
+            reverse_args = reverse_type.arg_types
+            reverse_tweaked = CallableType(
+                [reverse_args[1], reverse_args[0]],
+                [nodes.ARG_POS] * 2,
+                [None] * 2,
+                reverse_type.ret_type,
+                fallback=self.named_type('builtins.function'),
+                name=reverse_type.name)
+
+            if is_unsafe_overlapping_signatures(forward_tweaked,
+                                                reverse_tweaked):
+                self.msg.operator_method_signatures_overlap(
+                    reverse_class.name(), reverse_name,
+                    forward_base.type.name(), forward_name, context)
+        elif isinstance(forward_type, Overloaded):
+            for item in forward_type.items():
+                self.check_overlapping_op_methods(
+                    reverse_type, reverse_name, reverse_class,
+                    item, forward_name, forward_base, context)
+        elif not isinstance(forward_type, AnyType):
+            self.msg.forward_operator_not_callable(forward_name, context)
+
+    def check_inplace_operator_method(self, defn: FuncBase) -> None:
+        """Check an inplace operator method such as __iadd__.
+
+        They cannot arbitrarily overlap with __add__.
+        """
+        method = defn.name()
+        if method not in nodes.inplace_operator_methods:
+            return
+        typ = self.method_type(defn)
+        cls = defn.info
+        other_method = '__' + method[3:]
+        if cls.has_readable_member(other_method):
+            instance = self_type(cls)
+            typ2 = self.expr_checker.analyze_external_member_access(
+                other_method, instance, defn)
+            fail = False
+            if isinstance(typ2, FunctionLike):
+                if not is_more_general_arg_prefix(typ, typ2):
+                    fail = True
+            else:
+                # TODO overloads
+                fail = True
+            if fail:
+                self.msg.signatures_incompatible(method, other_method, defn)
+
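+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # __iadd__ must accept at least what __add__ accepts, so this is flagged:
+    #
+    #     class A:
+    #         def __add__(self, other: object) -> 'A': ...
+    #         def __iadd__(self, other: 'A') -> 'A': ...   # error: signatures of
+    #                                                      # __iadd__ and __add__ incompatible
+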
+    def check_getattr_method(self, typ: CallableType, context: Context) -> None:
+        method_type = CallableType([AnyType(), self.named_type('builtins.str')],
+                                   [nodes.ARG_POS, nodes.ARG_POS],
+                                   [None, None],
+                                   AnyType(),
+                                   self.named_type('builtins.function'))
+        if not is_subtype(typ, method_type):
+            self.msg.invalid_signature(typ, context)
+
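+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # __getattr__ must be compatible with (self, name: str) -> Any:
+    #
+    #     class A:
+    #         def __getattr__(self, name: int) -> int: ...   # error: invalid signature
+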
+    def expand_typevars(self, defn: FuncItem,
+                        typ: CallableType) -> List[Tuple[FuncItem, CallableType]]:
+        # TODO use generator
+        subst = []  # type: List[List[Tuple[int, Type]]]
+        tvars = typ.variables or []
+        tvars = tvars[:]
+        if defn.info:
+            # Class type variables
+            tvars += defn.info.defn.type_vars or []
+        for tvar in tvars:
+            if tvar.values:
+                subst.append([(tvar.id, value)
+                              for value in tvar.values])
+        if subst:
+            result = []  # type: List[Tuple[FuncItem, CallableType]]
+            for substitutions in itertools.product(*subst):
+                mapping = dict(substitutions)
+                expanded = cast(CallableType, expand_type(typ, mapping))
+                result.append((expand_func(defn, mapping), expanded))
+            return result
+        else:
+            return [(defn, typ)]
+
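+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # For a value-restricted type variable the body is checked once per value,
+    # roughly as if
+    #
+    #     AnyStr = TypeVar('AnyStr', str, bytes)
+    #     def concat(x: AnyStr, y: AnyStr) -> AnyStr: ...
+    #
+    # were expanded into separate (str, str) -> str and (bytes, bytes) -> bytes
+    # copies before checking.
+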
+    def check_method_override(self, defn: FuncBase) -> None:
+        """Check if function definition is compatible with base classes."""
+        # Check against definitions in base classes.
+        for base in defn.info.mro[1:]:
+            self.check_method_or_accessor_override_for_base(defn, base)
+
+    def check_method_or_accessor_override_for_base(self, defn: FuncBase,
+                                                   base: TypeInfo) -> None:
+        """Check if method definition is compatible with a base class."""
+        if base:
+            name = defn.name()
+            if name not in ('__init__', '__new__'):
+                # Check method override (__init__ and __new__ are special).
+                self.check_method_override_for_base_with_name(defn, name, base)
+                if name in nodes.inplace_operator_methods:
+                    # Figure out the name of the corresponding operator method.
+                    method = '__' + name[3:]
+                    # An in-place operator method such as __iadd__ might not
+                    # always be introduced safely if a base class defined __add__.
+                    # TODO can't come up with an example where this is
+                    #      necessary; now it's "just in case"
+                    self.check_method_override_for_base_with_name(defn, method,
+                                                                  base)
+
+    def check_method_override_for_base_with_name(
+            self, defn: FuncBase, name: str, base: TypeInfo) -> None:
+        base_attr = base.names.get(name)
+        if base_attr:
+            # The name of the method is defined in the base class.
+
+            # Construct the type of the overriding method.
+            typ = self.method_type(defn)
+            # Map the overridden method type to subtype context so that
+            # it can be checked for compatibility.
+            original_type = base_attr.type
+            if original_type is None and isinstance(base_attr.node,
+                                                    FuncDef):
+                original_type = self.function_type(cast(FuncDef,
+                                                        base_attr.node))
+            if isinstance(original_type, FunctionLike):
+                original = map_type_from_supertype(
+                    method_type(original_type),
+                    defn.info, base)
+                # Check that the types are compatible.
+                # TODO overloaded signatures
+                self.check_override(cast(FunctionLike, typ),
+                                    cast(FunctionLike, original),
+                                    defn.name(),
+                                    name,
+                                    base.name(),
+                                    defn)
+            else:
+                assert original_type is not None
+                self.msg.signature_incompatible_with_supertype(
+                    defn.name(), name, base.name(), defn)
+
+    def check_override(self, override: FunctionLike, original: FunctionLike,
+                       name: str, name_in_super: str, supertype: str,
+                       node: Context) -> None:
+        """Check a method override with given signatures.
+
+        Arguments:
+          override:  The signature of the overriding method.
+          original:  The signature of the original supertype method.
+          name:      The name of the overriding method. This and the next two
+                     arguments are only used for generating error messages.
+          name_in_super: The name of the overridden method in the supertype.
+          supertype: The name of the supertype.
+        """
+        if (isinstance(override, Overloaded) or
+                isinstance(original, Overloaded) or
+                len(cast(CallableType, override).arg_types) !=
+                len(cast(CallableType, original).arg_types) or
+                cast(CallableType, override).min_args !=
+                cast(CallableType, original).min_args):
+            # Use boolean variable to clarify code.
+            fail = False
+            if not is_subtype(override, original):
+                fail = True
+            elif (not isinstance(original, Overloaded) and
+                  isinstance(override, Overloaded) and
+                  name in nodes.reverse_op_methods.keys()):
+                # Operator method overrides cannot introduce overloading, as
+                # this could be unsafe with reverse operator methods.
+                fail = True
+            if fail:
+                self.msg.signature_incompatible_with_supertype(
+                    name, name_in_super, supertype, node)
+            return
+        else:
+            # Give more detailed messages for the common case of both
+            # signatures having the same number of arguments and no
+            # overloads.
+
+            coverride = cast(CallableType, override)
+            coriginal = cast(CallableType, original)
+
+            for i in range(len(coverride.arg_types)):
+                if not is_subtype(coriginal.arg_types[i],
+                                  coverride.arg_types[i]):
+                    self.msg.argument_incompatible_with_supertype(
+                        i + 1, name, name_in_super, supertype, node)
+
+            if not is_subtype(coverride.ret_type, coriginal.ret_type):
+                self.msg.return_type_incompatible_with_supertype(
+                    name, name_in_super, supertype, node)
+
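+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Overrides must accept at least the original argument types and may only
+    # narrow the return type:
+    #
+    #     class Base:
+    #         def f(self, x: object) -> object: ...
+    #     class Sub(Base):
+    #         def f(self, x: int) -> object: ...   # error: argument 1 incompatible
+    #                                              # with supertype "Base"
+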
+    def visit_class_def(self, defn: ClassDef) -> Type:
+        """Type check a class definition."""
+        typ = defn.info
+        self.errors.push_type(defn.name)
+        self.enter_partial_types()
+        old_binder = self.binder
+        self.binder = ConditionalTypeBinder()
+        self.binder.push_frame()
+        self.accept(defn.defs)
+        self.binder = old_binder
+        self.check_multiple_inheritance(typ)
+        self.leave_partial_types()
+        self.errors.pop_type()
+
+    def check_multiple_inheritance(self, typ: TypeInfo) -> None:
+        """Check for multiple inheritance related errors."""
+
+        if len(typ.bases) <= 1:
+            # No multiple inheritance.
+            return
+        # Verify that inherited attributes are compatible.
+        mro = typ.mro[1:]
+        for i, base in enumerate(mro):
+            for name in base.names:
+                for base2 in mro[i + 1:]:
+                    # We only need to check compatibility of attributes from classes not
+                    # in a subclass relationship. For subclasses, normal (single inheritance)
+                    # checks suffice (these are implemented elsewhere).
+                    if name in base2.names and base2 not in base.mro:
+                        self.check_compatibility(name, base, base2, typ)
+        # Verify that base class layouts are compatible.
+        builtin_bases = [nearest_builtin_ancestor(base.type)
+                         for base in typ.bases]
+        for base1 in builtin_bases:
+            for base2 in builtin_bases:
+                if not (base1 in base2.mro or base2 in base1.mro):
+                    self.fail(messages.INSTANCE_LAYOUT_CONFLICT, typ)
+
+    def check_compatibility(self, name: str, base1: TypeInfo,
+                            base2: TypeInfo, ctx: Context) -> None:
+        """Check if attribute name in base1 is compatible with base2 in multiple inheritance.
+
+        Assume base1 comes before base2 in the MRO, and that base1 and base2 don't have
+        a direct subclass relationship (i.e., the compatibility requirement only derives from
+        multiple inheritance).
+        """
+        if name == '__init__':
+            # __init__ can be incompatible -- it's a special case.
+            return
+        first = base1[name]
+        second = base2[name]
+        first_type = first.type
+        if first_type is None and isinstance(first.node, FuncDef):
+            first_type = self.function_type(cast(FuncDef, first.node))
+        second_type = second.type
+        if second_type is None and isinstance(second.node, FuncDef):
+            second_type = self.function_type(cast(FuncDef, second.node))
+        # TODO: What if some classes are generic?
+        if (isinstance(first_type, FunctionLike) and
+                isinstance(second_type, FunctionLike)):
+            # Method override
+            first_sig = method_type(cast(FunctionLike, first_type))
+            second_sig = method_type(cast(FunctionLike, second_type))
+            ok = is_subtype(first_sig, second_sig)
+        elif first_type and second_type:
+            ok = is_equivalent(first_type, second_type)
+        else:
+            if first_type is None:
+                self.msg.cannot_determine_type_in_base(name, base1.name(), ctx)
+            if second_type is None:
+                self.msg.cannot_determine_type_in_base(name, base2.name(), ctx)
+            ok = True
+        if not ok:
+            self.msg.base_class_definitions_incompatible(name, base1, base2,
+                                                         ctx)
+
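+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # With multiple inheritance, definitions inherited for the same name from
+    # unrelated bases must be compatible:
+    #
+    #     class A:
+    #         def f(self) -> int: ...
+    #     class B:
+    #         def f(self) -> str: ...
+    #     class C(A, B): ...   # error: definitions of "f" in A and B are incompatible
+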
+    def visit_import_from(self, node: ImportFrom) -> Type:
+        self.check_import(node)
+
+    def visit_import_all(self, node: ImportAll) -> Type:
+        self.check_import(node)
+
+    def check_import(self, node: ImportBase) -> Type:
+        for assign in node.assignments:
+            lvalue = assign.lvalues[0]
+            lvalue_type, _, __ = self.check_lvalue(lvalue)
+            if lvalue_type is None:
+                # TODO: This is broken.
+                lvalue_type = AnyType()
+            message = '{} "{}"'.format(messages.INCOMPATIBLE_IMPORT_OF,
+                                       cast(NameExpr, assign.rvalue).name)
+            self.check_simple_assignment(lvalue_type, assign.rvalue, node,
+                                         msg=message, lvalue_name='local name',
+                                         rvalue_name='imported name')
+
+    #
+    # Statements
+    #
+
+    def visit_block(self, b: Block) -> Type:
+        if b.is_unreachable:
+            return None
+        for s in b.body:
+            self.accept(s)
+            if self.breaking_out:
+                break
+
+    def visit_assignment_stmt(self, s: AssignmentStmt) -> Type:
+        """Type check an assignment statement.
+
+        Handle all kinds of assignment statements (simple, indexed, multiple).
+        """
+        self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None)
+
+        if len(s.lvalues) > 1:
+            # Chained assignment (e.g. x = y = ...).
+            # Make sure that rvalue type will not be reinferred.
+            rvalue = self.temp_node(self.type_map[s.rvalue], s)
+            for lv in s.lvalues[:-1]:
+                self.check_assignment(lv, rvalue, s.type is None)
+
+    def check_assignment(self, lvalue: Node, rvalue: Node, infer_lvalue_type: bool = True) -> None:
+        """Type check a single assignment: lvalue = rvalue."""
+        if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
+            ltuple = cast(Union[TupleExpr, ListExpr], lvalue)
+
+            self.check_assignment_to_multiple_lvalues(ltuple.items, rvalue, lvalue,
+                                                      infer_lvalue_type)
+        else:
+            lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue)
+            if lvalue_type:
+                if isinstance(lvalue_type, PartialType) and lvalue_type.type is None:
+                    # Try to infer a proper type for a variable with a partial None type.
+                    rvalue_type = self.accept(rvalue)
+                    if isinstance(rvalue_type, NoneTyp):
+                        # This doesn't actually provide any additional information -- multiple
+                        # None initializers preserve the partial None type.
+                        return
+                    if is_valid_inferred_type(rvalue_type):
+                        var = lvalue_type.var
+                        partial_types = self.find_partial_types(var)
+                        if partial_types is not None:
+                            if not self.current_node_deferred:
+                                var.type = rvalue_type
+                            else:
+                                var.type = None
+                            del partial_types[var]
+                    # Try to infer a partial type. No need to check the return value, as
+                    # an error will be reported elsewhere.
+                    self.infer_partial_type(lvalue_type.var, lvalue, rvalue_type)
+                    return
+                rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
+
+                if rvalue_type and infer_lvalue_type:
+                    self.binder.assign_type(lvalue, rvalue_type,
+                                            self.typing_mode_weak())
+            elif index_lvalue:
+                self.check_indexed_assignment(index_lvalue, rvalue, rvalue)
+
+            if inferred:
+                self.infer_variable_type(inferred, lvalue, self.accept(rvalue),
+                                         rvalue)
+
+    def check_assignment_to_multiple_lvalues(self, lvalues: List[Node], rvalue: Node,
+                                             context: Context,
+                                             infer_lvalue_type: bool = True) -> None:
+        if isinstance(rvalue, TupleExpr) or isinstance(rvalue, ListExpr):
+            # Recursively go into Tuple or List expression rhs instead of
+            # using the type of rhs, because this allows more fine-grained
+            # control in cases like a, b = [int, str], where the rhs would
+            # otherwise get type List[object].
+
+            rvalues = cast(Union[TupleExpr, ListExpr], rvalue).items
+
+            if self.check_rvalue_count_in_assignment(lvalues, len(rvalues), context):
+                star_index = next((i for i, lv in enumerate(lvalues) if
+                                   isinstance(lv, StarExpr)), len(lvalues))
+
+                left_lvs = lvalues[:star_index]
+                star_lv = cast(StarExpr,
+                               lvalues[star_index]) if star_index != len(lvalues) else None
+                right_lvs = lvalues[star_index + 1:]
+
+                left_rvs, star_rvs, right_rvs = self.split_around_star(
+                    rvalues, star_index, len(lvalues))
+
+                lr_pairs = list(zip(left_lvs, left_rvs))
+                if star_lv:
+                    rv_list = ListExpr(star_rvs)
+                    rv_list.set_line(rvalue.get_line())
+                    lr_pairs.append((star_lv.expr, rv_list))
+                lr_pairs.extend(zip(right_lvs, right_rvs))
+
+                for lv, rv in lr_pairs:
+                    self.check_assignment(lv, rv, infer_lvalue_type)
+        else:
+            self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type)
+
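+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Tuple/list rvalues are matched to the targets item by item, including a
+    # starred target:
+    #
+    #     a, *b, c = 1, 2, 3, 4   # a: int, b: List[int], c: int
+    #     x, y = 1, 2, 3          # error: too many values to unpack
+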
+    def check_rvalue_count_in_assignment(self, lvalues: List[Node], rvalue_count: int,
+                                         context: Context) -> bool:
+        if any(isinstance(lvalue, StarExpr) for lvalue in lvalues):
+            if len(lvalues) - 1 > rvalue_count:
+                self.msg.wrong_number_values_to_unpack(rvalue_count,
+                                                       len(lvalues) - 1, context)
+                return False
+        elif rvalue_count != len(lvalues):
+            self.msg.wrong_number_values_to_unpack(rvalue_count,
+                                                   len(lvalues), context)
+            return False
+        return True
+
+    def check_multi_assignment(self, lvalues: List[Node],
+                               rvalue: Node,
+                               context: Context,
+                               infer_lvalue_type: bool = True,
+                               msg: str = None) -> None:
+        """Check the assignment of one rvalue to a number of lvalues."""
+
+        if not msg:
+            msg = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT
+
+        # Infer the type of an ordinary rvalue expression.
+        rvalue_type = self.accept(rvalue)  # TODO maybe elsewhere; redundant
+        undefined_rvalue = False
+
+        if isinstance(rvalue_type, AnyType):
+            for lv in lvalues:
+                if isinstance(lv, StarExpr):
+                    lv = lv.expr
+                self.check_assignment(lv, self.temp_node(AnyType(), context), infer_lvalue_type)
+        elif isinstance(rvalue_type, TupleType):
+            self.check_multi_assignment_from_tuple(lvalues, rvalue, cast(TupleType, rvalue_type),
+                                                  context, undefined_rvalue, infer_lvalue_type)
+        else:
+            self.check_multi_assignment_from_iterable(lvalues, rvalue_type,
+                                                     context, infer_lvalue_type)
+
+    def check_multi_assignment_from_tuple(self, lvalues: List[Node], rvalue: Node,
+                                          rvalue_type: TupleType, context: Context,
+                                          undefined_rvalue: bool,
+                                          infer_lvalue_type: bool = True) -> None:
+        if self.check_rvalue_count_in_assignment(lvalues, len(rvalue_type.items), context):
+            star_index = next((i for i, lv in enumerate(lvalues)
+                               if isinstance(lv, StarExpr)), len(lvalues))
+
+            left_lvs = lvalues[:star_index]
+            star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None
+            right_lvs = lvalues[star_index + 1:]
+
+            if not undefined_rvalue:
+                # Infer rvalue again, now in the correct type context.
+                lvalue_type = self.lvalue_type_for_inference(lvalues, rvalue_type)
+                rvalue_type = cast(TupleType, self.accept(rvalue, lvalue_type))
+
+            left_rv_types, star_rv_types, right_rv_types = self.split_around_star(
+                rvalue_type.items, star_index, len(lvalues))
+
+            for lv, rv_type in zip(left_lvs, left_rv_types):
+                self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
+            if star_lv:
+                nodes = [self.temp_node(rv_type, context) for rv_type in star_rv_types]
+                list_expr = ListExpr(nodes)
+                list_expr.set_line(context.get_line())
+                self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type)
+            for lv, rv_type in zip(right_lvs, right_rv_types):
+                self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
+
+    def lvalue_type_for_inference(self, lvalues: List[Node], rvalue_type: TupleType) -> Type:
+        star_index = next((i for i, lv in enumerate(lvalues)
+                           if isinstance(lv, StarExpr)), len(lvalues))
+        left_lvs = lvalues[:star_index]
+        star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None
+        right_lvs = lvalues[star_index + 1:]
+        left_rv_types, star_rv_types, right_rv_types = self.split_around_star(
+            rvalue_type.items, star_index, len(lvalues))
+
+        type_parameters = []  # type: List[Type]
+
+        def append_types_for_inference(lvs: List[Node], rv_types: List[Type]) -> None:
+            for lv, rv_type in zip(lvs, rv_types):
+                sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv)
+                if sub_lvalue_type:
+                    type_parameters.append(sub_lvalue_type)
+                else:  # index lvalue
+                    # TODO Figure out more precise type context, probably
+                    #      based on the type signature of the _set method.
+                    type_parameters.append(rv_type)
+
+        append_types_for_inference(left_lvs, left_rv_types)
+
+        if star_lv:
+            sub_lvalue_type, index_expr, inferred = self.check_lvalue(star_lv.expr)
+            if sub_lvalue_type:
+                type_parameters.extend([sub_lvalue_type] * len(star_rv_types))
+            else:  # index lvalue
+                # TODO Figure out more precise type context, probably
+                #      based on the type signature of the _set method.
+                type_parameters.extend(star_rv_types)
+
+        append_types_for_inference(right_lvs, right_rv_types)
+
+        return TupleType(type_parameters, self.named_type('builtins.tuple'))
+
+    def split_around_star(self, items: List[T], star_index: int,
+                          length: int) -> Tuple[List[T], List[T], List[T]]:
+        """Splits a list of items in three to match another list of length 'length'
+        that contains a starred expression at 'star_index' in the following way:
+
+        star_index = 2, length = 5 (i.e., [a,b,*,c,d]), items = [1,2,3,4,5,6,7]
+        returns: ([1,2], [3,4,5], [6,7])
+        """
+        nr_right_of_star = length - star_index - 1
+        right_index = -nr_right_of_star if nr_right_of_star != 0 else len(items)
+        left = items[:star_index]
+        star = items[star_index:right_index]
+        right = items[right_index:]
+        return (left, star, right)
+
+    def type_is_iterable(self, type: Type) -> bool:
+        return (is_subtype(type, self.named_generic_type('typing.Iterable',
+                                                        [AnyType()])) and
+                isinstance(type, Instance))
+
+    def check_multi_assignment_from_iterable(self, lvalues: List[Node], rvalue_type: Type,
+                                             context: Context,
+                                             infer_lvalue_type: bool = True) -> None:
+        if self.type_is_iterable(rvalue_type):
+            item_type = self.iterable_item_type(cast(Instance, rvalue_type))
+            for lv in lvalues:
+                if isinstance(lv, StarExpr):
+                    self.check_assignment(lv.expr, self.temp_node(rvalue_type, context),
+                                          infer_lvalue_type)
+                else:
+                    self.check_assignment(lv, self.temp_node(item_type, context),
+                                          infer_lvalue_type)
+        else:
+            self.msg.type_not_iterable(rvalue_type, context)
+
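+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Unpacking from a plain iterable gives each target the iterable's item type:
+    #
+    #     def f(s: Set[int]) -> None:
+    #         a, b = s   # a: int, b: int
+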
+    def check_lvalue(self, lvalue: Node) -> Tuple[Type, IndexExpr, Var]:
+        lvalue_type = None  # type: Type
+        index_lvalue = None  # type: IndexExpr
+        inferred = None  # type: Var
+
+        if self.is_definition(lvalue):
+            if isinstance(lvalue, NameExpr):
+                inferred = cast(Var, lvalue.node)
+                assert isinstance(inferred, Var)
+            else:
+                m = cast(MemberExpr, lvalue)
+                self.accept(m.expr)
+                inferred = m.def_var
+        elif isinstance(lvalue, IndexExpr):
+            index_lvalue = lvalue
+        elif isinstance(lvalue, MemberExpr):
+            lvalue_type = self.expr_checker.analyze_ordinary_member_access(lvalue,
+                                                                 True)
+            self.store_type(lvalue, lvalue_type)
+        elif isinstance(lvalue, NameExpr):
+            lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True)
+            self.store_type(lvalue, lvalue_type)
+        elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
+            lv = cast(Union[TupleExpr, ListExpr], lvalue)
+            types = [self.check_lvalue(sub_expr)[0] for sub_expr in lv.items]
+            lvalue_type = TupleType(types, self.named_type('builtins.tuple'))
+        else:
+            lvalue_type = self.accept(lvalue)
+
+        return lvalue_type, index_lvalue, inferred
+
+    def is_definition(self, s: Node) -> bool:
+        if isinstance(s, NameExpr):
+            if s.is_def:
+                return True
+            # If the node type is not defined, this must be the first assignment
+            # that we process => this is a definition, even though the semantic
+            # analyzer did not recognize this as such. This can arise in code
+            # that uses isinstance checks, if type checking of the primary
+            # definition is skipped due to an always False type check.
+            node = s.node
+            if isinstance(node, Var):
+                return node.type is None
+        elif isinstance(s, MemberExpr):
+            return s.is_def
+        return False
+
+    def infer_variable_type(self, name: Var, lvalue: Node,
+                            init_type: Type, context: Context) -> None:
+        """Infer the type of initialized variables from initializer type."""
+        if self.typing_mode_weak():
+            self.set_inferred_type(name, lvalue, AnyType())
+            self.binder.assign_type(lvalue, init_type, True)
+        elif isinstance(init_type, Void):
+            self.check_not_void(init_type, context)
+            self.set_inference_error_fallback_type(name, lvalue, init_type, context)
+        elif isinstance(init_type, DeletedType):
+            self.msg.deleted_as_rvalue(init_type, context)
+        elif not is_valid_inferred_type(init_type):
+            # We cannot use the type of the initialization expression for full type
+            # inference (it's not specific enough), but we might be able to give
+            # partial type which will be made more specific later. A partial type
+            # gets generated in assignment like 'x = []' where item type is not known.
+            if not self.infer_partial_type(name, lvalue, init_type):
+                self.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+                self.set_inference_error_fallback_type(name, lvalue, init_type, context)
+        else:
+            # Infer type of the target.
+
+            # Make the type more general (strip away function names etc.).
+            init_type = strip_type(init_type)
+
+            self.set_inferred_type(name, lvalue, init_type)
+
+    def infer_partial_type(self, name: Var, lvalue: Node, init_type: Type) -> bool:
+        if isinstance(init_type, NoneTyp):
+            partial_type = PartialType(None, name)
+        elif isinstance(init_type, Instance):
+            fullname = init_type.type.fullname()
+            if ((fullname == 'builtins.list' or fullname == 'builtins.set' or
+                 fullname == 'builtins.dict')
+                    and isinstance(init_type.args[0], NoneTyp)
+                    and (fullname != 'builtins.dict' or isinstance(init_type.args[1], NoneTyp))
+                    and isinstance(lvalue, NameExpr)):
+                partial_type = PartialType(init_type.type, name)
+            else:
+                return False
+        else:
+            return False
+        self.set_inferred_type(name, lvalue, partial_type)
+        self.partial_types[-1][name] = lvalue
+        return True
+
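+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Partial types inferred above are refined by later assignments:
+    #
+    #     x = None    # partial None type; a later 'x = 1' completes it to int
+    #     a = []      # partial list type with an unknown item type; a later
+    #                 # assignment or item insertion may complete it to List[int]
+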
+    def set_inferred_type(self, var: Var, lvalue: Node, type: Type) -> None:
+        """Store inferred variable type.
+
+        Store the type to both the variable node and the expression node that
+        refers to the variable (lvalue). If var is None, do nothing.
+        """
+        if var and not self.current_node_deferred:
+            var.type = type
+            self.store_type(lvalue, type)
+
+    def set_inference_error_fallback_type(self, var: Var, lvalue: Node, type: Type,
+                                          context: Context) -> None:
+        """If errors on context line are ignored, store dummy type for variable.
+
+        If the error on this line is ignored, the variable should still get some
+        inferred type so that it can be used later in the program. Example:
+
+          x = []  # type: ignore
+          x.append(1)   # Should be ok!
+
+        We implement this here by giving x a valid type (Any).
+        """
+        if context.get_line() in self.errors.ignored_lines:
+            self.set_inferred_type(var, lvalue, AnyType())
+
+    def narrow_type_from_binder(self, expr: Node, known_type: Type) -> Type:
+        if expr.literal >= LITERAL_TYPE:
+            restriction = self.binder.get(expr)
+            if restriction:
+                ans = meet_simple(known_type, restriction)
+                return ans
+        return known_type
+
+    def check_simple_assignment(self, lvalue_type: Type, rvalue: Node,
+                                context: Node,
+                                msg: str = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
+                                lvalue_name: str = 'variable',
+                                rvalue_name: str = 'expression') -> Type:
+        if self.is_stub and isinstance(rvalue, EllipsisExpr):
+            # '...' is always a valid initializer in a stub.
+            return AnyType()
+        else:
+            rvalue_type = self.accept(rvalue, lvalue_type)
+            if isinstance(rvalue_type, DeletedType):
+                self.msg.deleted_as_rvalue(rvalue_type, context)
+            if self.typing_mode_weak():
+                return rvalue_type
+            if isinstance(lvalue_type, DeletedType):
+                self.msg.deleted_as_lvalue(lvalue_type, context)
+            else:
+                self.check_subtype(rvalue_type, lvalue_type, context, msg,
+                                   '{} has type'.format(rvalue_name),
+                                   '{} has type'.format(lvalue_name))
+            return rvalue_type
+
+    def check_indexed_assignment(self, lvalue: IndexExpr,
+                                 rvalue: Node, context: Context) -> None:
+        """Type check indexed assignment base[index] = rvalue.
+
+        The lvalue argument is the base[index] expression.
+        """
+        self.try_infer_partial_type_from_indexed_assignment(lvalue, rvalue)
+        basetype = self.accept(lvalue.base)
+        method_type = self.expr_checker.analyze_external_member_access(
+            '__setitem__', basetype, context)
+        lvalue.method_type = method_type
+        self.expr_checker.check_call(method_type, [lvalue.index, rvalue],
+                                     [nodes.ARG_POS, nodes.ARG_POS],
+                                     context)
+
+    def try_infer_partial_type_from_indexed_assignment(
+            self, lvalue: IndexExpr, rvalue: Node) -> None:
+        # TODO: Should we share some of this with try_infer_partial_type?
+        if isinstance(lvalue.base, RefExpr):
+            var = cast(Var, lvalue.base.node)
+            if var is not None and isinstance(var.type, PartialType):
+                type_type = var.type.type
+                if type_type is None:
+                    return  # The partial type is None.
+                partial_types = self.find_partial_types(var)
+                if partial_types is None:
+                    return
+                typename = type_type.fullname()
+                if typename == 'builtins.dict':
+                    # TODO: Don't infer things twice.
+                    key_type = self.accept(lvalue.index)
+                    value_type = self.accept(rvalue)
+                    if is_valid_inferred_type(key_type) and is_valid_inferred_type(value_type):
+                        if not self.current_node_deferred:
+                            var.type = self.named_generic_type('builtins.dict',
+                                                               [key_type, value_type])
+                        del partial_types[var]
+
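+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # An indexed assignment can complete a partial dict type:
+    #
+    #     d = {}          # partial dict type
+    #     d['one'] = 1    # d is now builtins.dict[str, int]
+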
+    def visit_expression_stmt(self, s: ExpressionStmt) -> Type:
+        self.accept(s.expr)
+
+    def visit_return_stmt(self, s: ReturnStmt) -> Type:
+        """Type check a return statement."""
+        self.breaking_out = True
+        if self.is_within_function():
+            if self.function_stack[-1].is_generator:
+                return_type = self.get_generator_return_type(self.return_types[-1])
+            else:
+                return_type = self.return_types[-1]
+
+            if s.expr:
+                # Return with a value.
+                typ = self.accept(s.expr, return_type)
+                # Returning a value of type Any is always fine.
+                if isinstance(typ, AnyType):
+                    return None
+
+                if isinstance(return_type, Void):
+                    # Lambdas are allowed to have a Void return.
+                    # Functions returning a value of type None are allowed to have a Void return.
+                    if isinstance(self.function_stack[-1], FuncExpr) or isinstance(typ, NoneTyp):
+                        return None
+                    self.fail(messages.NO_RETURN_VALUE_EXPECTED, s)
+                else:
+                    self.check_subtype(
+                        typ, return_type, s,
+                        messages.INCOMPATIBLE_RETURN_VALUE_TYPE
+                        + ": expected {}, got {}".format(return_type, typ)
+                    )
+            else:
+                # Empty returns are valid in Generators with Any typed returns.
+                if (self.function_stack[-1].is_generator and isinstance(return_type, AnyType)):
+                    return None
+
+                if isinstance(return_type, Void):
+                    return None
+
+                if isinstance(return_type, AnyType):
+                    return None
+
+                if self.typing_mode_full():
+                    self.fail(messages.RETURN_VALUE_EXPECTED, s)
+
+    def wrap_generic_type(self, typ: Instance, rtyp: Instance, check_type:
+                          str, context: Context) -> Type:
+        n_diff = self.count_nested_types(rtyp, check_type) - self.count_nested_types(typ,
+                                                                                     check_type)
+        if n_diff == 1:
+            return self.named_generic_type(check_type, [typ])
+        elif n_diff == 0 or n_diff > 1:
+            self.fail(messages.INCOMPATIBLE_RETURN_VALUE_TYPE
+                + ": expected {}, got {}".format(rtyp, typ), context)
+            return typ
+        return typ
+
+    def count_nested_types(self, typ: Instance, check_type: str) -> int:
+        c = 0
+        while is_subtype(typ, self.named_type(check_type)):
+            c += 1
+            typ = map_instance_to_supertype(self.named_generic_type(check_type, typ.args),
+                                            self.lookup_typeinfo(check_type))
+            if typ.args:
+                typ = cast(Instance, typ.args[0])
+            else:
+                return c
+        return c
+
+    def visit_if_stmt(self, s: IfStmt) -> Type:
+        """Type check an if statement."""
+        broken = True
+        ending_frames = []  # type: List[Frame]
+        clauses_frame = self.binder.push_frame()
+        for e, b in zip(s.expr, s.body):
+            t = self.accept(e)
+            self.check_not_void(t, e)
+            if_map, else_map = find_isinstance_check(
+                e, self.type_map,
+                self.typing_mode_weak()
+            )
+            if if_map is None:
+                # The condition is always false
+                # XXX should issue a warning?
+                pass
+            else:
+                # Only type check body if the if condition can be true.
+                self.binder.push_frame()
+                if if_map:
+                    for var, type in if_map.items():
+                        self.binder.push(var, type)
+
+                self.accept(b)
+                _, frame = self.binder.pop_frame()
+                if not self.breaking_out:
+                    broken = False
+                    ending_frames.append(meet_frames(clauses_frame, frame))
+
+                self.breaking_out = False
+
+                if else_map:
+                    for var, type in else_map.items():
+                        self.binder.push(var, type)
+            if else_map is None:
+                # The condition is always true => remaining elif/else blocks
+                # can never be reached.
+
+                # Might also want to issue a warning
+                # print("Warning: isinstance always true")
+                if broken:
+                    self.binder.pop_frame()
+                    self.breaking_out = True
+                    return None
+                break
+        else:
+            if s.else_body:
+                self.accept(s.else_body)
+
+                if self.breaking_out and broken:
+                    self.binder.pop_frame()
+                    return None
+
+                if not self.breaking_out:
+                    ending_frames.append(clauses_frame)
+
+                self.breaking_out = False
+            else:
+                ending_frames.append(clauses_frame)
+
+        self.binder.pop_frame()
+        self.binder.update_from_options(ending_frames)
+
+    def visit_while_stmt(self, s: WhileStmt) -> Type:
+        """Type check a while statement."""
+        self.binder.push_frame()
+        self.binder.push_loop_frame()
+        self.accept_in_frame(IfStmt([s.expr], [s.body], None),
+                             repeat_till_fixed=True)
+        self.binder.pop_loop_frame()
+        if s.else_body:
+            self.accept(s.else_body)
+        self.binder.pop_frame(False, True)
+
+    def visit_operator_assignment_stmt(self,
+                                       s: OperatorAssignmentStmt) -> Type:
+        """Type check an operator assignment statement, e.g. x += 1."""
+        lvalue_type = self.accept(s.lvalue)
+        method = infer_operator_assignment_method(lvalue_type, s.op)
+        rvalue_type, method_type = self.expr_checker.check_op(
+            method, lvalue_type, s.rvalue, s)
+
+        if isinstance(s.lvalue, IndexExpr):
+            lv = cast(IndexExpr, s.lvalue)
+            self.check_indexed_assignment(lv, s.rvalue, s.rvalue)
+        else:
+            if not is_subtype(rvalue_type, lvalue_type):
+                self.msg.incompatible_operator_assignment(s.op, s)
+
+    def visit_assert_stmt(self, s: AssertStmt) -> Type:
+        self.accept(s.expr)
+
+    def visit_raise_stmt(self, s: RaiseStmt) -> Type:
+        """Type check a raise statement."""
+        self.breaking_out = True
+        if s.expr:
+            self.type_check_raise(s.expr, s)
+        if s.from_expr:
+            self.type_check_raise(s.from_expr, s)
+
+    def type_check_raise(self, e: Node, s: RaiseStmt) -> None:
+        typ = self.accept(e)
+        if isinstance(typ, FunctionLike):
+            if typ.is_type_obj():
+                # Cases like "raise/from ExceptionClass".
+                typeinfo = typ.type_object()
+                base = self.lookup_typeinfo('builtins.BaseException')
+                if base in typeinfo.mro or typeinfo.fallback_to_any:
+                    # Good!
+                    return
+                # Else fall back to the checks below (which will fail).
+        if isinstance(typ, TupleType) and self.pyversion[0] == 2:
+            # allow `raise type, value, traceback`
+            # https://docs.python.org/2/reference/simple_stmts.html#the-raise-statement
+            # TODO: Also check tuple item types.
+            if len(cast(TupleType, typ).items) in (2, 3):
+                return
+        if isinstance(typ, Instance) and typ.type.fallback_to_any:
+            # OK!
+            return
+        self.check_subtype(typ,
+                           self.named_type('builtins.BaseException'), s,
+                           messages.INVALID_EXCEPTION)
+
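+    # Editor's note: illustrative sketch, not part of the upstream source.
+    # Examples accepted or rejected by the raise check above:
+    #
+    #     raise ValueError('x')   # ok: instance of BaseException
+    #     raise ValueError        # ok: exception class
+    #     raise 1                 # error: exception must be derived from BaseException
+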
+    def visit_try_stmt(self, s: TryStmt) -> Type:
+        """Type check a try statement."""
+        completed_frames = []  # type: List[Frame]
+        self.binder.push_frame()
+        self.binder.try_frames.add(len(self.binder.frames) - 2)
+        self.accept(s.body)
+        self.binder.try_frames.remove(len(self.binder.frames) - 2)
+        if s.else_body:
+            self.accept(s.else_body)
+        self.breaking_out = False
+        changed, frame_on_completion = self.binder.pop_frame()
+        completed_frames.append(frame_on_completion)
+
+        for i in range(len(s.handlers)):
+            self.binder.push_frame()
+            if s.types[i]:
+                t = self.exception_type(s.types[i])
+                if s.vars[i]:
+                    # To support local variables, we make this a definition line,
+                    # causing assignment to set the variable's type.
+                    s.vars[i].is_def = True
+                    self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
+            self.accept(s.handlers[i])
+            if s.vars[i]:
+                # Exception variables are deleted in python 3 but not python 2.
+                # But, since it's bad form in python 2 and the type checking
+                # wouldn't work very well, we delete it anyway.
+
+                # Unfortunately, this doesn't let us detect usage before the
+                # try/except block.
+                if self.pyversion[0] >= 3:
+                    source = s.vars[i].name
+                else:
+                    source = ('(exception variable "{}", which we do not accept '
+                              'outside except: blocks even in python 2)'.format(s.vars[i].name))
+                var = cast(Var, s.vars[i].node)
+                var.type = DeletedType(source=source)
+                self.binder.cleanse(s.vars[i])
+
+            self.breaking_out = False
+            changed, frame_on_completion = self.binder.pop_frame()
+            completed_frames.append(frame_on_completion)
+
+        self.binder.update_from_options(completed_frames)
+
+        if s.finally_body:
+            self.accept(s.finally_body)
+
+    def exception_type(self, n: Node) -> Type:
+        if isinstance(n, TupleExpr):
+            t = None  # type: Type
+            for item in n.items:
+                tt = self.exception_type(item)
+                if t:
+                    t = join_types(t, tt)
+                else:
+                    t = tt
+            return t
+        else:
+            # A single exception type; should evaluate to a type object type.
+            type = self.accept(n)
+            return self.check_exception_type(type, n)
+        self.fail('Unsupported exception', n)
+        return AnyType()
+
+    def check_exception_type(self, type: Type, context: Context) -> Type:
+        if isinstance(type, FunctionLike):
+            item = type.items()[0]
+            ret = item.ret_type
+            if (is_subtype(ret, self.named_type('builtins.BaseException'))
+                    and item.is_type_obj()):
+                return ret
+            else:
+                self.fail(messages.INVALID_EXCEPTION_TYPE, context)
+                return AnyType()
+        elif isinstance(type, AnyType):
+            return AnyType()
+        else:
+            self.fail(messages.INVALID_EXCEPTION_TYPE, context)
+            return AnyType()
+
+    def visit_for_stmt(self, s: ForStmt) -> Type:
+        """Type check a for statement."""
+        item_type = self.analyze_iterable_item_type(s.expr)
+        self.analyze_index_variables(s.index, item_type, s)
+        self.binder.push_frame()
+        self.binder.push_loop_frame()
+        self.accept_in_frame(s.body, repeat_till_fixed=True)
+        self.binder.pop_loop_frame()
+        if s.else_body:
+            self.accept(s.else_body)
+        self.binder.pop_frame(False, True)
+
+    def analyze_iterable_item_type(self, expr: Node) -> Type:
+        """Analyse iterable expression and return iterator item type."""
+        iterable = self.accept(expr)
+
+        self.check_not_void(iterable, expr)
+        if isinstance(iterable, TupleType):
+            joined = NoneTyp()  # type: Type
+            for item in iterable.items:
+                joined = join_types(joined, item)
+            if isinstance(joined, ErrorType):
+                self.fail(messages.CANNOT_INFER_ITEM_TYPE, expr)
+                return AnyType()
+            return joined
+        else:
+            # Non-tuple iterable.
+            self.check_subtype(iterable,
+                               self.named_generic_type('typing.Iterable',
+                                                       [AnyType()]),
+                               expr, messages.ITERABLE_EXPECTED)
+
+            echk = self.expr_checker
+            method = echk.analyze_external_member_access('__iter__', iterable,
+                                                         expr)
+            iterator = echk.check_call(method, [], [], expr)[0]
+            if self.pyversion[0] >= 3:
+                nextmethod = '__next__'
+            else:
+                nextmethod = 'next'
+            method = echk.analyze_external_member_access(nextmethod, iterator,
+                                                         expr)
+            return echk.check_call(method, [], [], expr)[0]
+
+    def analyze_index_variables(self, index: Node, item_type: Type,
+                                context: Context) -> None:
+        """Type check or infer for loop or list comprehension index vars."""
+        self.check_assignment(index, self.temp_node(item_type, context))
+
+    def visit_del_stmt(self, s: DelStmt) -> Type:
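+        # 'del d[k]' is checked as a call to d.__delitem__(k); for other
+        # targets such as 'del x' the names are marked as deleted in the
+        # binder.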
+        if isinstance(s.expr, IndexExpr):
+            e = cast(IndexExpr, s.expr)  # Cast
+            m = MemberExpr(e.base, '__delitem__')
+            m.line = s.line
+            c = CallExpr(m, [e.index], [nodes.ARG_POS], [None])
+            c.line = s.line
+            return c.accept(self)
+        else:
+            def flatten(t: Node) -> List[Node]:
+                """Flatten a nested sequence of tuples/lists into one list of nodes."""
+                if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
+                    t = cast(Union[TupleExpr, ListExpr], t)
+                    return [b for a in t.items for b in flatten(a)]
+                else:
+                    return [t]
+
+            s.expr.accept(self)
+            for elt in flatten(s.expr):
+                if isinstance(elt, NameExpr):
+                    self.binder.assign_type(elt, DeletedType(source=elt.name),
+                                            self.typing_mode_weak())
+            return None
+
+    def visit_decorator(self, e: Decorator) -> Type:
+        for d in e.decorators:
+            if isinstance(d, RefExpr):
+                if d.fullname == 'typing.no_type_check':
+                    e.var.type = AnyType()
+                    e.var.is_ready = True
+                    return NoneTyp()
+
+        e.func.accept(self)
+        sig = self.function_type(e.func)  # type: Type
+        # Process decorators from the inside out.
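+        # For example, with
+        #     @dec1
+        #     @dec2
+        #     def f(): ...
+        # dec2 is applied to f first and dec1 to the result, so the final
+        # type of f comes from dec1's return type.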
+        for i in range(len(e.decorators)):
+            n = len(e.decorators) - 1 - i
+            dec = self.accept(e.decorators[n])
+            temp = self.temp_node(sig)
+            sig, t2 = self.expr_checker.check_call(dec, [temp],
+                                                   [nodes.ARG_POS], e)
+        sig = cast(FunctionLike, sig)
+        sig = set_callable_name(sig, e.func)
+        e.var.type = sig
+        e.var.is_ready = True
+        if e.func.is_property:
+            self.check_incompatible_property_override(e)
+
+    def check_incompatible_property_override(self, e: Decorator) -> None:
+        if not e.var.is_settable_property:
+            name = e.func.name()
+            for base in e.func.info.mro[1:]:
+                base_attr = base.names.get(name)
+                if not base_attr:
+                    continue
+                if (isinstance(base_attr.node, OverloadedFuncDef) and
+                        base_attr.node.is_property and
+                        base_attr.node.items[0].var.is_settable_property):
+                    self.fail(messages.READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE, e)
+
+    def visit_with_stmt(self, s: WithStmt) -> Type:
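+        # Sketch: for 'with open(path) as f' the type of f is the return type
+        # of open(path).__enter__(), and __exit__ is checked with three
+        # arguments of type Any.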
+        echk = self.expr_checker
+        for expr, target in zip(s.expr, s.target):
+            ctx = self.accept(expr)
+            enter = echk.analyze_external_member_access('__enter__', ctx, expr)
+            obj = echk.check_call(enter, [], [], expr)[0]
+            if target:
+                self.check_assignment(target, self.temp_node(obj, expr))
+            exit = echk.analyze_external_member_access('__exit__', ctx, expr)
+            arg = self.temp_node(AnyType(), expr)
+            echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)
+        self.accept(s.body)
+
+    def visit_print_stmt(self, s: PrintStmt) -> Type:
+        for arg in s.args:
+            self.accept(arg)
+        if s.target:
+            target_type = self.accept(s.target)
+            if not isinstance(target_type, NoneTyp):
+                # TODO: Also verify the type of 'write'.
+                self.expr_checker.analyze_external_member_access('write', target_type, s.target)
+
+    #
+    # Expressions
+    #
+
+    def visit_name_expr(self, e: NameExpr) -> Type:
+        return self.expr_checker.visit_name_expr(e)
+
+    def visit_call_expr(self, e: CallExpr) -> Type:
+        return self.expr_checker.visit_call_expr(e)
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> Type:
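+        # Sketch: inside a function returning Generator[int, None, None],
+        # 'yield from g()' where g() -> Generator[int, None, str] must yield
+        # compatible items (int), and the whole expression evaluates to str.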
+        return_type = self.return_types[-1]
+        subexpr_type = self.accept(e.expr, return_type)
+        iter_type = None  # type: Type
+
+        # Check that the expr is an instance of Iterable and get the type of the iterator produced
+        # by __iter__.
+        if isinstance(subexpr_type, AnyType):
+            iter_type = AnyType()
+        elif (isinstance(subexpr_type, Instance) and
+                is_subtype(subexpr_type, self.named_type('typing.Iterable'))):
+            iter_method_type = self.expr_checker.analyze_external_member_access(
+                '__iter__',
+                subexpr_type,
+                AnyType())
+
+            generic_generator_type = self.named_generic_type('typing.Generator',
+                                                             [AnyType(), AnyType(), AnyType()])
+            iter_type, _ = self.expr_checker.check_call(iter_method_type, [], [],
+                                                        context=generic_generator_type)
+        else:
+            self.msg.yield_from_invalid_operand_type(subexpr_type, e)
+            iter_type = AnyType()
+
+        # Check that the iterator's item type matches the type yielded by the Generator function
+        # containing this `yield from` expression.
+        expected_item_type = self.get_generator_yield_type(return_type)
+        actual_item_type = self.get_generator_yield_type(iter_type)
+
+        self.check_subtype(actual_item_type, expected_item_type, e,
+                           messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM,
+                           'actual type', 'expected type')
+
+        # Determine the type of the entire yield from expression.
+        if (isinstance(iter_type, Instance) and
+                iter_type.type.fullname() == 'typing.Generator'):
+            return self.get_generator_return_type(iter_type)
+        else:
+            # Non-Generators don't return anything from `yield from` expressions.
+            return Void()
+
+    def visit_member_expr(self, e: MemberExpr) -> Type:
+        return self.expr_checker.visit_member_expr(e)
+
+    def visit_break_stmt(self, s: BreakStmt) -> Type:
+        self.breaking_out = True
+        self.binder.allow_jump(self.binder.loop_frames[-1] - 1)
+        return None
+
+    def visit_continue_stmt(self, s: ContinueStmt) -> Type:
+        self.breaking_out = True
+        self.binder.allow_jump(self.binder.loop_frames[-1])
+        return None
+
+    def visit_int_expr(self, e: IntExpr) -> Type:
+        return self.expr_checker.visit_int_expr(e)
+
+    def visit_str_expr(self, e: StrExpr) -> Type:
+        return self.expr_checker.visit_str_expr(e)
+
+    def visit_bytes_expr(self, e: BytesExpr) -> Type:
+        return self.expr_checker.visit_bytes_expr(e)
+
+    def visit_unicode_expr(self, e: UnicodeExpr) -> Type:
+        return self.expr_checker.visit_unicode_expr(e)
+
+    def visit_float_expr(self, e: FloatExpr) -> Type:
+        return self.expr_checker.visit_float_expr(e)
+
+    def visit_complex_expr(self, e: ComplexExpr) -> Type:
+        return self.expr_checker.visit_complex_expr(e)
+
+    def visit_ellipsis(self, e: EllipsisExpr) -> Type:
+        return self.expr_checker.visit_ellipsis(e)
+
+    def visit_op_expr(self, e: OpExpr) -> Type:
+        return self.expr_checker.visit_op_expr(e)
+
+    def visit_comparison_expr(self, e: ComparisonExpr) -> Type:
+        return self.expr_checker.visit_comparison_expr(e)
+
+    def visit_unary_expr(self, e: UnaryExpr) -> Type:
+        return self.expr_checker.visit_unary_expr(e)
+
+    def visit_index_expr(self, e: IndexExpr) -> Type:
+        return self.expr_checker.visit_index_expr(e)
+
+    def visit_cast_expr(self, e: CastExpr) -> Type:
+        return self.expr_checker.visit_cast_expr(e)
+
+    def visit_super_expr(self, e: SuperExpr) -> Type:
+        return self.expr_checker.visit_super_expr(e)
+
+    def visit_type_application(self, e: TypeApplication) -> Type:
+        return self.expr_checker.visit_type_application(e)
+
+    def visit_type_alias_expr(self, e: TypeAliasExpr) -> Type:
+        return self.expr_checker.visit_type_alias_expr(e)
+
+    def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
+        # TODO: Perhaps return a special type used for type variables only?
+        return AnyType()
+
+    def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
+        # TODO: Perhaps return a type object type?
+        return AnyType()
+
+    def visit_list_expr(self, e: ListExpr) -> Type:
+        return self.expr_checker.visit_list_expr(e)
+
+    def visit_set_expr(self, e: SetExpr) -> Type:
+        return self.expr_checker.visit_set_expr(e)
+
+    def visit_tuple_expr(self, e: TupleExpr) -> Type:
+        return self.expr_checker.visit_tuple_expr(e)
+
+    def visit_dict_expr(self, e: DictExpr) -> Type:
+        return self.expr_checker.visit_dict_expr(e)
+
+    def visit_slice_expr(self, e: SliceExpr) -> Type:
+        return self.expr_checker.visit_slice_expr(e)
+
+    def visit_func_expr(self, e: FuncExpr) -> Type:
+        return self.expr_checker.visit_func_expr(e)
+
+    def visit_list_comprehension(self, e: ListComprehension) -> Type:
+        return self.expr_checker.visit_list_comprehension(e)
+
+    def visit_set_comprehension(self, e: SetComprehension) -> Type:
+        return self.expr_checker.visit_set_comprehension(e)
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> Type:
+        return self.expr_checker.visit_generator_expr(e)
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
+        return self.expr_checker.visit_dictionary_comprehension(e)
+
+    def visit_temp_node(self, e: TempNode) -> Type:
+        return e.type
+
+    def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
+        return self.expr_checker.visit_conditional_expr(e)
+
+    def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
+        return self.expr_checker.visit_backquote_expr(e)
+
+    def visit_yield_expr(self, e: YieldExpr) -> Type:
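+        # Sketch: inside a function returning Generator[int, str, None],
+        # 'x = yield 1' checks 1 against the yield type int, and the yield
+        # expression (and hence x) gets the send type str.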
+        return_type = self.return_types[-1]
+        expected_item_type = self.get_generator_yield_type(return_type)
+        if e.expr is None:
+            if (not (isinstance(expected_item_type, Void) or
+                     isinstance(expected_item_type, AnyType))
+                    and self.typing_mode_full()):
+                self.fail(messages.YIELD_VALUE_EXPECTED, e)
+        else:
+            actual_item_type = self.accept(e.expr, expected_item_type)
+            self.check_subtype(actual_item_type, expected_item_type, e,
+                            messages.INCOMPATIBLE_TYPES_IN_YIELD,
+                            'actual type', 'expected type')
+        return self.get_generator_receive_type(return_type)
+
+    #
+    # Helpers
+    #
+
+    def check_subtype(self, subtype: Type, supertype: Type, context: Context,
+                      msg: str = messages.INCOMPATIBLE_TYPES,
+                      subtype_label: str = None,
+                      supertype_label: str = None) -> None:
+        """Generate an error if the subtype is not compatible with
+        supertype."""
+        if not is_subtype(subtype, supertype):
+            if isinstance(subtype, Void):
+                self.msg.does_not_return_value(subtype, context)
+            else:
+                extra_info = []  # type: List[str]
+                if subtype_label is not None or supertype_label is not None:
+                    subtype_str, supertype_str = self.msg.format_distinctly(subtype, supertype)
+                    if subtype_label is not None:
+                        extra_info.append(subtype_label + ' ' + subtype_str)
+                    if supertype_label is not None:
+                        extra_info.append(supertype_label + ' ' + supertype_str)
+                if extra_info:
+                    msg += ' (' + ', '.join(extra_info) + ')'
+                self.fail(msg, context)
+
+    def named_type(self, name: str) -> Instance:
+        """Return an instance type with type given by the name and no
+        type arguments. For example, named_type('builtins.object')
+        produces the object type.
+        """
+        # Assume that the name refers to a type.
+        sym = self.lookup_qualified(name)
+        return Instance(cast(TypeInfo, sym.node), [])
+
+    def named_generic_type(self, name: str, args: List[Type]) -> Instance:
+        """Return an instance with the given name and type arguments.
+
+        Assume that the number of arguments is correct.  Assume that
+        the name refers to a compatible generic type.
+        """
+        return Instance(self.lookup_typeinfo(name), args)
+
+    def lookup_typeinfo(self, fullname: str) -> TypeInfo:
+        # Assume that the name refers to a class.
+        sym = self.lookup_qualified(fullname)
+        return cast(TypeInfo, sym.node)
+
+    def type_type(self) -> Instance:
+        """Return instance type 'type'."""
+        return self.named_type('builtins.type')
+
+    def object_type(self) -> Instance:
+        """Return instance type 'object'."""
+        return self.named_type('builtins.object')
+
+    def bool_type(self) -> Instance:
+        """Return instance type 'bool'."""
+        return self.named_type('builtins.bool')
+
+    def str_type(self) -> Instance:
+        """Return instance type 'str'."""
+        return self.named_type('builtins.str')
+
+    def check_type_equivalency(self, t1: Type, t2: Type, node: Context,
+                               msg: str = messages.INCOMPATIBLE_TYPES) -> None:
+        """Generate an error if the types are not equivalent. The
+        dynamic type is equivalent to all types.
+        """
+        if not is_equivalent(t1, t2):
+            self.fail(msg, node)
+
+    def store_type(self, node: Node, typ: Type) -> None:
+        """Store the type of a node in the type map."""
+        self.type_map[node] = typ
+
+    def typing_mode_none(self) -> bool:
+        if self.is_dynamic_function():
+            return not self.weak_opts
+        elif self.function_stack:
+            return False
+        else:
+            return False
+
+    def typing_mode_weak(self) -> bool:
+        if self.is_dynamic_function():
+            return bool(self.weak_opts)
+        elif self.function_stack:
+            return False
+        else:
+            return 'global' in self.weak_opts
+
+    def typing_mode_full(self) -> bool:
+        if self.is_dynamic_function():
+            return False
+        elif self.function_stack:
+            return True
+        else:
+            return 'global' not in self.weak_opts
+
+    def is_dynamic_function(self) -> bool:
+        return len(self.dynamic_funcs) > 0 and self.dynamic_funcs[-1]
+
+    def lookup(self, name: str, kind: int) -> SymbolTableNode:
+        """Look up a definition from the symbol table with the given name.
+        TODO remove kind argument
+        """
+        if self.locals is not None and name in self.locals:
+            return self.locals[name]
+        elif name in self.globals:
+            return self.globals[name]
+        else:
+            b = self.globals.get('__builtins__', None)
+            if b:
+                table = cast(MypyFile, b.node).names
+                if name in table:
+                    return table[name]
+            raise KeyError('Failed lookup: {}'.format(name))
+
+    def lookup_qualified(self, name: str) -> SymbolTableNode:
+        if '.' not in name:
+            return self.lookup(name, GDEF)  # FIX kind
+        else:
+            parts = name.split('.')
+            n = self.modules[parts[0]]
+            for i in range(1, len(parts) - 1):
+                n = cast(MypyFile, n.names.get(parts[i], None).node)
+            return n.names[parts[-1]]
+
+    def enter(self) -> None:
+        self.locals = SymbolTable()
+
+    def leave(self) -> None:
+        self.locals = None
+
+    def enter_partial_types(self) -> None:
+        """Push a new scope for collecting partial types."""
+        self.partial_types.append({})
+
+    def leave_partial_types(self) -> None:
+        """Pop partial type scope.
+
+        Also report errors for variables which still have partial
+        types, i.e. we couldn't infer a complete type.
+        """
+        partial_types = self.partial_types.pop()
+        if not self.current_node_deferred:
+            for var, context in partial_types.items():
+                self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+                var.type = AnyType()
+
+    def find_partial_types(self, var: Var) -> Optional[Dict[Var, Context]]:
+        for partial_types in reversed(self.partial_types):
+            if var in partial_types:
+                return partial_types
+        return None
+
+    def is_within_function(self) -> bool:
+        """Are we currently type checking within a function?
+
+        I.e. not at class body or at the top level.
+        """
+        return self.return_types != []
+
+    def check_not_void(self, typ: Type, context: Context) -> None:
+        """Generate an error if the type is Void."""
+        if isinstance(typ, Void):
+            self.msg.does_not_return_value(typ, context)
+
+    def temp_node(self, t: Type, context: Context = None) -> Node:
+        """Create a temporary node with the given, fixed type."""
+        temp = TempNode(t)
+        if context:
+            temp.set_line(context.get_line())
+        return temp
+
+    def fail(self, msg: str, context: Context) -> None:
+        """Produce an error message."""
+        self.msg.fail(msg, context)
+
+    def iterable_item_type(self, instance: Instance) -> Type:
+        iterable = map_instance_to_supertype(
+            instance,
+            self.lookup_typeinfo('typing.Iterable'))
+        return iterable.args[0]
+
+    def function_type(self, func: FuncBase) -> FunctionLike:
+        return function_type(func, self.named_type('builtins.function'))
+
+    def method_type(self, func: FuncBase) -> FunctionLike:
+        return method_type_with_fallback(func, self.named_type('builtins.function'))
+
+
+def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
+                            super_info: TypeInfo) -> Type:
+    """Map type variables in a type defined in a supertype context to be valid
+    in the subtype context. Assume that the result is unique; if more than
+    one type is possible, return one of the alternatives.
+
+    For example, assume
+
+    . class D(Generic[S]) ...
+    . class C(D[E[T]], Generic[T]) ...
+
+    Now S in the context of D would be mapped to E[T] in the context of C.
+    """
+    # Create the type of self in subtype, of form t[a1, ...].
+    inst_type = self_type(sub_info)
+    if isinstance(inst_type, TupleType):
+        inst_type = inst_type.fallback
+    # Map the type of self to supertype. This gets us a description of the
+    # supertype type variables in terms of subtype variables, i.e. t[t1, ...]
+    # so that any type variables in tN are to be interpreted in subtype
+    # context.
+    inst_type = map_instance_to_supertype(inst_type, super_info)
+    # Finally expand the type variables in type with those in the previously
+    # constructed type. Note that both type and inst_type may have type
+    # variables, but in type they are interpreted in supertype context while
+    # in inst_type they are interpreted in subtype context. This works even if
+    # the names of type variables in supertype and subtype overlap.
+    return expand_type_by_instance(typ, inst_type)
+
+
+def find_isinstance_check(node: Node,
+                          type_map: Dict[Node, Type],
+                          weak: bool = False) \
+        -> Tuple[Optional[Dict[Node, Type]], Optional[Dict[Node, Type]]]:
+    """Find any isinstance checks (within a chain of ands).
+
+    Return value is a map of variables to their types if the condition
+    is true and a map of variables to their types if the condition is false.
+
+    If either of the values in the tuple is None, then that particular
+    branch can never occur.
+
+    Guaranteed to not return None, None. (But may return {}, {})
+    """
+    if isinstance(node, CallExpr):
+        if refers_to_fullname(node.callee, 'builtins.isinstance'):
+            expr = node.args[0]
+            if expr.literal == LITERAL_TYPE:
+                vartype = type_map[expr]
+                type = get_isinstance_type(node.args[1], type_map)
+                if type:
+                    elsetype = vartype
+                    if vartype:
+                        if is_proper_subtype(vartype, type):
+                            elsetype = None
+                            return {expr: type}, None
+                        elif not is_overlapping_types(vartype, type):
+                            return None, {expr: elsetype}
+                        else:
+                            elsetype = restrict_subtype_away(vartype, type)
+                    return {expr: type}, {expr: elsetype}
+                else:
+                    # An isinstance check, but we don't understand the type
+                    if weak:
+                        return {expr: AnyType()}, {expr: vartype}
+    elif isinstance(node, OpExpr) and node.op == 'and':
+        left_if_vars, left_else_vars = find_isinstance_check(
+            node.left,
+            type_map,
+            weak,
+        )
+
+        right_if_vars, right_else_vars = find_isinstance_check(
+            node.right,
+            type_map,
+            weak,
+        )
+        if left_if_vars:
+            if right_if_vars is not None:
+                left_if_vars.update(right_if_vars)
+            else:
+                left_if_vars = None
+        else:
+            left_if_vars = right_if_vars
+
+        # Make no claim about the types in else
+        return left_if_vars, {}
+    elif isinstance(node, UnaryExpr) and node.op == 'not':
+        left, right = find_isinstance_check(node.expr, type_map, weak)
+        return right, left
+
+    # Not a supported isinstance check
+    return {}, {}
+
+
+def get_isinstance_type(node: Node, type_map: Dict[Node, Type]) -> Type:
+    type = type_map[node]
+    if isinstance(type, FunctionLike):
+        if type.is_type_obj():
+            # Type variables may be present -- erase them, which is the best
+            # we can do (short of disallowing them here).
+            return erase_typevars(type.items()[0].ret_type)
+    return None
+
+
+def expand_node(defn: Node, map: Dict[int, Type]) -> Node:
+    visitor = TypeTransformVisitor(map)
+    return defn.accept(visitor)
+
+
+def expand_func(defn: FuncItem, map: Dict[int, Type]) -> FuncItem:
+    return cast(FuncItem, expand_node(defn, map))
+
+
+class TypeTransformVisitor(TransformVisitor):
+    def __init__(self, map: Dict[int, Type]) -> None:
+        super().__init__()
+        self.map = map
+
+    def type(self, type: Type) -> Type:
+        return expand_type(type, self.map)
+
+
+def is_unsafe_overlapping_signatures(signature: Type, other: Type) -> bool:
+    """Check if two signatures may be unsafely overlapping.
+
+    Two signatures s and t are overlapping if both can be valid for the same
+    statically typed values and the return types are incompatible.
+
+    Assume calls are first checked against 'signature', then against 'other'.
+    Thus if 'signature' is more general than 'other', there is no unsafe
+    overlapping.
+
+    TODO If argument types vary covariantly, the return type may vary
+         covariantly as well.
+    """
+    if isinstance(signature, CallableType):
+        if isinstance(other, CallableType):
+            # TODO varargs
+            # TODO keyword args
+            # TODO erasure
+            # TODO allow to vary covariantly
+            # Check if the argument counts are overlapping.
+            min_args = max(signature.min_args, other.min_args)
+            max_args = min(len(signature.arg_types), len(other.arg_types))
+            if min_args > max_args:
+                # Argument counts are not overlapping.
+                return False
+            # Signatures are overlapping iff they are overlapping for the
+            # smallest common argument count.
+            for i in range(min_args):
+                t1 = signature.arg_types[i]
+                t2 = other.arg_types[i]
+                if not is_overlapping_types(t1, t2):
+                    return False
+            # All argument types for the smallest common argument count are
+            # overlapping => the signatures are overlapping. The overlap is
+            # safe if the return types are identical.
+            if is_same_type(signature.ret_type, other.ret_type):
+                return False
+            # If the first signature has more general argument types, the
+            # latter will never be called.
+            if is_more_general_arg_prefix(signature, other):
+                return False
+            return not is_more_precise_signature(signature, other)
+    return True
+
+
+def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
+    """Does t have wider arguments than s?"""
+    # TODO should an overload with additional items be allowed to be more
+    #      general than one with fewer items (or just one item)?
+    # TODO check argument kinds
+    if isinstance(t, CallableType):
+        if isinstance(s, CallableType):
+            return all(is_proper_subtype(args, argt)
+                       for argt, args in zip(t.arg_types, s.arg_types))
+    elif isinstance(t, FunctionLike):
+        if isinstance(s, FunctionLike):
+            if len(t.items()) == len(s.items()):
+                return all(is_same_arg_prefix(items, itemt)
+                           for items, itemt in zip(t.items(), s.items()))
+    return False
+
+
+def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool:
+    # TODO check argument kinds
+    return all(is_same_type(argt, args)
+               for argt, args in zip(t.arg_types, s.arg_types))
+
+
+def is_more_precise_signature(t: CallableType, s: CallableType) -> bool:
+    """Is t more precise than s?
+
+    A signature t is more precise than s if all argument types and the return
+    type of t are more precise than the corresponding types in s.
+
+    Assume that the argument kinds and names are compatible, and that the
+    argument counts are overlapping.
+    """
+    # TODO generic function types
+    # Only consider the common prefix of argument types.
+    for argt, args in zip(t.arg_types, s.arg_types):
+        if not is_more_precise(argt, args):
+            return False
+    return is_more_precise(t.ret_type, s.ret_type)
+
+
+def infer_operator_assignment_method(type: Type, operator: str) -> str:
+    """Return the method used for operator assignment for given value type.
+
+    For example, if operator is '+', return '__iadd__' or '__add__' depending
+    on which method is supported by the type.
+    """
+    method = nodes.op_methods[operator]
+    if isinstance(type, Instance):
+        if operator in nodes.ops_with_inplace_method:
+            inplace = '__i' + method[2:]
+            if type.type.has_readable_member(inplace):
+                method = inplace
+    return method
+
+
+def is_valid_inferred_type(typ: Type) -> bool:
+    """Is an inferred type valid?
+
+    Examples of invalid types include the None type or a type with a None component.
+    """
+    if is_same_type(typ, NoneTyp()):
+        return False
+    elif isinstance(typ, Instance):
+        for arg in typ.args:
+            if not is_valid_inferred_type(arg):
+                return False
+    elif isinstance(typ, TupleType):
+        for item in typ.items:
+            if not is_valid_inferred_type(item):
+                return False
+    return True
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
new file mode 100644
index 0000000..b32e995
--- /dev/null
+++ b/mypy/checkexpr.py
@@ -0,0 +1,1647 @@
+"""Expression type checker. This file is conceptually part of TypeChecker."""
+
+from typing import cast, List, Tuple, Dict, Callable, Union, Optional
+
+from mypy.types import (
+    Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
+    TupleType, Instance, TypeVarType, TypeTranslator, ErasedType, FunctionLike, UnionType,
+    PartialType, DeletedType
+)
+from mypy.nodes import (
+    NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
+    Node, MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
+    OpExpr, UnaryExpr, IndexExpr, CastExpr, TypeApplication, ListExpr,
+    TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context,
+    ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
+    ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
+    DictionaryComprehension, ComplexExpr, EllipsisExpr, LITERAL_TYPE,
+    TypeAliasExpr, YieldExpr, BackquoteExpr, ARG_POS
+)
+from mypy.errors import Errors
+from mypy.nodes import function_type
+from mypy import nodes
+import mypy.checker
+from mypy import types
+from mypy.sametypes import is_same_type
+from mypy.replacetvars import replace_func_type_vars, replace_type_vars
+from mypy.messages import MessageBuilder
+from mypy import messages
+from mypy.infer import infer_type_arguments, infer_function_type_arguments
+from mypy import join
+from mypy.expandtype import expand_type
+from mypy.subtypes import is_subtype, is_more_precise
+from mypy import applytype
+from mypy import erasetype
+from mypy.checkmember import analyze_member_access, type_object_type
+from mypy.semanal import self_type
+from mypy.constraints import get_actual_type
+from mypy.checkstrformat import StringFormatterChecker
+
+
+# Type of callback used for checking individual function arguments. See
+# check_arg() below for details.
+ArgChecker = Callable[[Type, Type, Type, int, int, CallableType, Context, MessageBuilder],
+                      None]
+
+
+class Finished(Exception):
+    """Raised if we can terminate overload argument check early (no match)."""
+
+
+class ExpressionChecker:
+    """Expression type checker.
+
+    This class works closely together with checker.TypeChecker.
+    """
+
+    # Some services are provided by a TypeChecker instance.
+    chk = None  # type: mypy.checker.TypeChecker
+    # This is shared with TypeChecker, but stored also here for convenience.
+    msg = None  # type: MessageBuilder
+
+    strfrm_checker = None  # type: mypy.checkstrformat.StringFormatterChecker
+
+    def __init__(self,
+                 chk: 'mypy.checker.TypeChecker',
+                 msg: MessageBuilder) -> None:
+        """Construct an expression type checker."""
+        self.chk = chk
+        self.msg = msg
+        self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg)
+
+    def visit_name_expr(self, e: NameExpr) -> Type:
+        """Type check a name expression.
+
+        It can be of any kind: local, member or global.
+        """
+        result = self.analyze_ref_expr(e)
+        return self.chk.narrow_type_from_binder(e, result)
+
+    def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
+        result = None  # type: Type
+        node = e.node
+        if isinstance(node, Var):
+            # Variable reference.
+            result = self.analyze_var_ref(node, e)
+            if isinstance(result, PartialType):
+                if result.type is None:
+                    # 'None' partial type. It has a well-defined type. In an lvalue context
+                    # we want to preserve the knowledge of it being a partial type.
+                    if not lvalue:
+                        result = NoneTyp()
+                else:
+                    partial_types = self.chk.find_partial_types(node)
+                    if partial_types is not None and not self.chk.current_node_deferred:
+                        context = partial_types[node]
+                        self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+                    result = AnyType()
+        elif isinstance(node, FuncDef):
+            # Reference to a global function.
+            result = function_type(node, self.named_type('builtins.function'))
+        elif isinstance(node, OverloadedFuncDef):
+            result = node.type
+        elif isinstance(node, TypeInfo):
+            # Reference to a type object.
+            result = type_object_type(node, self.named_type)
+        elif isinstance(node, MypyFile):
+            # Reference to a module object.
+            result = self.named_type('builtins.module')
+        elif isinstance(node, Decorator):
+            result = self.analyze_var_ref(node.var, e)
+        else:
+            # Unknown reference; use any type implicitly to avoid
+            # generating extra type errors.
+            result = AnyType()
+        return result
+
+    def analyze_var_ref(self, var: Var, context: Context) -> Type:
+        if not var.type:
+            if not var.is_ready and self.chk.typing_mode_full():
+                self.chk.handle_cannot_determine_type(var.name(), context)
+            # Implicit 'Any' type.
+            return AnyType()
+        else:
+            # Look up local type of variable with type (inferred or explicit).
+            val = self.chk.binder.get(var)
+            if val is None:
+                return var.type
+            else:
+                return val
+
+    def visit_call_expr(self, e: CallExpr) -> Type:
+        """Type check a call expression."""
+        if e.analyzed:
+            # It's really a special form that only looks like a call.
+            return self.accept(e.analyzed, self.chk.type_context[-1])
+        self.try_infer_partial_type(e)
+        self.accept(e.callee)
+        # Access callee type directly, since accept may return the Any type
+        # even if the type is known (in a dynamically typed function). This
+        # way we get a more precise callee in dynamically typed functions.
+        callee_type = self.chk.type_map[e.callee]
+        return self.check_call_expr_with_callee_type(callee_type, e)
+
+    # Types and methods that can be used to infer partial types.
+    item_args = {'builtins.list': ['append'],
+                 'builtins.set': ['add', 'discard'],
+                 }
+    container_args = {'builtins.list': {'extend': ['builtins.list']},
+                      'builtins.dict': {'update': ['builtins.dict']},
+                      'builtins.set': {'update': ['builtins.set', 'builtins.list']},
+                      }
+
+    def try_infer_partial_type(self, e: CallExpr) -> None:
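+        # Sketch: after 'x = []' x has a partial list type; a later call such
+        # as 'x.append(1)' lets us infer builtins.list[int] for x here.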
+        if isinstance(e.callee, MemberExpr) and isinstance(e.callee.expr, RefExpr):
+            var = cast(Var, e.callee.expr.node)
+            partial_types = self.chk.find_partial_types(var)
+            if partial_types is not None and not self.chk.current_node_deferred:
+                partial_type_type = cast(PartialType, var.type).type
+                if partial_type_type is None:
+                    # A partial None type -> can't infer anything.
+                    return
+                typename = partial_type_type.fullname()
+                methodname = e.callee.name
+                # Sometimes we can infer a full type for a partial List, Dict or Set type.
+                # TODO: Don't infer argument expression twice.
+                if (typename in self.item_args and methodname in self.item_args[typename]
+                        and e.arg_kinds == [ARG_POS]):
+                    item_type = self.accept(e.args[0])
+                    if mypy.checker.is_valid_inferred_type(item_type):
+                        var.type = self.chk.named_generic_type(typename, [item_type])
+                        del partial_types[var]
+                elif (typename in self.container_args
+                      and methodname in self.container_args[typename]
+                      and e.arg_kinds == [ARG_POS]):
+                    arg_type = self.accept(e.args[0])
+                    if isinstance(arg_type, Instance):
+                        arg_typename = arg_type.type.fullname()
+                        if arg_typename in self.container_args[typename][methodname]:
+                            if all(mypy.checker.is_valid_inferred_type(item_type)
+                                   for item_type in arg_type.args):
+                                var.type = self.chk.named_generic_type(typename,
+                                                                       list(arg_type.args))
+                                del partial_types[var]
+
+    def check_call_expr_with_callee_type(self, callee_type: Type,
+                                         e: CallExpr) -> Type:
+        """Type check call expression.
+
+        The given callee type overrides the type of the callee
+        expression.
+        """
+        return self.check_call(callee_type, e.args, e.arg_kinds, e,
+                               e.arg_names, callable_node=e.callee)[0]
+
+    def check_call(self, callee: Type, args: List[Node],
+                   arg_kinds: List[int], context: Context,
+                   arg_names: List[str] = None,
+                   callable_node: Node = None,
+                   arg_messages: MessageBuilder = None) -> Tuple[Type, Type]:
+        """Type check a call.
+
+        Also infer type arguments if the callee is a generic function.
+
+        Return (result type, inferred callee type).
+
+        Arguments:
+          callee: type of the called value
+          args: actual argument expressions
+          arg_kinds: contains nodes.ARG_* constant for each argument in args
+            describing whether the argument is positional, *arg, etc.
+          arg_names: names of arguments (optional)
+          callable_node: associate the inferred callable type to this node,
+            if specified
+          arg_messages: TODO
+        """
+        arg_messages = arg_messages or self.msg
+        if isinstance(callee, CallableType):
+            if callee.is_type_obj() and callee.type_object().is_abstract:
+                type = callee.type_object()
+                self.msg.cannot_instantiate_abstract_class(
+                    callee.type_object().name(), type.abstract_attributes,
+                    context)
+
+            formal_to_actual = map_actuals_to_formals(
+                arg_kinds, arg_names,
+                callee.arg_kinds, callee.arg_names,
+                lambda i: self.accept(args[i]))
+
+            if callee.is_generic():
+                callee = self.infer_function_type_arguments_using_context(
+                    callee, context)
+                callee = self.infer_function_type_arguments(
+                    callee, args, arg_kinds, formal_to_actual, context)
+
+            arg_types = self.infer_arg_types_in_context2(
+                callee, args, arg_kinds, formal_to_actual)
+
+            self.check_argument_count(callee, arg_types, arg_kinds,
+                                      arg_names, formal_to_actual, context, self.msg)
+
+            self.check_argument_types(arg_types, arg_kinds, callee,
+                                      formal_to_actual, context,
+                                      messages=arg_messages)
+            if callable_node:
+                # Store the inferred callable type.
+                self.chk.store_type(callable_node, callee)
+            return callee.ret_type, callee
+        elif isinstance(callee, Overloaded):
+            # Type check arguments in empty context. They will be checked again
+            # later in a context derived from the signature; these types are
+            # only used to pick a signature variant.
+            self.msg.disable_errors()
+            arg_types = self.infer_arg_types_in_context(None, args)
+            self.msg.enable_errors()
+
+            target = self.overload_call_target(arg_types, arg_kinds, arg_names,
+                                               callee, context,
+                                               messages=arg_messages)
+            return self.check_call(target, args, arg_kinds, context, arg_names,
+                                   arg_messages=arg_messages)
+        elif isinstance(callee, AnyType) or self.chk.typing_mode_none():
+            self.infer_arg_types_in_context(None, args)
+            return AnyType(), AnyType()
+        elif isinstance(callee, UnionType):
+            self.msg.disable_type_names += 1
+            results = [self.check_call(subtype, args, arg_kinds, context, arg_names,
+                                       arg_messages=arg_messages)
+                       for subtype in callee.items]
+            self.msg.disable_type_names -= 1
+            return (UnionType.make_simplified_union([res[0] for res in results]),
+                    callee)
+        elif isinstance(callee, Instance):
+            call_function = analyze_member_access('__call__', callee, context,
+                                         False, False, self.named_type, self.not_ready_callback,
+                                         self.msg)
+            return self.check_call(call_function, args, arg_kinds, context, arg_names,
+                                   callable_node, arg_messages)
+        else:
+            return self.msg.not_callable(callee, context), AnyType()
+
+    def infer_arg_types_in_context(self, callee: CallableType,
+                                   args: List[Node]) -> List[Type]:
+        """Infer argument expression types using a callable type as context.
+
+        For example, if callee argument 2 has type List[int], infer the
+        argument expression with List[int] type context.
+        """
+        # TODO Always called with callee as None, i.e. empty context.
+        res = []  # type: List[Type]
+
+        fixed = len(args)
+        if callee:
+            fixed = min(fixed, callee.max_fixed_args())
+
+        arg_type = None  # type: Type
+        ctx = None  # type: Type
+        for i, arg in enumerate(args):
+            if i < fixed:
+                if callee and i < len(callee.arg_types):
+                    ctx = callee.arg_types[i]
+                arg_type = self.accept(arg, ctx)
+            else:
+                if callee and callee.is_var_arg:
+                    arg_type = self.accept(arg, callee.arg_types[-1])
+                else:
+                    arg_type = self.accept(arg)
+            if isinstance(arg_type, ErasedType):
+                res.append(NoneTyp())
+            else:
+                res.append(arg_type)
+        return res
+
+    def infer_arg_types_in_context2(
+            self, callee: CallableType, args: List[Node], arg_kinds: List[int],
+            formal_to_actual: List[List[int]]) -> List[Type]:
+        """Infer argument expression types using a callable type as context.
+
+        For example, if callee argument 2 has type List[int], infer the
+        argument expression with List[int] type context.
+
+        Returns the inferred types of *actual arguments*.
+        """
+        res = [None] * len(args)  # type: List[Type]
+
+        for i, actuals in enumerate(formal_to_actual):
+            for ai in actuals:
+                if arg_kinds[ai] != nodes.ARG_STAR:
+                    res[ai] = self.accept(args[ai], callee.arg_types[i])
+
+        # Fill in the rest of the argument types.
+        for i, t in enumerate(res):
+            if not t:
+                res[i] = self.accept(args[i])
+        return res
+
+    def infer_function_type_arguments_using_context(
+            self, callable: CallableType, error_context: Context) -> CallableType:
+        """Unify callable return type to type context to infer type vars.
+
+        For example, if the return type is set[t] where 't' is a type variable
+        of callable, and if the context is set[int], return callable modified
+        by substituting 't' with 'int'.
+        """
+        ctx = self.chk.type_context[-1]
+        if not ctx:
+            return callable
+        # The return type may have references to function type variables that
+        # we are inferring right now. We must consider them as indeterminate
+        # and they are not potential results; thus we replace them with the
+        # special ErasedType type. On the other hand, class type variables are
+        # valid results.
+        erased_ctx = replace_func_type_vars(ctx, ErasedType())
+        ret_type = callable.ret_type
+        if isinstance(ret_type, TypeVarType):
+            if ret_type.values or (not isinstance(ctx, Instance) or
+                                   not cast(Instance, ctx).args):
+                # The return type is a type variable. If it has values, we can't easily restrict
+                # type inference to conform to the valid values. If it's unrestricted, we could
+                # infer a too general type for the type variable if we use context, and this could
+                # result in confusing and spurious type errors elsewhere.
+                #
+                # Give up and just use function arguments for type inference. As an exception,
+                # if the context is a generic instance type, actually use it as context, as
+                # this *seems* to usually be the reasonable thing to do.
+                #
+                # See also github issues #462 and #360.
+                ret_type = NoneTyp()
+        args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
+        # Only substitute non-None and non-erased types.
+        new_args = []  # type: List[Type]
+        for arg in args:
+            if isinstance(arg, NoneTyp) or has_erased_component(arg):
+                new_args.append(None)
+            else:
+                new_args.append(arg)
+        return cast(CallableType, self.apply_generic_arguments(callable, new_args,
+                                                           error_context))
+
+    def infer_function_type_arguments(self, callee_type: CallableType,
+                                      args: List[Node],
+                                      arg_kinds: List[int],
+                                      formal_to_actual: List[List[int]],
+                                      context: Context) -> CallableType:
+        """Infer the type arguments for a generic callee type.
+
+        Infer based on the types of arguments.
+
+        Return a derived callable type that has the arguments applied (and
+        stored as implicit type arguments).
+        """
+        if not self.chk.typing_mode_none():
+            # Disable type errors during type inference. There may be errors
+            # due to partial available context information at this time, but
+            # these errors can be safely ignored as the arguments will be
+            # inferred again later.
+            self.msg.disable_errors()
+
+            arg_types = self.infer_arg_types_in_context2(
+                callee_type, args, arg_kinds, formal_to_actual)
+
+            self.msg.enable_errors()
+
+            arg_pass_nums = self.get_arg_infer_passes(
+                callee_type.arg_types, formal_to_actual, len(args))
+
+            pass1_args = []  # type: List[Type]
+            for i, arg in enumerate(arg_types):
+                if arg_pass_nums[i] > 1:
+                    pass1_args.append(None)
+                else:
+                    pass1_args.append(arg)
+
+            inferred_args = infer_function_type_arguments(
+                callee_type, pass1_args, arg_kinds, formal_to_actual,
+                strict=self.chk.typing_mode_full())  # type: List[Type]
+
+            if 2 in arg_pass_nums:
+                # Second pass of type inference.
+                (callee_type,
+                 inferred_args) = self.infer_function_type_arguments_pass2(
+                    callee_type, args, arg_kinds, formal_to_actual,
+                    inferred_args, context)
+        else:
+            # In dynamically typed functions use implicit 'Any' types for
+            # type variables.
+            inferred_args = [AnyType()] * len(callee_type.variables)
+        return self.apply_inferred_arguments(callee_type, inferred_args,
+                                             context)
+
+    def infer_function_type_arguments_pass2(
+            self, callee_type: CallableType,
+            args: List[Node],
+            arg_kinds: List[int],
+            formal_to_actual: List[List[int]],
+            inferred_args: List[Type],
+            context: Context) -> Tuple[CallableType, List[Type]]:
+        """Perform second pass of generic function type argument inference.
+
+        The second pass is needed for arguments with types such as Callable[[T], S],
+        where both T and S are type variables, when the actual argument is a
+        lambda with inferred types.  The idea is to infer the type variable T
+        in the first pass (based on the types of other arguments).  This lets
+        us infer the argument and return type of the lambda expression and
+        thus also the type variable S in this second pass.
+
+        Return (the callee with type vars applied, inferred actual arg types).
+        """
+        # None or erased types in inferred types mean that there was not enough
+        # information to infer the argument. Replace them with None values so
+        # that they are not applied yet below.
+        for i, arg in enumerate(inferred_args):
+            if isinstance(arg, NoneTyp) or isinstance(arg, ErasedType):
+                inferred_args[i] = None
+
+        callee_type = cast(CallableType, self.apply_generic_arguments(
+            callee_type, inferred_args, context))
+        arg_types = self.infer_arg_types_in_context2(
+            callee_type, args, arg_kinds, formal_to_actual)
+
+        inferred_args = infer_function_type_arguments(
+            callee_type, arg_types, arg_kinds, formal_to_actual)
+
+        return callee_type, inferred_args
+
+    def get_arg_infer_passes(self, arg_types: List[Type],
+                             formal_to_actual: List[List[int]],
+                             num_actuals: int) -> List[int]:
+        """Return pass numbers for args for two-pass argument type inference.
+
+        For each actual, the pass number is either 1 (first pass) or 2 (second
+        pass).
+
+        Two-pass argument type inference primarily lets us infer types of
+        lambdas more effectively.
+        """
+        res = [1] * num_actuals
+        for i, arg in enumerate(arg_types):
+            if arg.accept(ArgInferSecondPassQuery()):
+                for j in formal_to_actual[i]:
+                    res[j] = 2
+        return res
+
+    def apply_inferred_arguments(self, callee_type: CallableType,
+                                 inferred_args: List[Type],
+                                 context: Context) -> CallableType:
+        """Apply inferred values of type arguments to a generic function.
+
+        Inferred_args contains the values of function type arguments.
+        """
+        # Report error if some of the variables could not be solved. In that
+        # case assume that all variables have type Any to avoid extra
+        # bogus error messages.
+        for i, inferred_type in enumerate(inferred_args):
+            if not inferred_type:
+                # Could not infer a non-trivial type for a type variable.
+                self.msg.could_not_infer_type_arguments(
+                    callee_type, i + 1, context)
+                inferred_args = [AnyType()] * len(inferred_args)
+        # Apply the inferred types to the function type. In this case the
+        # return type must be CallableType, since we give the right number of type
+        # arguments.
+        return cast(CallableType, self.apply_generic_arguments(callee_type,
+                                                           inferred_args, context))
+
+    def check_argument_count(self, callee: CallableType, actual_types: List[Type],
+                             actual_kinds: List[int], actual_names: List[str],
+                             formal_to_actual: List[List[int]],
+                             context: Context,
+                             messages: Optional[MessageBuilder]) -> bool:
+        """Check that there is a value for all required arguments to a function.
+
+        Also check that there are no duplicate values for arguments. Report any
+        errors found using 'messages' if it is not None.
+
+        Return False if there were any errors; otherwise return True.
+        """
+        # TODO(jukka): We could return as soon as we find an error if messages is None.
+        formal_kinds = callee.arg_kinds
+
+        # Collect list of all actual arguments matched to formal arguments.
+        all_actuals = []  # type: List[int]
+        for actuals in formal_to_actual:
+            all_actuals.extend(actuals)
+
+        is_unexpected_arg_error = False  # Keep track of errors to avoid duplicate errors.
+        ok = True  # False if we've found any error.
+        for i, kind in enumerate(actual_kinds):
+            if i not in all_actuals and (
+                    kind != nodes.ARG_STAR or
+                    not is_empty_tuple(actual_types[i])):
+                # Extra actual: not matched by a formal argument.
+                ok = False
+                if kind != nodes.ARG_NAMED:
+                    if messages:
+                        messages.too_many_arguments(callee, context)
+                else:
+                    if messages:
+                        messages.unexpected_keyword_argument(
+                            callee, actual_names[i], context)
+                    is_unexpected_arg_error = True
+            elif kind == nodes.ARG_STAR and (
+                    nodes.ARG_STAR not in formal_kinds):
+                actual_type = actual_types[i]
+                if isinstance(actual_type, TupleType):
+                    if all_actuals.count(i) < len(actual_type.items):
+                        # Too many tuple items as some did not match.
+                        if messages:
+                            messages.too_many_arguments(callee, context)
+                        ok = False
+                # *args can be applied even if the function takes a fixed
+                # number of positional arguments. This may succeed at runtime.
+
+        for i, kind in enumerate(formal_kinds):
+            if kind == nodes.ARG_POS and (not formal_to_actual[i] and
+                                          not is_unexpected_arg_error):
+                # No actual for a mandatory positional formal.
+                if messages:
+                    messages.too_few_arguments(callee, context, actual_names)
+                ok = False
+            elif kind in [nodes.ARG_POS, nodes.ARG_OPT,
+                          nodes.ARG_NAMED] and is_duplicate_mapping(
+                    formal_to_actual[i], actual_kinds):
+                if (self.chk.typing_mode_full() or
+                        isinstance(actual_types[formal_to_actual[i][0]], TupleType)):
+                    if messages:
+                        messages.duplicate_argument_value(callee, i, context)
+                    ok = False
+            elif (kind == nodes.ARG_NAMED and formal_to_actual[i] and
+                  actual_kinds[formal_to_actual[i][0]] != nodes.ARG_NAMED):
+                # Positional argument when expecting a keyword argument.
+                if messages:
+                    messages.too_many_positional_arguments(callee, context)
+                ok = False
+        return ok
+
+    def check_argument_types(self, arg_types: List[Type], arg_kinds: List[int],
+                             callee: CallableType,
+                             formal_to_actual: List[List[int]],
+                             context: Context,
+                             messages: MessageBuilder = None,
+                             check_arg: ArgChecker = None) -> None:
+        """Check argument types against a callable type.
+
+        Report errors if the argument types are not compatible.
+        """
+        messages = messages or self.msg
+        check_arg = check_arg or self.check_arg
+        # Keep track of consumed tuple *arg items.
+        tuple_counter = [0]
+        for i, actuals in enumerate(formal_to_actual):
+            for actual in actuals:
+                arg_type = arg_types[actual]
+                # Check that a *arg is valid as varargs.
+                if (arg_kinds[actual] == nodes.ARG_STAR and
+                        not self.is_valid_var_arg(arg_type)):
+                    messages.invalid_var_arg(arg_type, context)
+                if (arg_kinds[actual] == nodes.ARG_STAR2 and
+                        not self.is_valid_keyword_var_arg(arg_type)):
+                    messages.invalid_keyword_var_arg(arg_type, context)
+                # Get the type of an individual actual argument (for *args
+                # and **args this is the item type, not the collection type).
+                actual_type = get_actual_type(arg_type, arg_kinds[actual],
+                                              tuple_counter)
+                check_arg(actual_type, arg_type,
+                          callee.arg_types[i],
+                          actual + 1, i + 1, callee, context, messages)
+
+                # There may be some remaining tuple varargs items that haven't
+                # been checked yet. Handle them.
+                if (callee.arg_kinds[i] == nodes.ARG_STAR and
+                        arg_kinds[actual] == nodes.ARG_STAR and
+                        isinstance(arg_types[actual], TupleType)):
+                    tuplet = cast(TupleType, arg_types[actual])
+                    while tuple_counter[0] < len(tuplet.items):
+                        actual_type = get_actual_type(arg_type,
+                                                      arg_kinds[actual],
+                                                      tuple_counter)
+                        check_arg(actual_type, arg_type,
+                                  callee.arg_types[i],
+                                  actual + 1, i + 1, callee, context, messages)
+
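+    # Rough illustration (hypothetical types): passing '*t' with 't' of type
+    # Tuple[int, str] to a '*args' formal makes tuple_counter walk the tuple,
+    # so 'int' and 'str' are each checked against the formal's item type
+    # instead of checking the tuple as a single value.
+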
+    def check_arg(self, caller_type: Type, original_caller_type: Type,
+                  callee_type: Type, n: int, m: int, callee: CallableType,
+                  context: Context, messages: MessageBuilder) -> None:
+        """Check the type of a single argument in a call."""
+        if isinstance(caller_type, Void):
+            messages.does_not_return_value(caller_type, context)
+        elif isinstance(caller_type, DeletedType):
+            messages.deleted_as_rvalue(caller_type, context)
+        elif not is_subtype(caller_type, callee_type):
+            messages.incompatible_argument(n, m, callee, original_caller_type,
+                                           context)
+
+    def overload_call_target(self, arg_types: List[Type], arg_kinds: List[int],
+                             arg_names: List[str],
+                             overload: Overloaded, context: Context,
+                             messages: MessageBuilder = None) -> Type:
+        """Infer the correct overload item to call with given argument types.
+
+        The return value may be CallableType or AnyType (if a unique item
+        could not be determined).
+        """
+        messages = messages or self.msg
+        # TODO also consider argument names and kinds
+        # TODO for overlapping signatures we should try to get a more precise
+        #      result than 'Any'
+        match = []  # type: List[CallableType]
+        best_match = 0
+        for typ in overload.items():
+            similarity = self.erased_signature_similarity(arg_types, arg_kinds, arg_names,
+                                                          typ)
+            if similarity > 0 and similarity >= best_match:
+                if (match and not is_same_type(match[-1].ret_type,
+                                               typ.ret_type) and
+                    not mypy.checker.is_more_precise_signature(
+                        match[-1], typ)):
+                    # Ambiguous return type. Either the function overload is
+                    # overlapping (which results in an error elsewhere) or the
+                    # caller has provided some Any argument types; in
+                    # either case can only infer the type to be Any, as it is
+                    # not an error to use Any types in calls.
+                    #
+                    # Overlapping overload items are fine if the items are
+                    # covariant in both argument types and return types with
+                    # respect to type precision.
+                    return AnyType()
+                else:
+                    match.append(typ)
+                best_match = max(best_match, similarity)
+        if not match:
+            messages.no_variant_matches_arguments(overload, arg_types, context)
+            return AnyType()
+        else:
+            if len(match) == 1:
+                return match[0]
+            else:
+                # More than one signature matches. Pick the first *non-erased*
+                # matching signature, or default to the first one if none
+                # match.
+                for m in match:
+                    if self.match_signature_types(arg_types, arg_kinds, arg_names, m):
+                        return m
+                return match[0]
+
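+    # Rough illustration: if an 'Any' argument matches two overload items
+    # equally well and their return types differ (say 'int' vs 'str') without
+    # one signature being more precise, the loop above returns AnyType()
+    # instead of picking one of them.
+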
+    def erased_signature_similarity(self, arg_types: List[Type], arg_kinds: List[int],
+                                    arg_names: List[str], callee: CallableType) -> int:
+        """Determine whether arguments could match the signature at runtime.
+
+        This is used for overload resolution.
+
+        Return similarity level (0 = no match, 1 = can match, 2 = non-promotion match). See
+        overload_arg_similarity for a discussion of similarity levels.
+        """
+        formal_to_actual = map_actuals_to_formals(arg_kinds,
+                                                  arg_names,
+                                                  callee.arg_kinds,
+                                                  callee.arg_names,
+                                                  lambda i: arg_types[i])
+
+        if not self.check_argument_count(callee, arg_types, arg_kinds, arg_names,
+                                         formal_to_actual, None, None):
+            # Too few or many arguments -> no match.
+            return 0
+
+        similarity = 2
+
+        def check_arg(caller_type: Type, original_caller_type: Type,
+                      callee_type: Type, n: int, m: int, callee: CallableType,
+                      context: Context, messages: MessageBuilder) -> None:
+            nonlocal similarity
+            similarity = min(similarity,
+                             overload_arg_similarity(caller_type, callee_type))
+            if similarity == 0:
+                # No match -- exit early since none of the remaining work can change
+                # the result.
+                raise Finished
+
+        try:
+            self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual,
+                                      None, check_arg=check_arg)
+        except Finished:
+            pass
+
+        return similarity
+
+    def match_signature_types(self, arg_types: List[Type], arg_kinds: List[int],
+                              arg_names: List[str], callee: CallableType) -> bool:
+        """Determine whether arguments types match the signature.
+
+        Assume that argument counts are compatible.
+
+        Return True if arguments match.
+        """
+        formal_to_actual = map_actuals_to_formals(arg_kinds,
+                                                  arg_names,
+                                                  callee.arg_kinds,
+                                                  callee.arg_names,
+                                                  lambda i: arg_types[i])
+        ok = True
+
+        def check_arg(caller_type: Type, original_caller_type: Type,
+                      callee_type: Type, n: int, m: int, callee: CallableType,
+                      context: Context, messages: MessageBuilder) -> None:
+            nonlocal ok
+            if not is_subtype(caller_type, callee_type):
+                ok = False
+
+        self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual,
+                                  None, check_arg=check_arg)
+        return ok
+
+    def apply_generic_arguments(self, callable: CallableType, types: List[Type],
+                                context: Context) -> Type:
+        """Simple wrapper around mypy.applytype.apply_generic_arguments."""
+        return applytype.apply_generic_arguments(callable, types, self.msg, context)
+
+    def apply_generic_arguments2(self, overload: Overloaded, types: List[Type],
+                                 context: Context) -> Type:
+        items = []  # type: List[CallableType]
+        for item in overload.items():
+            applied = self.apply_generic_arguments(item, types, context)
+            if isinstance(applied, CallableType):
+                items.append(applied)
+            else:
+                # There was an error.
+                return AnyType()
+        return Overloaded(items)
+
+    def visit_member_expr(self, e: MemberExpr) -> Type:
+        """Visit member expression (of form e.id)."""
+        result = self.analyze_ordinary_member_access(e, False)
+        return self.chk.narrow_type_from_binder(e, result)
+
+    def analyze_ordinary_member_access(self, e: MemberExpr,
+                                       is_lvalue: bool) -> Type:
+        """Analyse member expression or member lvalue."""
+        if e.kind is not None:
+            # This is a reference to a module attribute.
+            return self.analyze_ref_expr(e)
+        else:
+            # This is a reference to a non-module attribute.
+            return analyze_member_access(e.name, self.accept(e.expr), e,
+                                         is_lvalue, False,
+                                         self.named_type, self.not_ready_callback, self.msg)
+
+    def analyze_external_member_access(self, member: str, base_type: Type,
+                                       context: Context) -> Type:
+        """Analyse member access that is external, i.e. it cannot
+        refer to private definitions. Return the result type.
+        """
+        # TODO remove; no private definitions in mypy
+        return analyze_member_access(member, base_type, context, False, False,
+                                     self.named_type, self.not_ready_callback, self.msg)
+
+    def visit_int_expr(self, e: IntExpr) -> Type:
+        """Type check an integer literal (trivial)."""
+        return self.named_type('builtins.int')
+
+    def visit_str_expr(self, e: StrExpr) -> Type:
+        """Type check a string literal (trivial)."""
+        return self.named_type('builtins.str')
+
+    def visit_bytes_expr(self, e: BytesExpr) -> Type:
+        """Type check a bytes literal (trivial)."""
+        return self.named_type('builtins.bytes')
+
+    def visit_unicode_expr(self, e: UnicodeExpr) -> Type:
+        """Type check a unicode literal (trivial)."""
+        return self.named_type('builtins.unicode')
+
+    def visit_float_expr(self, e: FloatExpr) -> Type:
+        """Type check a float literal (trivial)."""
+        return self.named_type('builtins.float')
+
+    def visit_complex_expr(self, e: ComplexExpr) -> Type:
+        """Type check a complex literal."""
+        return self.named_type('builtins.complex')
+
+    def visit_ellipsis(self, e: EllipsisExpr) -> Type:
+        """Type check '...'."""
+        if self.chk.pyversion[0] >= 3:
+            return self.named_type('builtins.ellipsis')
+        else:
+            # '...' is not valid in normal Python 2 code, but it can
+            # be used in stubs.  The parser makes sure that we only
+            # get this far if we are in a stub, and we can safely
+            # return 'object' as ellipsis is special cased elsewhere.
+            # The builtins.ellipsis type does not exist in Python 2.
+            return self.named_type('builtins.object')
+
+    def visit_op_expr(self, e: OpExpr) -> Type:
+        """Type check a binary operator expression."""
+        if e.op == 'and' or e.op == 'or':
+            return self.check_boolean_op(e, e)
+        if e.op == '*' and isinstance(e.left, ListExpr):
+            # Expressions of form [...] * e get special type inference.
+            return self.check_list_multiply(e)
+        if e.op == '%' and isinstance(e.left, StrExpr):
+            return self.strfrm_checker.check_str_interpolation(cast(StrExpr, e.left), e.right)
+        left_type = self.accept(e.left)
+
+        if e.op in nodes.op_methods:
+            method = self.get_operator_method(e.op)
+            result, method_type = self.check_op(method, left_type, e.right, e,
+                                                allow_reverse=True)
+            e.method_type = method_type
+            return result
+        else:
+            raise RuntimeError('Unknown operator {}'.format(e.op))
+
+    def visit_comparison_expr(self, e: ComparisonExpr) -> Type:
+        """Type check a comparison expression.
+
+        Comparison expressions are type checked consecutive-pair-wise.
+        That is, 'a < b > c == d' is checked as 'a < b and b > c and c == d'.
+        """
+        result = None  # type: mypy.types.Type
+
+        # Check each consecutive operand pair and their operator
+        for left, right, operator in zip(e.operands, e.operands[1:], e.operators):
+            left_type = self.accept(left)
+
+            method_type = None  # type: mypy.types.Type
+
+            if operator == 'in' or operator == 'not in':
+                right_type = self.accept(right)  # TODO only evaluate if needed
+
+                # Keep track of whether we get type check errors (these won't be reported, they
+                # are just to verify whether something is valid typing wise).
+                local_errors = self.msg.copy()
+                local_errors.disable_count = 0
+                sub_result, method_type = self.check_op_local('__contains__', right_type,
+                                                          left, e, local_errors)
+                if isinstance(right_type, PartialType):
+                    # We don't really know if this is an error or not, so just shut up.
+                    pass
+                elif (local_errors.is_errors() and
+                    # is_valid_var_arg is True for any Iterable
+                        self.is_valid_var_arg(right_type)):
+                    itertype = self.chk.analyze_iterable_item_type(right)
+                    method_type = CallableType(
+                        [left_type],
+                        [nodes.ARG_POS],
+                        [None],
+                        self.chk.bool_type(),
+                        self.named_type('builtins.function'))
+                    sub_result = self.chk.bool_type()
+                    if not is_subtype(left_type, itertype):
+                        self.msg.unsupported_operand_types('in', left_type, right_type, e)
+                else:
+                    self.msg.add_errors(local_errors)
+                if operator == 'not in':
+                    sub_result = self.chk.bool_type()
+            elif operator in nodes.op_methods:
+                method = self.get_operator_method(operator)
+                sub_result, method_type = self.check_op(method, left_type, right, e,
+                                                    allow_reverse=True)
+
+            elif operator == 'is' or operator == 'is not':
+                sub_result = self.chk.bool_type()
+                method_type = None
+            else:
+                raise RuntimeError('Unknown comparison operator {}'.format(operator))
+
+            e.method_types.append(method_type)
+
+            #  Determine type of boolean-and of result and sub_result
+            if result is None:
+                result = sub_result
+            else:
+                # TODO: check on void needed?
+                self.check_not_void(sub_result, e)
+                result = join.join_types(result, sub_result)
+
+        return result
+
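+    # Rough illustration: for 'x in y' the code first tries
+    # y.__contains__(x); if that errors but 'y' is at least an Iterable, it
+    # falls back to comparing the type of 'x' against the iterable's item
+    # type, so membership tests still type check for iterables without an
+    # explicit __contains__.
+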
+    def get_operator_method(self, op: str) -> str:
+        if op == '/' and self.chk.pyversion[0] == 2:
+            # TODO also check for "from __future__ import division"
+            return '__div__'
+        else:
+            return nodes.op_methods[op]
+
+    def check_op_local(self, method: str, base_type: Type, arg: Node,
+                       context: Context, local_errors: MessageBuilder) -> Tuple[Type, Type]:
+        """Type check a binary operation which maps to a method call.
+
+        Return tuple (result type, inferred operator method type).
+        """
+        method_type = analyze_member_access(method, base_type, context, False, False,
+                                            self.named_type, self.not_ready_callback, local_errors)
+        return self.check_call(method_type, [arg], [nodes.ARG_POS],
+                               context, arg_messages=local_errors)
+
+    def check_op(self, method: str, base_type: Type, arg: Node,
+                 context: Context,
+                 allow_reverse: bool = False) -> Tuple[Type, Type]:
+        """Type check a binary operation which maps to a method call.
+
+        Return tuple (result type, inferred operator method type).
+        """
+        # Use a local error storage for errors related to invalid argument
+        # type (but NOT other errors). This error may need to be suppressed
+        # for operators which support __rX methods.
+        local_errors = self.msg.copy()
+        local_errors.disable_count = 0
+        if not allow_reverse or self.has_member(base_type, method):
+            result = self.check_op_local(method, base_type, arg, context,
+                                         local_errors)
+            if allow_reverse:
+                arg_type = self.chk.type_map[arg]
+                if isinstance(arg_type, AnyType):
+                    # If the right operand has type Any, we can't make any
+                    # conjectures about the type of the result, since the
+                    # operand could have a __r method that returns anything.
+
+                    # However, in weak mode, we do make conjectures.
+                    if not self.chk.typing_mode_weak():
+                        result = AnyType(), result[1]
+            success = not local_errors.is_errors()
+        else:
+            result = AnyType(), AnyType()
+            success = False
+        if success or not allow_reverse or isinstance(base_type, AnyType):
+            # We were able to call the normal variant of the operator method,
+            # or there was some problem not related to argument type
+            # validity, or the operator has no __rX method. In any case, we
+            # don't need to consider the __rX method.
+            self.msg.add_errors(local_errors)
+            return result
+        else:
+            # Calling the operator method was unsuccessful. Try the __rX
+            # method of the other operand instead.
+            rmethod = self.get_reverse_op_method(method)
+            arg_type = self.accept(arg)
+            if self.has_member(arg_type, rmethod):
+                method_type = self.analyze_external_member_access(
+                    rmethod, arg_type, context)
+                temp = TempNode(base_type)
+                return self.check_call(method_type, [temp], [nodes.ARG_POS],
+                                       context)
+            else:
+                # No __rX method either. Do deferred type checking to produce
+                # error message that we may have missed previously.
+                # TODO Fix type checking an expression more than once.
+                return self.check_op_local(method, base_type, arg, context,
+                                           self.msg)
+
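+    # Rough illustration: for 'a + b', check_op first tries a.__add__(b);
+    # only when that fails on argument types (and 'a' is not Any) does it
+    # retry with the reverse method b.__radd__(a), mirroring the runtime
+    # dispatch order.
+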
+    def get_reverse_op_method(self, method: str) -> str:
+        if method == '__div__' and self.chk.pyversion[0] == 2:
+            return '__rdiv__'
+        else:
+            return nodes.reverse_op_methods[method]
+
+    def check_boolean_op(self, e: OpExpr, context: Context) -> Type:
+        """Type check a boolean operation ('and' or 'or')."""
+
+        # A boolean operation can evaluate to either of the operands.
+
+        # We use the current type context to guide the type inference of
+        # the left operand. We also use the left operand type to guide the type
+        # inference of the right operand so that expressions such as
+        # '[1] or []' are inferred correctly.
+        ctx = self.chk.type_context[-1]
+        left_type = self.accept(e.left, ctx)
+
+        if e.op == 'and':
+            # else_map unused
+            if_map, else_map = \
+                mypy.checker.find_isinstance_check(e.left, self.chk.type_map,
+                                                   self.chk.typing_mode_weak())
+        else:
+            if_map = None
+
+        self.chk.binder.push_frame()
+        if if_map:
+            for var, type in if_map.items():
+                self.chk.binder.push(var, type)
+
+        right_type = self.accept(e.right, left_type)
+
+        self.chk.binder.pop_frame()
+
+        self.check_not_void(left_type, context)
+        self.check_not_void(right_type, context)
+        return UnionType.make_simplified_union([left_type, right_type])
+
+    def check_list_multiply(self, e: OpExpr) -> Type:
+        """Type check an expression of form '[...] * e'.
+
+        Type inference is special-cased for this common construct.
+        """
+        right_type = self.accept(e.right)
+        if is_subtype(right_type, self.named_type('builtins.int')):
+            # Special case: [...] * <int value>. Use the type context of the
+            # OpExpr, since the multiplication does not affect the type.
+            left_type = self.accept(e.left, context=self.chk.type_context[-1])
+        else:
+            left_type = self.accept(e.left)
+        result, method_type = self.check_op('__mul__', left_type, e.right, e)
+        e.method_type = method_type
+        return result
+
+    def visit_unary_expr(self, e: UnaryExpr) -> Type:
+        """Type check an unary operation ('not', '-', '+' or '~')."""
+        operand_type = self.accept(e.expr)
+        op = e.op
+        if op == 'not':
+            self.check_not_void(operand_type, e)
+            result = self.chk.bool_type()  # type: Type
+        elif op == '-':
+            method_type = self.analyze_external_member_access('__neg__',
+                                                              operand_type, e)
+            result, method_type = self.check_call(method_type, [], [], e)
+            e.method_type = method_type
+        elif op == '+':
+            method_type = self.analyze_external_member_access('__pos__',
+                                                              operand_type, e)
+            result, method_type = self.check_call(method_type, [], [], e)
+            e.method_type = method_type
+        else:
+            assert op == '~', "unhandled unary operator"
+            method_type = self.analyze_external_member_access('__invert__',
+                                                              operand_type, e)
+            result, method_type = self.check_call(method_type, [], [], e)
+            e.method_type = method_type
+        return result
+
+    def visit_index_expr(self, e: IndexExpr) -> Type:
+        """Type check an index expression (base[index]).
+
+        It may also represent type application.
+        """
+        result = self.visit_index_expr_helper(e)
+        return self.chk.narrow_type_from_binder(e, result)
+
+    def visit_index_expr_helper(self, e: IndexExpr) -> Type:
+        if e.analyzed:
+            # It's actually a type application.
+            return self.accept(e.analyzed)
+        left_type = self.accept(e.base)
+        if isinstance(left_type, TupleType) and self.chk.typing_mode_full():
+            left_type = cast(TupleType, left_type)
+            # Special case for tuples. They support indexing only by integer
+            # literals.  (Except in weak type checking mode.)
+            index = e.index
+            if isinstance(index, SliceExpr):
+                return self.visit_tuple_slice_helper(left_type, index)
+
+            ok = False
+            if isinstance(index, IntExpr):
+                n = index.value
+                ok = True
+            elif isinstance(index, UnaryExpr):
+                if index.op == '-':
+                    operand = index.expr
+                    if isinstance(operand, IntExpr):
+                        n = len(left_type.items) - operand.value
+                        ok = True
+            if ok:
+                if n >= 0 and n < len(left_type.items):
+                    return left_type.items[n]
+                else:
+                    self.chk.fail(messages.TUPLE_INDEX_OUT_OF_RANGE, e)
+                    return AnyType()
+            else:
+                self.chk.fail(messages.TUPLE_INDEX_MUST_BE_AN_INT_LITERAL, e)
+                return AnyType()
+        else:
+            result, method_type = self.check_op('__getitem__', left_type, e.index, e)
+            e.method_type = method_type
+            return result
+
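+    # Rough illustration (hypothetical variable): with 't' of type
+    # Tuple[int, str], 't[0]' is typed as 'int' and 't[-1]' as 'str'; in full
+    # typing mode an index that is not an integer literal is rejected with
+    # TUPLE_INDEX_MUST_BE_AN_INT_LITERAL.
+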
+    def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr):
+        begin = 0
+        end = len(left_type.items)
+        stride = 1
+        if slic.begin_index:
+            if isinstance(slic.begin_index, IntExpr):
+                begin = slic.begin_index.value
+            else:
+                self.chk.fail(messages.TUPLE_SLICE_MUST_BE_AN_INT_LITERAL, slic.begin_index)
+                return AnyType()
+        if slic.end_index:
+            if isinstance(slic.end_index, IntExpr):
+                end = slic.end_index.value
+            else:
+                self.chk.fail(messages.TUPLE_SLICE_MUST_BE_AN_INT_LITERAL, slic.end_index)
+                return AnyType()
+        if slic.stride:
+            if isinstance(slic.stride, IntExpr):
+                stride = slic.stride.value
+            else:
+                self.chk.fail(messages.TUPLE_SLICE_MUST_BE_AN_INT_LITERAL, slic.stride)
+                return AnyType()
+
+        return TupleType(left_type.items[begin:end:stride], left_type.fallback,
+                    left_type.line, left_type.implicit)
+
+    def visit_cast_expr(self, expr: CastExpr) -> Type:
+        """Type check a cast expression."""
+        source_type = self.accept(expr.expr, context=AnyType())
+        target_type = expr.type
+        if not self.is_valid_cast(source_type, target_type):
+            self.msg.invalid_cast(target_type, source_type, expr)
+        return target_type
+
+    def is_valid_cast(self, source_type: Type, target_type: Type) -> bool:
+        """Is a cast from source_type to target_type meaningful?"""
+        return (isinstance(target_type, AnyType) or
+                (not isinstance(source_type, Void) and
+                 not isinstance(target_type, Void)))
+
+    def visit_type_application(self, tapp: TypeApplication) -> Type:
+        """Type check a type application (expr[type, ...])."""
+        self.chk.fail(messages.GENERIC_TYPE_NOT_VALID_AS_EXPRESSION, tapp)
+        return AnyType()
+
+    def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type:
+        return AnyType()
+
+    def visit_list_expr(self, e: ListExpr) -> Type:
+        """Type check a list expression [...]."""
+        return self.check_list_or_set_expr(e.items, 'builtins.list', '<list>',
+                                           e)
+
+    def visit_set_expr(self, e: SetExpr) -> Type:
+        return self.check_list_or_set_expr(e.items, 'builtins.set', '<set>', e)
+
+    def check_list_or_set_expr(self, items: List[Node], fullname: str,
+                               tag: str, context: Context) -> Type:
+        # Translate into type checking a generic function call.
+        tv = TypeVarType('T', -1, [], self.chk.object_type())
+        constructor = CallableType(
+            [tv],
+            [nodes.ARG_STAR],
+            [None],
+            self.chk.named_generic_type(fullname, [tv]),
+            self.named_type('builtins.function'),
+            name=tag,
+            variables=[TypeVarDef('T', -1, None, self.chk.object_type())])
+        return self.check_call(constructor,
+                               items,
+                               [nodes.ARG_POS] * len(items), context)[0]
+
+    def visit_tuple_expr(self, e: TupleExpr) -> Type:
+        """Type check a tuple expression."""
+        ctx = None  # type: TupleType
+        # Try to determine type context for type inference.
+        if isinstance(self.chk.type_context[-1], TupleType):
+            t = cast(TupleType, self.chk.type_context[-1])
+            if len(t.items) == len(e.items):
+                ctx = t
+        # Infer item types.
+        items = []  # type: List[Type]
+        for i in range(len(e.items)):
+            item = e.items[i]
+            tt = None  # type: Type
+            if not ctx:
+                tt = self.accept(item)
+            else:
+                tt = self.accept(item, ctx.items[i])
+            self.check_not_void(tt, e)
+            items.append(tt)
+        fallback_item = join.join_type_list(items)
+        return TupleType(items, self.chk.named_generic_type('builtins.tuple', [fallback_item]))
+
+    def visit_dict_expr(self, e: DictExpr) -> Type:
+        # Translate into type checking a generic function call.
+        tv1 = TypeVarType('KT', -1, [], self.chk.object_type())
+        tv2 = TypeVarType('VT', -2, [], self.chk.object_type())
+        # The callable type represents a function like this:
+        #
+        #   def <unnamed>(*v: Tuple[kt, vt]) -> Dict[kt, vt]: ...
+        constructor = CallableType(
+            [TupleType([tv1, tv2], self.named_type('builtins.tuple'))],
+            [nodes.ARG_STAR],
+            [None],
+            self.chk.named_generic_type('builtins.dict', [tv1, tv2]),
+            self.named_type('builtins.function'),
+            name='<list>',
+            variables=[TypeVarDef('KT', -1, None, self.chk.object_type()),
+                       TypeVarDef('VT', -2, None, self.chk.object_type())])
+        # Synthesize function arguments.
+        args = []  # type: List[Node]
+        for key, value in e.items:
+            args.append(TupleExpr([key, value]))
+        return self.check_call(constructor,
+                               args,
+                               [nodes.ARG_POS] * len(args), e)[0]
+
+    def visit_func_expr(self, e: FuncExpr) -> Type:
+        """Type check lambda expression."""
+        inferred_type = self.infer_lambda_type_using_context(e)
+        if not inferred_type:
+            # No useful type context.
+            ret_type = e.expr().accept(self.chk)
+            if not e.arguments:
+                # Form 'lambda: e'; just use the inferred return type.
+                return CallableType([], [], [], ret_type, self.named_type('builtins.function'))
+            else:
+                # TODO: Consider reporting an error. However, this is fine if
+                # we are just doing the first pass in contextual type
+                # inference.
+                return AnyType()
+        else:
+            # Type context available.
+            self.chk.check_func_item(e, type_override=inferred_type)
+            ret_type = self.chk.type_map[e.expr()]
+            return replace_callable_return_type(inferred_type, ret_type)
+
+    def infer_lambda_type_using_context(self, e: FuncExpr) -> CallableType:
+        """Try to infer lambda expression type using context.
+
+        Return None if could not infer type.
+        """
+        # TODO also accept 'Any' context
+        ctx = self.chk.type_context[-1]
+        if not ctx or not isinstance(ctx, CallableType):
+            return None
+
+        # The context may have function type variables in it. We replace them
+        # since these are the type variables we are ultimately trying to infer;
+        # they must be considered as indeterminate. We use ErasedType since it
+        # does not affect type inference results (it is for purposes like this
+        # only).
+        ctx = replace_func_type_vars(ctx, ErasedType())
+
+        callable_ctx = cast(CallableType, ctx)
+
+        arg_kinds = [arg.kind for arg in e.arguments]
+
+        if callable_ctx.arg_kinds != arg_kinds:
+            # Incompatible context; cannot use it to infer types.
+            self.chk.fail(messages.CANNOT_INFER_LAMBDA_TYPE, e)
+            return None
+
+        return callable_ctx
+
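+    # Rough illustration: in a context expecting Callable[[int], str], a bare
+    # 'lambda x: ...' is checked with 'x' assumed to be 'int'; any function
+    # type variables in the context are first replaced with ErasedType so
+    # they stay indeterminate during inference.
+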
+    def visit_super_expr(self, e: SuperExpr) -> Type:
+        """Type check a super expression (non-lvalue)."""
+        t = self.analyze_super(e, False)
+        return t
+
+    def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type:
+        """Type check a super expression."""
+        if e.info and e.info.bases:
+            # TODO fix multiple inheritance etc
+            if len(e.info.mro) < 2:
+                self.chk.fail('Internal error: unexpected mro for {}: {}'.format(
+                    e.info.name(), e.info.mro), e)
+                return AnyType()
+            for base in e.info.mro[1:]:
+                if e.name in base.names or base == e.info.mro[-1]:
+                    if e.info.fallback_to_any and base == e.info.mro[-1]:
+                        # There's an undefined base class, and we're
+                        # at the end of the chain.  That's not an error.
+                        return AnyType()
+                    return analyze_member_access(e.name, self_type(e.info), e,
+                                                 is_lvalue, True,
+                                                 self.named_type, self.not_ready_callback,
+                                                 self.msg, base)
+        else:
+            # Invalid super. This has been reported by the semantic analyzer.
+            return AnyType()
+
+    def visit_slice_expr(self, e: SliceExpr) -> Type:
+        for index in [e.begin_index, e.end_index, e.stride]:
+            if index:
+                t = self.accept(index)
+                self.chk.check_subtype(t, self.named_type('builtins.int'),
+                                       index, messages.INVALID_SLICE_INDEX)
+        return self.named_type('builtins.slice')
+
+    def visit_list_comprehension(self, e: ListComprehension) -> Type:
+        return self.check_generator_or_comprehension(
+            e.generator, 'builtins.list', '<list-comprehension>')
+
+    def visit_set_comprehension(self, e: SetComprehension) -> Type:
+        return self.check_generator_or_comprehension(
+            e.generator, 'builtins.set', '<set-comprehension>')
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> Type:
+        return self.check_generator_or_comprehension(e, 'typing.Iterator',
+                                                     '<generator>')
+
+    def check_generator_or_comprehension(self, gen: GeneratorExpr,
+                                         type_name: str,
+                                         id_for_messages: str) -> Type:
+        """Type check a generator expression or a list comprehension."""
+        self.check_for_comp(gen)
+
+        # Infer the type of the list comprehension by using a synthetic generic
+        # callable type.
+        tv = TypeVarType('T', -1, [], self.chk.object_type())
+        constructor = CallableType(
+            [tv],
+            [nodes.ARG_POS],
+            [None],
+            self.chk.named_generic_type(type_name, [tv]),
+            self.chk.named_type('builtins.function'),
+            name=id_for_messages,
+            variables=[TypeVarDef('T', -1, None, self.chk.object_type())])
+        return self.check_call(constructor,
+                               [gen.left_expr], [nodes.ARG_POS], gen)[0]
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension):
+        """Type check a dictionary comprehension."""
+        self.check_for_comp(e)
+
+        # Infer the type of the list comprehension by using a synthetic generic
+        # callable type.
+        key_tv = TypeVarType('KT', -1, [], self.chk.object_type())
+        value_tv = TypeVarType('VT', -2, [], self.chk.object_type())
+        constructor = CallableType(
+            [key_tv, value_tv],
+            [nodes.ARG_POS, nodes.ARG_POS],
+            [None, None],
+            self.chk.named_generic_type('builtins.dict', [key_tv, value_tv]),
+            self.chk.named_type('builtins.function'),
+            name='<dictionary-comprehension>',
+            variables=[TypeVarDef('KT', -1, None, self.chk.object_type()),
+                       TypeVarDef('VT', -2, None, self.chk.object_type())])
+        return self.check_call(constructor,
+                               [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0]
+
+    def check_for_comp(self, e: Union[GeneratorExpr, DictionaryComprehension]) -> None:
+        """Check the for_comp part of comprehensions. That is the part from 'for':
+        ... for x in y if z
+        """
+        self.chk.binder.push_frame()
+        for index, sequence, conditions in zip(e.indices, e.sequences,
+                                               e.condlists):
+            sequence_type = self.chk.analyze_iterable_item_type(sequence)
+            self.chk.analyze_index_variables(index, sequence_type, e)
+            for condition in conditions:
+                self.accept(condition)
+        self.chk.binder.pop_frame()
+
+    def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
+        cond_type = self.accept(e.cond)
+        self.check_not_void(cond_type, e)
+
+        # Gain type information from isinstance if it is there
+        # but only for the current expression
+        if_map, else_map = mypy.checker.find_isinstance_check(
+            e.cond,
+            self.chk.type_map,
+            self.chk.typing_mode_weak())
+
+        self.chk.binder.push_frame()
+
+        if if_map:
+            for var, type in if_map.items():
+                self.chk.binder.push(var, type)
+
+        if_type = self.accept(e.if_expr)
+
+        self.chk.binder.pop_frame()
+        self.chk.binder.push_frame()
+
+        if else_map:
+            for var, type in else_map.items():
+                self.chk.binder.push(var, type)
+
+        else_type = self.accept(e.else_expr, context=if_type)
+
+        self.chk.binder.pop_frame()
+
+        return join.join_types(if_type, else_type)
+
+    def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
+        self.accept(e.expr)
+        return self.named_type('builtins.str')
+
+    #
+    # Helpers
+    #
+
+    def accept(self, node: Node, context: Type = None) -> Type:
+        """Type check a node. Alias for TypeChecker.accept."""
+        return self.chk.accept(node, context)
+
+    def check_not_void(self, typ: Type, context: Context) -> None:
+        """Generate an error if type is Void."""
+        self.chk.check_not_void(typ, context)
+
+    def is_boolean(self, typ: Type) -> bool:
+        """Is type compatible with bool?"""
+        return is_subtype(typ, self.chk.bool_type())
+
+    def named_type(self, name: str) -> Instance:
+        """Return an instance type with type given by the name and no type
+        arguments. Alias for TypeChecker.named_type.
+        """
+        return self.chk.named_type(name)
+
+    def is_valid_var_arg(self, typ: Type) -> bool:
+        """Is a type valid as a *args argument?"""
+        return (isinstance(typ, TupleType) or
+                is_subtype(typ, self.chk.named_generic_type('typing.Iterable',
+                                                            [AnyType()])) or
+                isinstance(typ, AnyType))
+
+    def is_valid_keyword_var_arg(self, typ: Type) -> bool:
+        """Is a type valid as a **kwargs argument?"""
+        return is_subtype(typ, self.chk.named_generic_type(
+            'builtins.dict', [self.named_type('builtins.str'), AnyType()]))
+
+    def has_non_method(self, typ: Type, member: str) -> bool:
+        """Does type have a member variable / property with the given name?"""
+        if isinstance(typ, Instance):
+            return (not typ.type.has_method(member) and
+                    typ.type.has_readable_member(member))
+        else:
+            return False
+
+    def has_member(self, typ: Type, member: str) -> bool:
+        """Does type have member with the given name?"""
+        # TODO TupleType => also consider tuple attributes
+        if isinstance(typ, Instance):
+            return typ.type.has_readable_member(member)
+        elif isinstance(typ, AnyType):
+            return True
+        elif isinstance(typ, UnionType):
+            result = all(self.has_member(x, member) for x in typ.items)
+            return result
+        elif isinstance(typ, TupleType):
+            return self.has_member(typ.fallback, member)
+        else:
+            return False
+
+    def not_ready_callback(self, name: str, context: Context) -> None:
+        """Called when we can't infer the type of a variable because it's not ready yet.
+
+        Either defer type checking of the enclosing function to the next
+        pass or report an error.
+        """
+        self.chk.handle_cannot_determine_type(name, context)
+
+
+def map_actuals_to_formals(caller_kinds: List[int],
+                           caller_names: List[str],
+                           callee_kinds: List[int],
+                           callee_names: List[str],
+                           caller_arg_type: Callable[[int],
+                                                     Type]) -> List[List[int]]:
+    """Calculate mapping between actual (caller) args and formals.
+
+    The result contains a list of caller argument indexes mapping to each
+    callee argument index, indexed by callee index.
+
+    The caller_arg_type argument should evaluate to the type of the actual
+    argument type with the given index.
+    """
+    ncallee = len(callee_kinds)
+    map = [None] * ncallee  # type: List[List[int]]
+    for i in range(ncallee):
+        map[i] = []
+    j = 0
+    for i, kind in enumerate(caller_kinds):
+        if kind == nodes.ARG_POS:
+            if j < ncallee:
+                if callee_kinds[j] in [nodes.ARG_POS, nodes.ARG_OPT,
+                                       nodes.ARG_NAMED]:
+                    map[j].append(i)
+                    j += 1
+                elif callee_kinds[j] == nodes.ARG_STAR:
+                    map[j].append(i)
+        elif kind == nodes.ARG_STAR:
+            # We need to know the actual type to map varargs.
+            argt = caller_arg_type(i)
+            if isinstance(argt, TupleType):
+                # A tuple actual maps to a fixed number of formals.
+                for _ in range(len(argt.items)):
+                    if j < ncallee:
+                        if callee_kinds[j] != nodes.ARG_STAR2:
+                            map[j].append(i)
+                        else:
+                            raise NotImplementedError()
+                        j += 1
+            else:
+                # Assume that it is an iterable (if it isn't, there will be
+                # an error later).
+                while j < ncallee:
+                    if callee_kinds[j] in (nodes.ARG_NAMED, nodes.ARG_STAR2):
+                        break
+                    else:
+                        map[j].append(i)
+                    j += 1
+        elif kind == nodes.ARG_NAMED:
+            name = caller_names[i]
+            if name in callee_names:
+                map[callee_names.index(name)].append(i)
+            elif nodes.ARG_STAR2 in callee_kinds:
+                map[callee_kinds.index(nodes.ARG_STAR2)].append(i)
+        else:
+            assert kind == nodes.ARG_STAR2
+            for j in range(ncallee):
+                # TODO tuple varargs complicate this
+                no_certain_match = (
+                    not map[j] or caller_kinds[map[j][0]] == nodes.ARG_STAR)
+                if ((callee_names[j] and no_certain_match)
+                        or callee_kinds[j] == nodes.ARG_STAR2):
+                    map[j].append(i)
+    return map
+
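+# Rough illustration (hypothetical callee): for 'def f(a, b=0, *rest)' called
+# as 'f(1, 2, 3, 4)' with all-positional actuals, the computed mapping is
+# [[0], [1], [2, 3]]: actual 0 feeds 'a', actual 1 feeds 'b', and actuals 2
+# and 3 both feed the '*rest' formal.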
+
+def is_empty_tuple(t: Type) -> bool:
+    return isinstance(t, TupleType) and not cast(TupleType, t).items
+
+
+def is_duplicate_mapping(mapping: List[int], actual_kinds: List[int]) -> bool:
+    # Multiple actuals can map to the same formal only if they both come from
+    # varargs (*args and **kwargs); in this case at runtime it is possible that
+    # there are no duplicates. We need to allow this, as the convention
+    # f(..., *args, **kwargs) is common enough.
+    return len(mapping) > 1 and not (
+        len(mapping) == 2 and
+        actual_kinds[mapping[0]] == nodes.ARG_STAR and
+        actual_kinds[mapping[1]] == nodes.ARG_STAR2)
+
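+# Rough illustration: is_duplicate_mapping([0, 1], kinds) is False when
+# kinds[0] is ARG_STAR and kinds[1] is ARG_STAR2 (the common
+# 'f(*args, **kwargs)' forwarding pattern), but True when, say, a positional
+# actual and a keyword actual both target the same formal.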
+
+def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> CallableType:
+    """Return a copy of a callable type with a different return type."""
+    return c.copy_modified(ret_type=new_ret_type)
+
+
+class ArgInferSecondPassQuery(types.TypeQuery):
+    """Query whether an argument type should be inferred in the second pass.
+
+    The result is True if the type has a type variable in a callable return
+    type anywhere. For example, the result for Callable[[], T] is True if T is
+    a type variable.
+    """
+    def __init__(self) -> None:
+        super().__init__(False, types.ANY_TYPE_STRATEGY)
+
+    def visit_callable_type(self, t: CallableType) -> bool:
+        return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery())
+
+
+class HasTypeVarQuery(types.TypeQuery):
+    """Visitor for querying whether a type has a type variable component."""
+    def __init__(self) -> None:
+        super().__init__(False, types.ANY_TYPE_STRATEGY)
+
+    def visit_type_var(self, t: TypeVarType) -> bool:
+        return True
+
+
+def has_erased_component(t: Type) -> bool:
+    return t is not None and t.accept(HasErasedComponentsQuery())
+
+
+class HasErasedComponentsQuery(types.TypeQuery):
+    """Visitor for querying whether a type has an erased component."""
+    def __init__(self) -> None:
+        super().__init__(False, types.ANY_TYPE_STRATEGY)
+
+    def visit_erased_type(self, t: ErasedType) -> bool:
+        return True
+
+
+def overload_arg_similarity(actual: Type, formal: Type) -> int:
+    """Return if caller argument (actual) is compatible with overloaded signature arg (formal).
+
+    Return a similarity level:
+      0: no match
+      1: actual is compatible, but only using type promotions (e.g. int vs float)
+      2: actual is compatible without type promotions (e.g. int vs object)
+
+    The distinction is important in cases where multiple overload items match. We want
+    to give priority to higher-similarity matches.
+    """
+    if (isinstance(actual, NoneTyp) or isinstance(actual, AnyType) or
+            isinstance(formal, AnyType) or isinstance(formal, TypeVarType) or
+            isinstance(formal, CallableType)):
+        # These could match anything at runtime.
+        return 2
+    if isinstance(actual, UnionType):
+        return max(overload_arg_similarity(item, formal)
+                   for item in actual.items)
+    if isinstance(formal, UnionType):
+        return max(overload_arg_similarity(actual, item)
+                   for item in formal.items)
+    if isinstance(formal, Instance):
+        if isinstance(actual, CallableType):
+            actual = actual.fallback
+        if isinstance(actual, Overloaded):
+            actual = actual.items()[0].fallback
+        if isinstance(actual, TupleType):
+            actual = actual.fallback
+        if isinstance(actual, Instance):
+            # First perform a quick check (as an optimization) and fall back to generic
+            # subtyping algorithm if type promotions are possible (e.g., int vs. float).
+            if formal.type in actual.type.mro:
+                return 2
+            elif actual.type._promote and is_subtype(actual, formal):
+                return 1
+            else:
+                return 0
+        else:
+            return 0
+    # Fall back to a conservative equality check for the remaining kinds of type.
+    return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0
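+
+
+# Rough illustration (builtin Instance types assumed): an 'int' actual against
+# an 'object' formal scores 2 (plain subtype match), against a 'float' formal
+# scores 1 (match only via the int-to-float promotion), and a 'str' actual
+# against an 'int' formal scores 0 (no match).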
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
new file mode 100644
index 0000000..0364351
--- /dev/null
+++ b/mypy/checkmember.py
@@ -0,0 +1,404 @@
+"""Type checking of attribute access"""
+
+from typing import cast, Callable, List
+
+from mypy.types import (
+    Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarDef,
+    Overloaded, TypeVarType, TypeTranslator, UnionType, PartialType, DeletedType, NoneTyp
+)
+from mypy.nodes import TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context
+from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, function_type, Decorator, OverloadedFuncDef
+from mypy.messages import MessageBuilder
+from mypy.maptype import map_instance_to_supertype
+from mypy.expandtype import expand_type_by_instance
+from mypy.nodes import method_type, method_type_with_fallback
+from mypy.semanal import self_type
+from mypy import messages
+from mypy import subtypes
+
+
+def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
+                          is_super: bool,
+                          builtin_type: Callable[[str], Instance],
+                          not_ready_callback: Callable[[str, Context], None],
+                          msg: MessageBuilder, override_info: TypeInfo = None,
+                          report_type: Type = None) -> Type:
+    """Analyse attribute access.
+
+    This is a general operation that supports various different variations:
+
+      1. lvalue or non-lvalue access (i.e. setter or getter access)
+      2. supertype access (when using super(); is_super == True and
+         override_info should refer to the supertype)
+    """
+    report_type = report_type or typ
+    if isinstance(typ, Instance):
+        if name == '__init__' and not is_super:
+            # Accessing __init__ in statically typed code would compromise
+            # type safety unless used via super().
+            msg.fail(messages.CANNOT_ACCESS_INIT, node)
+            return AnyType()
+
+        # The base object has an instance type.
+
+        info = typ.type
+        if override_info:
+            info = override_info
+
+        # Look up the member. First look up the method dictionary.
+        method = info.get_method(name)
+        if method:
+            if method.is_property:
+                assert isinstance(method, OverloadedFuncDef)
+                method = cast(OverloadedFuncDef, method)
+                return analyze_var(name, method.items[0].var, typ, info, node, is_lvalue, msg,
+                                   not_ready_callback)
+            if is_lvalue:
+                msg.cant_assign_to_method(node)
+            typ = map_instance_to_supertype(typ, method.info)
+            if name == '__new__':
+                # __new__ is special and behaves like a static method -- don't strip
+                # the first argument.
+                signature = function_type(method, builtin_type('builtins.function'))
+            else:
+                signature = method_type_with_fallback(method, builtin_type('builtins.function'))
+            return expand_type_by_instance(signature, typ)
+        else:
+            # Not a method.
+            return analyze_member_var_access(name, typ, info, node,
+                                             is_lvalue, is_super, builtin_type,
+                                             not_ready_callback, msg,
+                                             report_type=report_type)
+    elif isinstance(typ, AnyType):
+        # The base object has dynamic type.
+        return AnyType()
+    elif isinstance(typ, UnionType):
+        # The base object has a union type; analyze access on each item.
+        msg.disable_type_names += 1
+        results = [analyze_member_access(name, subtype, node, is_lvalue,
+                                         is_super, builtin_type, not_ready_callback, msg)
+                   for subtype in typ.items]
+        msg.disable_type_names -= 1
+        return UnionType.make_simplified_union(results)
+    elif isinstance(typ, TupleType):
+        # Actually look up from the fallback instance type.
+        return analyze_member_access(name, typ.fallback, node, is_lvalue,
+                                     is_super, builtin_type, not_ready_callback, msg)
+    elif isinstance(typ, FunctionLike) and typ.is_type_obj():
+        # Class attribute.
+        # TODO super?
+        ret_type = typ.items()[0].ret_type
+        if isinstance(ret_type, TupleType):
+            ret_type = ret_type.fallback
+        if isinstance(ret_type, Instance):
+            result = analyze_class_attribute_access(ret_type, name, node, is_lvalue,
+                                                    builtin_type, not_ready_callback, msg)
+            if result:
+                return result
+            # Look up from the 'type' type.
+            return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
+                                         builtin_type, not_ready_callback, msg,
+                                         report_type=report_type)
+        else:
+            assert False, 'Unexpected type {}'.format(repr(ret_type))
+    elif isinstance(typ, FunctionLike):
+        # Look up from the 'function' type.
+        return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
+                                     builtin_type, not_ready_callback, msg,
+                                     report_type=report_type)
+    elif isinstance(typ, TypeVarType):
+        return analyze_member_access(name, typ.upper_bound, node, is_lvalue, is_super,
+                                     builtin_type, not_ready_callback, msg,
+                                     report_type=report_type)
+    elif isinstance(typ, DeletedType):
+        msg.deleted_as_rvalue(typ, node)
+        return AnyType()
+    return msg.has_no_attr(report_type, name, node)
+
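+# Rough illustration: accessing 'x.f' where 'x' is an Instance and 'f' is a
+# method yields the bound method type (the 'self' argument already applied via
+# method_type_with_fallback); for a union-typed 'x' the access is analyzed per
+# item and the results are combined with UnionType.make_simplified_union.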
+
+def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
+                              node: Context, is_lvalue: bool, is_super: bool,
+                              builtin_type: Callable[[str], Instance],
+                              not_ready_callback: Callable[[str, Context], None],
+                              msg: MessageBuilder,
+                              report_type: Type = None) -> Type:
+    """Analyse attribute access that does not target a method.
+
+    This is logically part of analyze_member_access and the arguments are
+    similar.
+    """
+    # It was not a method. Try looking up a variable.
+    v = lookup_member_var_or_accessor(info, name, is_lvalue)
+
+    vv = v
+    if isinstance(vv, Decorator):
+        # The associated Var node of a decorator contains the type.
+        v = vv.var
+
+    if isinstance(v, Var):
+        return analyze_var(name, v, itype, info, node, is_lvalue, msg, not_ready_callback)
+    elif isinstance(v, FuncDef):
+        assert False, "Did not expect a function"
+    elif not v and name not in ['__getattr__', '__setattr__']:
+        if not is_lvalue:
+            method = info.get_method('__getattr__')
+            if method:
+                typ = map_instance_to_supertype(itype, method.info)
+                getattr_type = expand_type_by_instance(
+                    method_type_with_fallback(method, builtin_type('builtins.function')), typ)
+                if isinstance(getattr_type, CallableType):
+                    return getattr_type.ret_type
+
+    if itype.type.fallback_to_any:
+        return AnyType()
+
+    # Could not find the member.
+    if is_super:
+        msg.undefined_in_superclass(name, node)
+        return AnyType()
+    else:
+        return msg.has_no_attr(report_type or itype, name, node)
+
+
+def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Context,
+               is_lvalue: bool, msg: MessageBuilder,
+               not_ready_callback: Callable[[str, Context], None]) -> Type:
+    """Analyze access to an attribute via a Var node.
+
+    This is conceptually part of analyze_member_access and the arguments are similar.
+    """
+    # Found a member variable.
+    itype = map_instance_to_supertype(itype, var.info)
+    typ = var.type
+    if typ:
+        if isinstance(typ, PartialType):
+            return handle_partial_attribute_type(typ, is_lvalue, msg, var)
+        t = expand_type_by_instance(typ, itype)
+        if var.is_initialized_in_class and isinstance(t, FunctionLike):
+            if is_lvalue:
+                if var.is_property:
+                    if not var.is_settable_property:
+                        msg.read_only_property(name, info, node)
+                else:
+                    msg.cant_assign_to_method(node)
+
+            if not var.is_staticmethod:
+                # Class-level function objects and classmethods become bound
+                # methods: the former to the instance, the latter to the
+                # class.
+                functype = cast(FunctionLike, t)
+                check_method_type(functype, itype, node, msg)
+                signature = method_type(functype)
+                if var.is_property:
+                    # A property cannot have an overloaded type => the cast
+                    # is fine.
+                    return cast(CallableType, signature).ret_type
+                else:
+                    return signature
+        return t
+    else:
+        if not var.is_ready:
+            not_ready_callback(var.name(), node)
+        # Implicit 'Any' type.
+        return AnyType()
+
+
+def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: MessageBuilder,
+                                  context: Context) -> Type:
+    if typ.type is None:
+        # 'None' partial type. It has a well-defined type -- 'None'.
+        # In an lvalue context we want to preserve the knowledge of
+        # it being a partial type.
+        if not is_lvalue:
+            return NoneTyp()
+        return typ
+    else:
+        msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+        return AnyType()
+
+
+def lookup_member_var_or_accessor(info: TypeInfo, name: str,
+                                  is_lvalue: bool) -> SymbolNode:
+    """Find the attribute/accessor node that refers to a member of a type."""
+    # TODO handle lvalues
+    node = info.get(name)
+    if node:
+        return node.node
+    else:
+        return None
+
+
+def check_method_type(functype: FunctionLike, itype: Instance,
+                      context: Context, msg: MessageBuilder) -> None:
+    for item in functype.items():
+        if not item.arg_types or item.arg_kinds[0] != ARG_POS:
+            # No positional first (self) argument.
+            msg.invalid_method_type(item, context)
+        else:
+            # Check that self argument has type 'Any' or valid instance type.
+            selfarg = item.arg_types[0]
+            if not subtypes.is_equivalent(selfarg, itype):
+                msg.invalid_method_type(item, context)
+
+
+def analyze_class_attribute_access(itype: Instance,
+                                   name: str,
+                                   context: Context,
+                                   is_lvalue: bool,
+                                   builtin_type: Callable[[str], Instance],
+                                   not_ready_callback: Callable[[str, Context], None],
+                                   msg: MessageBuilder) -> Type:
+    node = itype.type.get(name)
+    if not node:
+        if itype.type.fallback_to_any:
+            return AnyType()
+        return None
+
+    is_decorated = isinstance(node.node, Decorator)
+    is_method = is_decorated or isinstance(node.node, FuncDef)
+    if is_lvalue:
+        if is_method:
+            msg.cant_assign_to_method(context)
+        if isinstance(node.node, TypeInfo):
+            msg.fail(messages.CANNOT_ASSIGN_TO_TYPE, context)
+
+    if itype.type.is_enum and not (is_lvalue or is_decorated or is_method):
+        return itype
+
+    t = node.type
+    if t:
+        if isinstance(t, PartialType):
+            return handle_partial_attribute_type(t, is_lvalue, msg, node.node)
+        is_classmethod = is_decorated and cast(Decorator, node.node).func.is_class
+        return add_class_tvars(t, itype.type, is_classmethod, builtin_type)
+    elif isinstance(node.node, Var):
+        not_ready_callback(name, context)
+        return AnyType()
+
+    if isinstance(node.node, TypeInfo):
+        return type_object_type(cast(TypeInfo, node.node), builtin_type)
+
+    if is_decorated:
+        # TODO: Return type of decorated function. This is a quick hack to work around #998.
+        return AnyType()
+    else:
+        return function_type(cast(FuncBase, node.node), builtin_type('builtins.function'))
+
+
+def add_class_tvars(t: Type, info: TypeInfo, is_classmethod: bool,
+                    builtin_type: Callable[[str], Instance]) -> Type:
+    if isinstance(t, CallableType):
+        # TODO: Should we propagate type variable values?
+        vars = [TypeVarDef(n, i + 1, None, builtin_type('builtins.object'), tv.variance)
+                for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)]
+        arg_types = t.arg_types
+        arg_kinds = t.arg_kinds
+        arg_names = t.arg_names
+        if is_classmethod:
+            arg_types = arg_types[1:]
+            arg_kinds = arg_kinds[1:]
+            arg_names = arg_names[1:]
+        return t.copy_modified(arg_types=arg_types, arg_kinds=arg_kinds, arg_names=arg_names,
+                               variables=vars + t.variables)
+    elif isinstance(t, Overloaded):
+        return Overloaded([cast(CallableType, add_class_tvars(i, info, is_classmethod,
+                                                              builtin_type))
+                           for i in t.items()])
+    return t
+
+
+def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> Type:
+    """Return the type of a type object.
+
+    For a generic type G with type variables T and S the type is generally of form
+
+      Callable[..., G[T, S]]
+
+    where ... are argument types for the __init__/__new__ method (without the self
+    argument). Also, the fallback type will be 'type' instead of 'function'.
+    """
+    init_method = info.get_method('__init__')
+    if not init_method:
+        # Must be an invalid class definition.
+        return AnyType()
+    else:
+        fallback = builtin_type('builtins.type')
+        if init_method.info.fullname() == 'builtins.object':
+            # No non-default __init__ -> look at __new__ instead.
+            new_method = info.get_method('__new__')
+            if new_method and new_method.info.fullname() != 'builtins.object':
+                # Found one! Get signature from __new__.
+                return type_object_type_from_function(new_method, info, fallback)
+            # Both are defined by object.  But if we've got a bogus
+            # base class, we can't know for sure, so check for that.
+            if info.fallback_to_any:
+                # Construct a universal callable as the prototype.
+                sig = CallableType(arg_types=[AnyType(), AnyType()],
+                                   arg_kinds=[ARG_STAR, ARG_STAR2],
+                                   arg_names=["_args", "_kwds"],
+                                   ret_type=AnyType(),
+                                   fallback=builtin_type('builtins.function'))
+                return class_callable(sig, info, fallback)
+        # Construct callable type based on signature of __init__. Adjust
+        # return type and insert type arguments.
+        return type_object_type_from_function(init_method, info, fallback)
+
+
+def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
+                                   fallback: Instance) -> FunctionLike:
+    signature = method_type_with_fallback(init_or_new, fallback)
+    if isinstance(signature, CallableType):
+        return class_callable(signature, info, fallback)
+    else:
+        # Overloaded __init__/__new__.
+        items = []  # type: List[CallableType]
+        for item in cast(Overloaded, signature).items():
+            items.append(class_callable(item, info, fallback))
+        return Overloaded(items)
+
+
+def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance) -> CallableType:
+    """Create a type object type based on the signature of __init__."""
+    variables = []  # type: List[TypeVarDef]
+    for i, tvar in enumerate(info.defn.type_vars):
+        variables.append(TypeVarDef(tvar.name, i + 1, tvar.values, tvar.upper_bound,
+                                    tvar.variance))
+
+    initvars = init_type.variables
+    variables.extend(initvars)
+
+    callable_type = init_type.copy_modified(
+        ret_type=self_type(info), fallback=type_type, name=None, variables=variables)
+    c = callable_type.with_name('"{}"'.format(info.name()))
+    return convert_class_tvars_to_func_tvars(c, len(initvars))
+
+
+def convert_class_tvars_to_func_tvars(callable: CallableType,
+                                      num_func_tvars: int) -> CallableType:
+    return cast(CallableType, callable.accept(TvarTranslator(num_func_tvars)))
+
+
+class TvarTranslator(TypeTranslator):
+    def __init__(self, num_func_tvars: int) -> None:
+        super().__init__()
+        self.num_func_tvars = num_func_tvars
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if t.id < 0:
+            return t
+        else:
+            return TypeVarType(t.name, -t.id - self.num_func_tvars, t.values, t.upper_bound,
+                               t.variance)
+
+    def translate_variables(self,
+                            variables: List[TypeVarDef]) -> List[TypeVarDef]:
+        if not variables:
+            return variables
+        items = []  # type: List[TypeVarDef]
+        for v in variables:
+            if v.id > 0:
+                items.append(TypeVarDef(v.name, -v.id - self.num_func_tvars,
+                                        v.values, v.upper_bound, v.variance))
+            else:
+                items.append(v)
+        return items
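The type_object_type docstring above describes treating a class object as a callable whose signature comes from __init__ and whose return type is the instance type (Callable[..., G[T, S]]). A minimal standalone sketch of that idea using only the typing module (the names Box and box_ctor are illustrative, not part of mypy):

    from typing import Callable, Generic, TypeVar

    T = TypeVar('T')

    class Box(Generic[T]):
        def __init__(self, item: T) -> None:
            self.item = item

    # Conceptually the type object 'Box' behaves like Callable[[T], Box[T]]:
    # the argument types come from __init__ (minus 'self') and the return
    # type is the instance type, so Box(42) is inferred as Box[int].
    b = Box(42)
    box_ctor: Callable[[int], 'Box[int]'] = Box  # a valid, more specific view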
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
new file mode 100644
index 0000000..39e3fe4
--- /dev/null
+++ b/mypy/checkstrformat.py
@@ -0,0 +1,297 @@
+"""Expression type checker. This file is conceptually part of ExpressionChecker and TypeChecker."""
+
+import re
+
+from typing import cast, List, Tuple, Dict, Callable
+
+from mypy.types import (
+    Type, AnyType, TupleType, Instance, UnionType
+)
+from mypy.nodes import (
+    Node, StrExpr, TupleExpr, DictExpr, Context
+)
+if False:
+    # break import cycle only needed for mypy
+    import mypy.checker
+from mypy import messages
+from mypy.messages import MessageBuilder
+
+
+class ConversionSpecifier:
+    def __init__(self, key: str, flags: str, width: str, precision: str, type: str) -> None:
+        self.key = key
+        self.flags = flags
+        self.width = width
+        self.precision = precision
+        self.type = type
+
+    def has_key(self):
+        return self.key is not None
+
+    def has_star(self):
+        return self.width == '*' or self.precision == '*'
+
+
+class StringFormatterChecker:
+    """String interplation/formatter type checker.
+
+    This class works closely together with checker.ExpressionChecker.
+    """
+
+    # Some services are provided by a TypeChecker instance.
+    chk = None  # type: mypy.checker.TypeChecker
+    # This is shared with TypeChecker, but stored also here for convenience.
+    msg = None  # type: MessageBuilder
+    # Some services are provided by a ExpressionChecker instance.
+    exprchk = None  # type: mypy.checkexpr.ExpressionChecker
+
+    def __init__(self,
+                 exprchk: 'mypy.checkexpr.ExpressionChecker',
+                 chk: 'mypy.checker.TypeChecker',
+                 msg: MessageBuilder) -> None:
+        """Construct an expression type checker."""
+        self.chk = chk
+        self.exprchk = exprchk
+        self.msg = msg
+
+    def check_str_interpolation(self, str: StrExpr, replacements: Node) -> Type:
+        """Check the types of the 'replacements' in a string interpolation
+        expression: str % replacements
+        """
+        specifiers = self.parse_conversion_specifiers(str.value)
+        has_mapping_keys = self.analyze_conversion_specifiers(specifiers, str)
+        if has_mapping_keys is None:
+            pass  # Error was reported
+        elif has_mapping_keys:
+            self.check_mapping_str_interpolation(specifiers, replacements)
+        else:
+            self.check_simple_str_interpolation(specifiers, replacements)
+        return self.named_type('builtins.str')
+
+    def parse_conversion_specifiers(self, format: str) -> List[ConversionSpecifier]:
+        key_regex = r'(\((\w*)\))?'  # (optional) parenthesised sequence of characters
+        flags_regex = r'([#0\-+ ]*)'  # (optional) sequence of flags
+        width_regex = r'(\*|[1-9][0-9]*)?'  # (optional) minimum field width (* or numbers)
+        precision_regex = r'(?:\.(\*|[0-9]+)?)?'  # (optional) . followed by * or numbers
+        length_mod_regex = r'[hlL]?'  # (optional) length modifier (unused)
+        type_regex = r'(.)?'  # conversion type
+        regex = ('%' + key_regex + flags_regex + width_regex +
+                 precision_regex + length_mod_regex + type_regex)
+        specifiers = []  # type: List[ConversionSpecifier]
+        for parens_key, key, flags, width, precision, type in re.findall(regex, format):
+            if parens_key == '':
+                key = None
+            specifiers.append(ConversionSpecifier(key, flags, width, precision, type))
+        return specifiers
+
+    def analyze_conversion_specifiers(self, specifiers: List[ConversionSpecifier],
+                                      context: Context) -> bool:
+        has_star = any(specifier.has_star() for specifier in specifiers)
+        has_key = any(specifier.has_key() for specifier in specifiers)
+        all_have_keys = all(specifier.has_key() for specifier in specifiers)
+
+        if has_key and has_star:
+            self.msg.string_interpolation_with_star_and_key(context)
+            return None
+        if has_key and not all_have_keys:
+            self.msg.string_interpolation_mixing_key_and_non_keys(context)
+            return None
+        return has_key
+
+    def check_simple_str_interpolation(self, specifiers: List[ConversionSpecifier],
+                                       replacements: Node) -> None:
+        checkers = self.build_replacement_checkers(specifiers, replacements)
+        if checkers is None:
+            return
+
+        rhs_type = self.accept(replacements)
+        rep_types = []  # type: List[Type]
+        if isinstance(rhs_type, TupleType):
+            rep_types = rhs_type.items
+        elif isinstance(rhs_type, AnyType):
+            return
+        else:
+            rep_types = [rhs_type]
+
+        if len(checkers) > len(rep_types):
+            self.msg.too_few_string_formatting_arguments(replacements)
+        elif len(checkers) < len(rep_types):
+            self.msg.too_many_string_formatting_arguments(replacements)
+        else:
+            if len(checkers) == 1:
+                check_node, check_type = checkers[0]
+                if isinstance(rhs_type, TupleType) and len(rhs_type.items) == 1:
+                    check_type(rhs_type.items[0])
+                else:
+                    check_node(replacements)
+            elif isinstance(replacements, TupleExpr):
+                for checks, rep_node in zip(checkers, replacements.items):
+                    check_node, check_type = checks
+                    check_node(rep_node)
+            else:
+                for checks, rep_type in zip(checkers, rep_types):
+                    check_node, check_type = checks
+                    check_type(rep_type)
+
+    def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
+                                       replacements: Node) -> None:
+        dict_with_only_str_literal_keys = (isinstance(replacements, DictExpr) and
+                                          all(isinstance(k, StrExpr)
+                                              for k, v in cast(DictExpr, replacements).items))
+        if dict_with_only_str_literal_keys:
+            mapping = {}  # type: Dict[str, Type]
+            for k, v in cast(DictExpr, replacements).items:
+                key_str = cast(StrExpr, k).value
+                mapping[key_str] = self.accept(v)
+
+            for specifier in specifiers:
+                if specifier.key not in mapping:
+                    self.msg.key_not_in_mapping(specifier.key, replacements)
+                    return
+                rep_type = mapping[specifier.key]
+                expected_type = self.conversion_type(specifier.type, replacements)
+                if expected_type is None:
+                    return
+                self.chk.check_subtype(rep_type, expected_type, replacements,
+                                       messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
+                                       'expression has type',
+                                       'placeholder with key \'%s\' has type' % specifier.key)
+        else:
+            rep_type = self.accept(replacements)
+            dict_type = self.chk.named_generic_type('builtins.dict',
+                                            [AnyType(), AnyType()])
+            self.chk.check_subtype(rep_type, dict_type, replacements,
+                                   messages.FORMAT_REQUIRES_MAPPING,
+                                   'expression has type', 'expected type for mapping is')
+
+    def build_replacement_checkers(self, specifiers: List[ConversionSpecifier],
+                                   context: Context) -> List[Tuple[Callable[[Node], None],
+                                                                   Callable[[Type], None]]]:
+        checkers = []  # type: List[Tuple[Callable[[Node], None], Callable[[Type], None]]]
+        for specifier in specifiers:
+            checker = self.replacement_checkers(specifier, context)
+            if checker is None:
+                return None
+            checkers.extend(checker)
+        return checkers
+
+    def replacement_checkers(self, specifier: ConversionSpecifier,
+                             context: Context) -> List[Tuple[Callable[[Node], None],
+                                                             Callable[[Type], None]]]:
+        """Returns a list of tuples of two functions that check whether a replacement is
+        of the right type for the specifier. The first functions take a node and checks
+        its type in the right type context. The second function just checks a type.
+        """
+        checkers = []  # type: List[ Tuple[ Callable[[Node], None], Callable[[Type], None] ] ]
+
+        if specifier.width == '*':
+            checkers.append(self.checkers_for_star(context))
+        if specifier.precision == '*':
+            checkers.append(self.checkers_for_star(context))
+        if specifier.type == 'c':
+            c = self.checkers_for_c_type(specifier.type, context)
+            if c is None:
+                return None
+            checkers.append(c)
+        elif specifier.type != '%':
+            c = self.checkers_for_regular_type(specifier.type, context)
+            if c is None:
+                return None
+            checkers.append(c)
+        return checkers
+
+    def checkers_for_star(self, context: Context) -> Tuple[Callable[[Node], None],
+                                                           Callable[[Type], None]]:
+        """Returns a tuple of check functions that check whether, respectively,
+        a node or a type is compatible with a star in a conversion specifier.
+        """
+        expected = self.named_type('builtins.int')
+
+        def check_type(type: Type = None) -> None:
+            expected = self.named_type('builtins.int')
+            self.chk.check_subtype(type, expected, context, '* wants int')
+
+        def check_node(node: Node) -> None:
+            type = self.accept(node, expected)
+            check_type(type)
+
+        return check_node, check_type
+
+    def checkers_for_regular_type(self, type: str,
+                                  context: Context) -> Tuple[Callable[[Node], None],
+                                                             Callable[[Type], None]]:
+        """Returns a tuple of check functions that check whether, respectively,
+        a node or a type is compatible with 'type'. Return None in case of an
+        error.
+        """
+        expected_type = self.conversion_type(type, context)
+        if expected_type is None:
+            return None
+
+        def check_type(type: Type = None) -> None:
+            self.chk.check_subtype(type, expected_type, context,
+                              messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
+                              'expression has type', 'placeholder has type')
+
+        def check_node(node: Node) -> None:
+            type = self.accept(node, expected_type)
+            check_type(type)
+
+        return check_node, check_type
+
+    def checkers_for_c_type(self, type: str, context: Context) -> Tuple[Callable[[Node], None],
+                                                                        Callable[[Type], None]]:
+        """Returns a tuple of check functions that check whether, respectively,
+        a node or a type is compatible with 'type' that is a character type.
+        """
+        expected_type = self.conversion_type(type, context)
+        if expected_type is None:
+            return None
+
+        def check_type(type: Type = None) -> None:
+            self.chk.check_subtype(type, expected_type, context,
+                              messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
+                              'expression has type', 'placeholder has type')
+
+        def check_node(node: Node) -> None:
+            """int, or str with length 1"""
+            type = self.accept(node, expected_type)
+            if isinstance(node, StrExpr) and len(cast(StrExpr, node).value) != 1:
+                self.msg.requires_int_or_char(context)
+            check_type(type)
+
+        return check_node, check_type
+
+    def conversion_type(self, p: str, context: Context) -> Type:
+        """Return the type that is accepted for a string interpolation
+        conversion specifier type.
+
+        Note that both Python's float (e.g. %f) and integer (e.g. %d)
+        specifier types accept both float and integers.
+        """
+        if p in ['s', 'r']:
+            return AnyType()
+        elif p in ['d', 'i', 'o', 'u', 'x', 'X',
+                   'e', 'E', 'f', 'F', 'g', 'G']:
+            return UnionType([self.named_type('builtins.int'),
+                              self.named_type('builtins.float')])
+        elif p in ['c']:
+            return UnionType([self.named_type('builtins.int'),
+                              self.named_type('builtins.float'),
+                              self.named_type('builtins.str')])
+        else:
+            self.msg.unsupported_placeholder(p, context)
+            return None
+
+    #
+    # Helpers
+    #
+
+    def named_type(self, name: str) -> Instance:
+        """Return an instance type with type given by the name and no type
+        arguments. Alias for TypeChecker.named_type.
+        """
+        return self.chk.named_type(name)
+
+    def accept(self, node: Node, context: Type = None) -> Type:
+        """Type check a node. Alias for TypeChecker.accept."""
+        return self.chk.accept(node, context)
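As a quick standalone check of the conversion-specifier regex assembled in parse_conversion_specifiers above, the snippet below runs the same pattern over a sample format string (the sample string and variable names are made up for illustration):

    import re

    # Same pattern that parse_conversion_specifiers builds piece by piece.
    regex = ('%' + r'(\((\w*)\))?' + r'([#0\-+ ]*)' + r'(\*|[1-9][0-9]*)?'
             + r'(?:\.(\*|[0-9]+)?)?' + r'[hlL]?' + r'(.)?')

    for parens_key, key, flags, width, precision, type_ in re.findall(
            regex, '%(name)s is %5.2f%%'):
        print((key if parens_key else None, flags, width, precision, type_))
    # Prints, in order:
    #   ('name', '', '', '', 's')
    #   (None, '', '5', '2', 'f')
    #   (None, '', '', '', '%')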
diff --git a/mypy/constraints.py b/mypy/constraints.py
new file mode 100644
index 0000000..cfb9d06
--- /dev/null
+++ b/mypy/constraints.py
@@ -0,0 +1,319 @@
+"""Type inference constraints."""
+
+from typing import List, cast
+
+from mypy.types import (
+    CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
+    Instance, TupleType, UnionType, Overloaded, ErasedType, PartialType, DeletedType,
+    is_named_instance
+)
+from mypy.maptype import map_instance_to_supertype
+from mypy import nodes
+import mypy.subtypes
+
+
+SUBTYPE_OF = 0  # type: int
+SUPERTYPE_OF = 1  # type: int
+
+
+class Constraint:
+    """A representation of a type constraint.
+
+    It can be either T <: type or T :> type (T is a type variable).
+    """
+
+    type_var = 0   # Type variable id
+    op = 0         # SUBTYPE_OF or SUPERTYPE_OF
+    target = None  # type: Type
+
+    def __init__(self, type_var: int, op: int, target: Type) -> None:
+        self.type_var = type_var
+        self.op = op
+        self.target = target
+
+    def __repr__(self) -> str:
+        op_str = '<:'
+        if self.op == SUPERTYPE_OF:
+            op_str = ':>'
+        return '{} {} {}'.format(self.type_var, op_str, self.target)
+
+
+def infer_constraints_for_callable(
+        callee: CallableType, arg_types: List[Type], arg_kinds: List[int],
+        formal_to_actual: List[List[int]]) -> List[Constraint]:
+    """Infer type variable constraints for a callable and actual arguments.
+
+    Return a list of constraints.
+    """
+
+    constraints = []  # type: List[Constraint]
+    tuple_counter = [0]
+
+    for i, actuals in enumerate(formal_to_actual):
+        for actual in actuals:
+            actual_type = get_actual_type(arg_types[actual], arg_kinds[actual],
+                                          tuple_counter)
+            c = infer_constraints(callee.arg_types[i], actual_type,
+                                  SUPERTYPE_OF)
+            constraints.extend(c)
+
+    return constraints
+
+
+def get_actual_type(arg_type: Type, kind: int,
+                    tuple_counter: List[int]) -> Type:
+    """Return the type of an actual argument with the given kind.
+
+    If the argument is a *arg, return the individual argument item.
+    """
+
+    if kind == nodes.ARG_STAR:
+        if isinstance(arg_type, Instance):
+            if arg_type.type.fullname() == 'builtins.list':
+                # List *arg.
+                return arg_type.args[0]
+            elif arg_type.args:
+                # TODO try to map type arguments to Iterable
+                return arg_type.args[0]
+            else:
+                return AnyType()
+        elif isinstance(arg_type, TupleType):
+            # Get the next tuple item of a tuple *arg.
+            tuplet = cast(TupleType, arg_type)
+            tuple_counter[0] += 1
+            return tuplet.items[tuple_counter[0] - 1]
+        else:
+            return AnyType()
+    elif kind == nodes.ARG_STAR2:
+        if isinstance(arg_type, Instance) and (
+                (cast(Instance, arg_type)).type.fullname() == 'builtins.dict'):
+            # Dict **arg. TODO more general (Mapping)
+            return (cast(Instance, arg_type)).args[1]
+        else:
+            return AnyType()
+    else:
+        # No translation for other kinds.
+        return arg_type
+
+
+def infer_constraints(template: Type, actual: Type,
+                      direction: int) -> List[Constraint]:
+    """Infer type constraints.
+
+    Match a template type, which may contain type variable references,
+    recursively against a type which does not contain (the same) type
+    variable references. The result is a list of type constraints of
+    form 'T is a supertype/subtype of x', where T is a type variable
+    present in the template and x is a type without reference to
+    type variables present in the template.
+
+    Assume T and S are type variables. Now the following results can be
+    calculated (read as '(template, actual) --> result'):
+
+      (T, X)            -->  T :> X
+      (X[T], X[Y])      -->  T <: Y and T :> Y
+      ((T, T), (X, Y))  -->  T :> X and T :> Y
+      ((T, S), (X, Y))  -->  T :> X and S :> Y
+      (X[T], Any)       -->  T <: Any and T :> Any
+
+    The constraints are represented as Constraint objects.
+    """
+
+    return template.accept(ConstraintBuilderVisitor(actual, direction))
+
+
+class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
+    """Visitor class for inferring type constraints."""
+
+    # The type that is compared against a template
+    # TODO: The value may be None. Is that actually correct?
+    actual = None  # type: Type
+
+    def __init__(self, actual: Type, direction: int) -> None:
+        # Direction must be SUBTYPE_OF or SUPERTYPE_OF.
+        self.actual = actual
+        self.direction = direction
+
+    # Trivial leaf types
+
+    def visit_unbound_type(self, template: UnboundType) -> List[Constraint]:
+        return []
+
+    def visit_any(self, template: AnyType) -> List[Constraint]:
+        return []
+
+    def visit_void(self, template: Void) -> List[Constraint]:
+        return []
+
+    def visit_none_type(self, template: NoneTyp) -> List[Constraint]:
+        return []
+
+    def visit_erased_type(self, template: ErasedType) -> List[Constraint]:
+        return []
+
+    def visit_deleted_type(self, template: DeletedType) -> List[Constraint]:
+        return []
+
+    # Errors
+
+    def visit_partial_type(self, template: PartialType) -> List[Constraint]:
+        # We can't do anything useful with a partial type here.
+        assert False, "Internal error"
+
+    # Non-trivial leaf type
+
+    def visit_type_var(self, template: TypeVarType) -> List[Constraint]:
+        if self.actual:
+            return [Constraint(template.id, self.direction, self.actual)]
+        else:
+            return []
+
+    # Non-leaf types
+
+    def visit_instance(self, template: Instance) -> List[Constraint]:
+        actual = self.actual
+        res = []  # type: List[Constraint]
+        if isinstance(actual, Instance):
+            instance = cast(Instance, actual)
+            if (self.direction == SUBTYPE_OF and
+                    template.type.has_base(instance.type.fullname())):
+                mapped = map_instance_to_supertype(template, instance.type)
+                for i in range(len(instance.args)):
+                    # The constraints for generic type parameters are
+                    # invariant. Include the default constraint and its
+                    # negation to achieve the effect.
+                    cb = infer_constraints(mapped.args[i], instance.args[i],
+                                           self.direction)
+                    res.extend(cb)
+                    res.extend(negate_constraints(cb))
+                return res
+            elif (self.direction == SUPERTYPE_OF and
+                    instance.type.has_base(template.type.fullname())):
+                mapped = map_instance_to_supertype(instance, template.type)
+                for j in range(len(template.args)):
+                    # The constraints for generic type parameters are
+                    # invariant.
+                    cb = infer_constraints(template.args[j], mapped.args[j],
+                                           self.direction)
+                    res.extend(cb)
+                    res.extend(negate_constraints(cb))
+                return res
+        if isinstance(actual, AnyType):
+            # IDEA: Include both ways, i.e. add negation as well?
+            return self.infer_against_any(template.args)
+        if (isinstance(actual, TupleType) and
+            (is_named_instance(template, 'typing.Iterable') or
+             is_named_instance(template, 'typing.Sequence') or
+             is_named_instance(template, 'typing.Reversible'))
+                and self.direction == SUPERTYPE_OF):
+            actual = cast(TupleType, actual)
+            for item in actual.items:
+                cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
+                res.extend(cb)
+            return res
+        else:
+            return []
+
+    def visit_callable_type(self, template: CallableType) -> List[Constraint]:
+        if isinstance(self.actual, CallableType):
+            cactual = cast(CallableType, self.actual)
+            # FIX verify argument counts
+            # FIX what if one of the functions is generic
+            res = []  # type: List[Constraint]
+
+            # We can't infer constraints from arguments if the template is Callable[..., T] (with
+            # literal '...').
+            if not template.is_ellipsis_args:
+                # The lengths should match, but don't crash (it will error elsewhere).
+                for t, a in zip(template.arg_types, cactual.arg_types):
+                    # Negate constraints due to function argument type contravariance.
+                    res.extend(negate_constraints(infer_constraints(t, a, self.direction)))
+            res.extend(infer_constraints(template.ret_type, cactual.ret_type,
+                                         self.direction))
+            return res
+        elif isinstance(self.actual, AnyType):
+            # FIX what if generic
+            res = self.infer_against_any(template.arg_types)
+            res.extend(infer_constraints(template.ret_type, AnyType(),
+                                         self.direction))
+            return res
+        elif isinstance(self.actual, Overloaded):
+            return self.infer_against_overloaded(cast(Overloaded, self.actual),
+                                                 template)
+        else:
+            return []
+
+    def infer_against_overloaded(self, overloaded: Overloaded,
+                                 template: CallableType) -> List[Constraint]:
+        # Create constraints by matching an overloaded type against a template.
+        # This is tricky to do in general. We cheat by only matching against
+        # the first overload item, and by only matching the return type. This
+        # seems to work somewhat well, but we should really use a more
+        # reliable technique.
+        item = find_matching_overload_item(overloaded, template)
+        return infer_constraints(template.ret_type, item.ret_type,
+                                 self.direction)
+
+    def visit_tuple_type(self, template: TupleType) -> List[Constraint]:
+        actual = self.actual
+        if (isinstance(actual, TupleType) and
+                len((cast(TupleType, actual)).items) == len(template.items)):
+            res = []  # type: List[Constraint]
+            for i in range(len(template.items)):
+                res.extend(infer_constraints(template.items[i],
+                                             cast(TupleType, actual).items[i],
+                                             self.direction))
+            return res
+        elif isinstance(actual, AnyType):
+            return self.infer_against_any(template.items)
+        else:
+            return []
+
+    def visit_union_type(self, template: UnionType) -> List[Constraint]:
+        res = []  # type: List[Constraint]
+        for item in template.items:
+            res.extend(infer_constraints(item, self.actual, self.direction))
+        return res
+
+    def infer_against_any(self, types: List[Type]) -> List[Constraint]:
+        res = []  # type: List[Constraint]
+        for t in types:
+            res.extend(infer_constraints(t, AnyType(), self.direction))
+        return res
+
+    def visit_overloaded(self, type: Overloaded) -> List[Constraint]:
+        res = []  # type: List[Constraint]
+        for t in type.items():
+            res.extend(infer_constraints(t, self.actual, self.direction))
+        return res
+
+
+def negate_constraints(constraints: List[Constraint]) -> List[Constraint]:
+    res = []  # type: List[Constraint]
+    for c in constraints:
+        res.append(Constraint(c.type_var, neg_op(c.op), c.target))
+    return res
+
+
+def neg_op(op: int) -> int:
+    """Map SubtypeOf to SupertypeOf and vice versa."""
+
+    if op == SUBTYPE_OF:
+        return SUPERTYPE_OF
+    elif op == SUPERTYPE_OF:
+        return SUBTYPE_OF
+    else:
+        raise ValueError('Invalid operator {}'.format(op))
+
+
+def find_matching_overload_item(overloaded: Overloaded, template: CallableType) -> CallableType:
+    """Disambiguate overload item against a template."""
+    items = overloaded.items()
+    for item in items:
+        # Return type may be indeterminate in the template, so ignore it when performing a
+        # subtype check.
+        if mypy.subtypes.is_callable_subtype(item, template, ignore_return=True):
+            return item
+    # Fall back to the first item if we can't find a match. This is totally arbitrary --
+    # maybe we should just bail out at this point.
+    return items[0]
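The infer_constraints docstring above lists the expected (template, actual) results. The toy sketch below mirrors only the Constraint/neg_op bookkeeping, with plain strings standing in for real mypy types (everything here is illustrative, not mypy's API):

    SUBTYPE_OF, SUPERTYPE_OF = 0, 1

    def neg_op(op: int) -> int:
        # SUBTYPE_OF <-> SUPERTYPE_OF, as in neg_op above.
        return SUPERTYPE_OF if op == SUBTYPE_OF else SUBTYPE_OF

    def show(type_var: str, op: int, target: str) -> str:
        return '{} {} {}'.format(type_var, '<:' if op == SUBTYPE_OF else ':>', target)

    # Matching template List[T] against actual List[int]: generic arguments
    # are invariant, so the default constraint and its negation are both kept.
    cs = [('T', SUPERTYPE_OF, 'int')]
    cs += [(tv, neg_op(op), tgt) for tv, op, tgt in cs]
    print([show(*c) for c in cs])   # ['T :> int', 'T <: int']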
diff --git a/mypy/defaults.py b/mypy/defaults.py
new file mode 100644
index 0000000..5a0875f
--- /dev/null
+++ b/mypy/defaults.py
@@ -0,0 +1,2 @@
+PYTHON2_VERSION = (2, 7)
+PYTHON3_VERSION = (3, 5)
diff --git a/mypy/docstring.py b/mypy/docstring.py
new file mode 100644
index 0000000..4ee14bd
--- /dev/null
+++ b/mypy/docstring.py
@@ -0,0 +1,204 @@
+"""Find type annotations from a docstring.
+
+Do not actually try to parse the annotations, just return them as strings.
+
+Also recognize some common non-PEP-484 aliases such as 'a string' for 'str'
+and 'list of int' for 'List[int]'.
+
+Based on original implementation by Kyle Consalus.
+
+TODO: Decide whether it makes sense to do the heuristic analysis of aliases and natural
+  language type descriptions as it's all kind of ad hoc.
+"""
+
+import re
+from typing import Optional, List, Tuple, Dict, Sequence
+from collections import OrderedDict
+
+
+_example1 = """Fetches rows from a Bigtable.
+
+    Retrieves rows pertaining to the given keys from the Table instance
+    represented by big_table.  Silly things may happen if
+    other_silly_variable is not None.
+
+    Args:
+        big_table: An open Bigtable Table instance.
+        keys (Sequence[str]): A sequence of strings representing the key of each table row
+            to fetch.
+            but: if the keys are broken, we die.
+        other_silly_variable (int): Another optional variable, that has a much
+            longer name than the other args, and which does nothing.
+        abc0 (Tuple[int, bool]): Hi.
+        abc (Tuple[int, bool], optional): Hi.
+
+    Returns:
+        Dict[str, int]: Things.
+
+    Raises:
+        IOError: An error occurred accessing the bigtable.Table object.
+    """
+
+
+# Regular expression that finds the argument name and type in a line such
+# as '   name (type): description'.
+PARAM_RE = re.compile(r'^\s*(?P<name>[A-Za-z_][A-Za-z_0-9]*)(\s+\((?P<type>[^)]+)\))?:')
+
+# Type strings with these brackets are rejected.
+BRACKET_RE = re.compile(r'\(|\)|\{|\}')
+
+# Support some commonly used type aliases that aren't normally valid in annotations.
+# TODO: Optionally reject these (or give a warning if these are used).
+translations = {
+    'obj': 'Any',
+    'boolean': 'bool',
+    'string': 'str',
+    'integer': 'int',
+    'number': 'float',
+    'list': 'List[Any]',
+    'set': 'Set[Any]',
+    'sequence': 'Sequence[Any]',
+    'iterable': 'Iterable[Any]',
+    'dict': 'Dict[Any, Any]',
+    'dictionary': 'Dict[Any, Any]',
+    'mapping': 'Mapping[Any, Any]',
+}
+
+# Some common types that we should recognize.
+known_types = [
+    'int', 'str', 'unicode', 'bool', 'float', 'None', 'tuple',
+]
+
+known_generic_types = [
+    'List', 'Set', 'Dict', 'Iterable', 'Sequence', 'Mapping',
+]
+
+# Some natural language patterns that we want to support in docstrings.
+known_patterns = [
+    ('list of ?', 'List[?]'),
+    ('set of ?', 'Set[?]'),
+    ('sequence of ?', 'Sequence[?]'),
+    ('optional ?', 'Optional[?]'),
+]
+
+
+class DocstringTypes(object):
+    def __init__(self):
+        self.args = OrderedDict()  # type: Dict[str, Optional[str]]
+        self.rettype = None  # type: Optional[str]
+
+    def as_type_str(self) -> str:
+        return ('(' + ','.join([v or 'Any' for v in self.args.values()]) +
+                ') -> ' + (self.rettype or 'Any'))
+
+    def __str__(self):
+        return repr({'args': self.args, 'return': self.rettype})
+
+
+def wsprefix(s: str) -> str:
+    return s[:len(s) - len(s.lstrip())]
+
+
+def scrubtype(typestr: Optional[str], only_known=False) -> Optional[str]:
+    if typestr is None:
+        return typestr
+
+    # Reject typestrs with parentheses or curly braces.
+    if BRACKET_RE.search(typestr):
+        return None
+
+    # Reject typestrs whose square brackets don't match & those with commas outside square
+    # brackets.
+    bracket_level = 0
+    for c in typestr:
+        if c == '[':
+            bracket_level += 1
+        elif c == ']':
+            bracket_level -= 1
+            if bracket_level < 0:  # Square brackets don't match
+                return None
+        elif c == ',' and bracket_level == 0:  # A comma appears outside brackets
+            return None
+    if bracket_level > 0:
+        return None
+
+    recognized = False
+    typestr = typestr.strip()
+    for prefix in ('a', 'A', 'an', 'An', 'the', 'The'):
+        if typestr.startswith(prefix + ' '):
+            typestr = typestr[len(prefix) + 1:]
+    if typestr in translations:
+        typestr = translations[typestr]
+        recognized = True
+    if typestr in known_types:
+        recognized = True
+    if any(typestr.startswith(t + '[') for t in known_generic_types):
+        recognized = True
+    for pattern, repl in known_patterns:
+        pattern = pattern.replace('?', '([a-zA-Z]+)') + '$'
+        m = re.match(pattern, typestr)
+        if m:
+            arg = scrubtype(m.group(1), only_known=only_known)
+            if arg:
+                typestr = repl.replace('?', arg)
+                recognized = True
+    if not recognized and only_known:
+        # This is potentially a type but not one of the known types.
+        return None
+    return typestr
+
+
+def parse_args(lines: List[str]) -> Tuple[Dict[str, str], List[str]]:
+    res = OrderedDict()  # type: Dict[str, str]
+    indent = wsprefix(lines[0])
+    while lines:
+        l = lines[0]
+        if l.strip() in ('Returns:', 'Raises:'):
+            break
+        lines = lines[1:]
+        if not l or l.isspace():
+            break
+        if not wsprefix(l) == indent:
+            continue
+        m = PARAM_RE.match(l)
+        if m:
+            gd = m.groupdict()
+            res[gd['name']] = scrubtype(gd['type'])
+    return res, lines
+
+
+def parse_return(lines: List[str]) -> Tuple[Optional[str], List[str]]:
+    res = None  # type: Optional[str]
+    while lines and lines[0].strip() == '':
+        lines = lines[1:]
+    if lines:
+        l = lines[0]
+        lines = lines[1:]
+        segs = l.strip().split(':', 1)
+        if len(segs) >= 1:
+            res = scrubtype(segs[0], only_known=(len(segs) == 1))
+    return res, lines
+
+
+def parse_docstring(pds: str) -> DocstringTypes:
+    ds = DocstringTypes()
+    lines = pds.splitlines()
+    while lines:
+        first = lines[0]
+        if first.strip() in ('Args:', 'Params:', 'Arguments:'):
+            ds.args, lines = parse_args(lines[1:])
+            break
+        lines = lines[1:]
+    while lines:
+        first = lines[0]
+        if first.strip() == 'Returns:':
+            ds.rettype, lines = parse_return(lines[1:])
+            break
+        lines = lines[1:]
+    if not ds.args:
+        return None
+    return ds
+
+
+if __name__ == '__main__':
+    print(parse_docstring(_example1))
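To illustrate the alias handling described at the top of this module, here is a small usage sketch of scrubtype (assuming the module is importable as mypy.docstring; inputs and expected results follow the tables above):

    from mypy.docstring import scrubtype

    print(scrubtype('a string'))         # 'str'        (article stripped, alias applied)
    print(scrubtype('list of int'))      # 'List[int]'  (natural-language pattern)
    print(scrubtype('Dict[str, int]'))   # unchanged: already a known generic type
    print(scrubtype('Tuple(int, str)'))  # None: parentheses are rejected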
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
new file mode 100644
index 0000000..e67d20b
--- /dev/null
+++ b/mypy/erasetype.py
@@ -0,0 +1,110 @@
+import typing
+
+from mypy.types import (
+    Type, TypeVisitor, UnboundType, ErrorType, AnyType, Void, NoneTyp,
+    Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType,
+    PartialType, DeletedType, TypeTranslator, TypeList
+)
+
+
+def erase_type(typ: Type) -> Type:
+    """Erase any type variables from a type.
+
+    Also replace tuple types with the corresponding concrete types. Replace
+    callable types with empty callable types.
+
+    Examples:
+      A -> A
+      B[X] -> B[Any]
+      Tuple[A, B] -> tuple
+      Callable[...] -> Callable[[], None]
+    """
+
+    return typ.accept(EraseTypeVisitor())
+
+
+class EraseTypeVisitor(TypeVisitor[Type]):
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        assert False, 'Not supported'
+
+    def visit_error_type(self, t: ErrorType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        assert False, 'Not supported'
+
+    def visit_any(self, t: AnyType) -> Type:
+        return t
+
+    def visit_void(self, t: Void) -> Type:
+        return t
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        return t
+
+    def visit_erased_type(self, t: ErasedType) -> Type:
+        # Should not get here.
+        raise RuntimeError()
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        # Should not get here.
+        raise RuntimeError()
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        return t
+
+    def visit_instance(self, t: Instance) -> Type:
+        return Instance(t.type, [AnyType()] * len(t.args), t.line)
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        return AnyType()
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        # We must preserve the fallback type for overload resolution to work.
+        return CallableType([], [], [], Void(), t.fallback)
+
+    def visit_overloaded(self, t: Overloaded) -> Type:
+        return t.items()[0].accept(self)
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        return t.fallback
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        return AnyType()        # XXX: return underlying type if only one?
+
+
+def erase_generic_types(t: Type) -> Type:
+    """Remove generic type arguments and type variables from a type.
+
+    Replace all types A[...] with simply A, and all type variables
+    with 'Any'.
+    """
+
+    if t:
+        return t.accept(GenericTypeEraser())
+    else:
+        return None
+
+
+class GenericTypeEraser(TypeTranslator):
+    """Implementation of type erasure"""
+
+    # FIX: What about generic function types?
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        return AnyType()
+
+    def visit_instance(self, t: Instance) -> Type:
+        return Instance(t.type, [], t.line)
+
+
+def erase_typevars(t: Type) -> Type:
+    """Replace all type variables in a type with any."""
+    return t.accept(TypeVarEraser())
+
+
+class TypeVarEraser(TypeTranslator):
+    """Implementation of type erasure"""
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        return AnyType()
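erase_typevars above replaces every type variable in a mypy type with Any. A rough standalone analogue over typing objects (assuming Python 3.9+ for subscripting builtin containers; this does not touch mypy's internal representation):

    from typing import Any, Dict, List, TypeVar, get_args, get_origin

    T = TypeVar('T')
    S = TypeVar('S')

    def erase(tp: object) -> object:
        # Replace every type variable in a typing construct with Any.
        if isinstance(tp, TypeVar):
            return Any
        origin, args = get_origin(tp), get_args(tp)
        if origin is None or not args:
            return tp
        return origin[tuple(erase(a) for a in args)]

    print(erase(Dict[T, List[S]]))   # dict[typing.Any, list[typing.Any]]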
diff --git a/mypy/errors.py b/mypy/errors.py
new file mode 100644
index 0000000..69a0b27
--- /dev/null
+++ b/mypy/errors.py
@@ -0,0 +1,381 @@
+import os
+import os.path
+import sys
+import traceback
+
+from typing import Tuple, List, TypeVar, Sequence, Any, Callable, Set
+
+
+T = TypeVar('T')
+
+
+class ErrorInfo:
+    """Representation of a single error message."""
+
+    # Description of a sequence of imports that refer to the source file
+    # related to this error. Each item is a (path, line number) tuple.
+    import_ctx = None  # type: List[Tuple[str, int]]
+
+    # The source file that was the source of this error.
+    file = ''
+
+    # The name of the type in which this error is located.
+    type = ''     # Unqualified, may be None
+
+    # The name of the function or member in which this error is located.
+    function_or_member = ''     # Unqualified, may be None
+
+    # The line number related to this error within file.
+    line = 0     # -1 if unknown
+
+    # Either 'error' or 'note'.
+    severity = ''
+
+    # The error message.
+    message = ''
+
+    # If True, we should halt build after the file that generated this error.
+    blocker = False
+
+    # Only report this particular message once per program.
+    only_once = False
+
+    def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str,
+                 function_or_member: str, line: int, severity: str, message: str,
+                 blocker: bool, only_once: bool) -> None:
+        self.import_ctx = import_ctx
+        self.file = file
+        self.type = typ
+        self.function_or_member = function_or_member
+        self.line = line
+        self.severity = severity
+        self.message = message
+        self.blocker = blocker
+        self.only_once = only_once
+
+
+class Errors:
+    """Container for compile errors.
+
+    This class generates and keeps track of compile errors and the
+    current error context (nested imports).
+    """
+
+    # List of generated error messages.
+    error_info = None  # type: List[ErrorInfo]
+
+    # Current error context: nested import context/stack, as a list of (path, line) pairs.
+    import_ctx = None  # type: List[Tuple[str, int]]
+
+    # Path name prefix that is removed from all paths, if set.
+    ignore_prefix = None  # type: str
+
+    # Path to current file.
+    file = None  # type: str
+
+    # Stack of short names of current types (or None).
+    type_name = None  # type: List[str]
+
+    # Stack of short names of current functions or members (or None).
+    function_or_member = None  # type: List[str]
+
+    # Ignore errors on these lines.
+    ignored_lines = None  # type: Set[int]
+
+    # Collection of reported only_once messages.
+    only_once_messages = None  # type: Set[str]
+
+    def __init__(self) -> None:
+        self.error_info = []
+        self.import_ctx = []
+        self.type_name = [None]
+        self.function_or_member = [None]
+        self.ignored_lines = set()
+        self.only_once_messages = set()
+
+    def copy(self) -> 'Errors':
+        new = Errors()
+        new.file = self.file
+        new.import_ctx = self.import_ctx[:]
+        new.type_name = self.type_name[:]
+        new.function_or_member = self.function_or_member[:]
+        return new
+
+    def set_ignore_prefix(self, prefix: str) -> None:
+        """Set path prefix that will be removed from all paths."""
+        prefix = os.path.normpath(prefix)
+        # Add separator to the end, if not given.
+        if os.path.basename(prefix) != '':
+            prefix += os.sep
+        self.ignore_prefix = prefix
+
+    def set_file(self, file: str) -> None:
+        """Set the path of the current file."""
+        file = os.path.normpath(file)
+        self.file = remove_path_prefix(file, self.ignore_prefix)
+
+    def set_ignored_lines(self, ignored_lines: Set[int]) -> None:
+        self.ignored_lines = ignored_lines
+
+    def push_function(self, name: str) -> None:
+        """Set the current function or member short name (it can be None)."""
+        self.function_or_member.append(name)
+
+    def pop_function(self) -> None:
+        self.function_or_member.pop()
+
+    def push_type(self, name: str) -> None:
+        """Set the short name of the current type (it can be None)."""
+        self.type_name.append(name)
+
+    def pop_type(self) -> None:
+        self.type_name.pop()
+
+    def push_import_context(self, path: str, line: int) -> None:
+        """Add a (file, line) tuple to the import context."""
+        self.import_ctx.append((os.path.normpath(path), line))
+
+    def pop_import_context(self) -> None:
+        """Remove the topmost item from the import context."""
+        self.import_ctx.pop()
+
+    def import_context(self) -> List[Tuple[str, int]]:
+        """Return a copy of the import context."""
+        return self.import_ctx[:]
+
+    def set_import_context(self, ctx: List[Tuple[str, int]]) -> None:
+        """Replace the entire import context with a new value."""
+        self.import_ctx = ctx[:]
+
+    def report(self, line: int, message: str, blocker: bool = False,
+               severity: str = 'error', file: str = None, only_once: bool = False) -> None:
+        """Report message at the given line using the current error context.
+
+        Args:
+            line: line number of error
+            message: message to report
+            blocker: if True, don't continue analysis after this error
+            severity: 'error', 'note' or 'warning'
+            file: if non-None, override current file as context
+            only_once: if True, only report this exact message once per build
+        """
+        type = self.type_name[-1]
+        if len(self.function_or_member) > 2:
+            type = None  # Omit type context if nested function
+        if file is None:
+            file = self.file
+        info = ErrorInfo(self.import_context(), file, type,
+                         self.function_or_member[-1], line, severity, message,
+                         blocker, only_once)
+        self.add_error_info(info)
+
+    def add_error_info(self, info: ErrorInfo) -> None:
+        if info.line in self.ignored_lines:
+            # Annotation requests us to ignore all errors on this line.
+            return
+        if info.only_once:
+            if info.message in self.only_once_messages:
+                return
+            self.only_once_messages.add(info.message)
+        self.error_info.append(info)
+
+    def num_messages(self) -> int:
+        """Return the number of generated messages."""
+        return len(self.error_info)
+
+    def is_errors(self) -> bool:
+        """Are there any generated errors?"""
+        return bool(self.error_info)
+
+    def is_blockers(self) -> bool:
+        """Are the any errors that are blockers?"""
+        return any(err for err in self.error_info if err.blocker)
+
+    def raise_error(self) -> None:
+        """Raise a CompileError with the generated messages.
+
+        Render the messages suitable for displaying.
+        """
+        raise CompileError(self.messages())
+
+    def messages(self) -> List[str]:
+        """Return a string list that represents the error messages.
+
+        Use a form suitable for displaying to the user.
+        """
+        a = []  # type: List[str]
+        errors = self.render_messages(self.sort_messages(self.error_info))
+        errors = self.remove_duplicates(errors)
+        for file, line, severity, message in errors:
+            s = ''
+            if file is not None:
+                if line is not None and line >= 0:
+                    srcloc = '{}:{}'.format(file, line)
+                else:
+                    srcloc = file
+                s = '{}: {}: {}'.format(srcloc, severity, message)
+            else:
+                s = message
+            a.append(s)
+        return a
+
+    def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int,
+                                                                     str, str]]:
+        """Translate the messages into a sequence of tuples.
+
+        Each tuple is of form (path, line, severity, message). The rendered
+        sequence includes information about error contexts. The path
+        item may be None. If the line item is negative, the line
+        number is not defined for the tuple.
+        """
+        result = []  # type: List[Tuple[str, int, str, str]] # (path, line, severity, message)
+
+        prev_import_context = []  # type: List[Tuple[str, int]]
+        prev_function_or_member = None  # type: str
+        prev_type = None  # type: str
+
+        for e in errors:
+            # Report module import context, if different from previous message.
+            if e.import_ctx != prev_import_context:
+                last = len(e.import_ctx) - 1
+                i = last
+                while i >= 0:
+                    path, line = e.import_ctx[i]
+                    fmt = '{}:{}: note: In module imported here'
+                    if i < last:
+                        fmt = '{}:{}: note: ... from here'
+                    if i > 0:
+                        fmt += ','
+                    else:
+                        fmt += ':'
+                    # Remove prefix to ignore from path (if present) to
+                    # simplify path.
+                    path = remove_path_prefix(path, self.ignore_prefix)
+                    result.append((None, -1, 'note', fmt.format(path, line)))
+                    i -= 1
+
+            # Report context within a source file.
+            if (e.function_or_member != prev_function_or_member or
+                    e.type != prev_type):
+                if e.function_or_member is None:
+                    if e.type is None:
+                        result.append((e.file, -1, 'note', 'At top level:'))
+                    else:
+                        result.append((e.file, -1, 'note', 'In class "{}":'.format(
+                            e.type)))
+                else:
+                    if e.type is None:
+                        result.append((e.file, -1, 'note',
+                                       'In function "{}":'.format(
+                                           e.function_or_member)))
+                    else:
+                        result.append((e.file, -1, 'note',
+                                       'In member "{}" of class "{}":'.format(
+                                           e.function_or_member, e.type)))
+            elif e.type != prev_type:
+                if e.type is None:
+                    result.append((e.file, -1, 'note', 'At top level:'))
+                else:
+                    result.append((e.file, -1, 'note',
+                                   'In class "{}":'.format(e.type)))
+
+            result.append((e.file, e.line, e.severity, e.message))
+
+            prev_import_context = e.import_ctx
+            prev_function_or_member = e.function_or_member
+            prev_type = e.type
+
+        return result
+
+    def sort_messages(self, errors: List[ErrorInfo]) -> List[ErrorInfo]:
+        """Sort an array of error messages locally by line number.
+
+        I.e., sort a run of consecutive messages with the same file
+        context by line number, but otherwise retain the general
+        ordering of the messages.
+        """
+        result = []  # type: List[ErrorInfo]
+        i = 0
+        while i < len(errors):
+            i0 = i
+            # Find neighbouring errors with the same context and file.
+            while (i + 1 < len(errors) and
+                    errors[i + 1].import_ctx == errors[i].import_ctx and
+                    errors[i + 1].file == errors[i].file):
+                i += 1
+            i += 1
+
+            # Sort the errors specific to a file according to line number.
+            a = sorted(errors[i0:i], key=lambda x: x.line)
+            result.extend(a)
+        return result
+
+    def remove_duplicates(self, errors: List[Tuple[str, int, str, str]]
+                          ) -> List[Tuple[str, int, str, str]]:
+        """Remove duplicates from a sorted error list."""
+        res = []  # type: List[Tuple[str, int, str, str]]
+        i = 0
+        while i < len(errors):
+            dup = False
+            j = i - 1
+            while (j >= 0 and errors[j][0] == errors[i][0] and
+                    errors[j][1] == errors[i][1]):
+                if errors[j] == errors[i]:
+                    dup = True
+                    break
+                j -= 1
+            if not dup:
+                res.append(errors[i])
+            i += 1
+        return res
+
+
+class CompileError(Exception):
+    """Exception raised when there is a compile error.
+
+    It can be a parse, semantic analysis, type check or other
+    compilation-related error.
+    """
+
+    messages = None  # type: List[str]
+
+    def __init__(self, messages: List[str]) -> None:
+        super().__init__('\n'.join(messages))
+        self.messages = messages
+
+
+def remove_path_prefix(path: str, prefix: str) -> str:
+    """If path starts with prefix, return copy of path with the prefix removed.
+    Otherwise, return path. If path is None, return None.
+    """
+    if prefix is not None and path.startswith(prefix):
+        return path[len(prefix):]
+    else:
+        return path
+
+
+# Corresponds to command-line flag --pdb.
+drop_into_pdb = False
+
+
+def set_drop_into_pdb(flag: bool) -> None:
+    global drop_into_pdb
+    drop_into_pdb = flag
+
+
+def report_internal_error(err: Exception, file: str, line: int) -> None:
+    """Display stack trace and file location for an internal error + exit."""
+    if drop_into_pdb:
+        import pdb  # type: ignore
+        pdb.post_mortem(sys.exc_info()[2])
+    tb = traceback.extract_stack()[:-2]
+    tb2 = traceback.extract_tb(sys.exc_info()[2])
+    print('Traceback (most recent call last):')
+    for s in traceback.format_list(tb + tb2):
+        print(s.rstrip('\n'))
+    print('{}: {}'.format(type(err).__name__, err))
+    print('\n*** INTERNAL ERROR ***')
+    print('\n{}:{}: error: Internal error --'.format(file, line),
+          'please report a bug at https://github.com/JukkaL/mypy/issues')
+    print('\nNOTE: you can use "mypy --pdb ..." to drop into the debugger when this happens.')
+    sys.exit(1)
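
The messages() method above joins each (file, line, severity, message) tuple
into the familiar "path:line: severity: text" form, dropping the location when
the file is unknown and the line number when it is negative. A standalone
sketch of that formatting rule (the helper name is ad hoc and not part of
mypy; illustrative only):

    def format_message(file, line, severity, message):
        # Mirror the formatting in Errors.messages(): omit the location when
        # file is None, and omit the line number when it is negative.
        if file is None:
            return message
        if line is not None and line >= 0:
            srcloc = '{}:{}'.format(file, line)
        else:
            srcloc = file
        return '{}: {}: {}'.format(srcloc, severity, message)

    print(format_message('foo.py', 3, 'error', 'Unsupported operand types'))
    # foo.py:3: error: Unsupported operand types
    print(format_message('foo.py', -1, 'note', 'In class "A":'))
    # foo.py: note: In class "A":
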
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
new file mode 100644
index 0000000..3b8ac51
--- /dev/null
+++ b/mypy/expandtype.py
@@ -0,0 +1,121 @@
+from typing import Dict, Tuple, List, cast
+
+from mypy.types import (
+    Type, Instance, CallableType, TypeVisitor, UnboundType, ErrorType, AnyType,
+    Void, NoneTyp, TypeVarType, Overloaded, TupleType, UnionType, ErasedType, TypeList,
+    PartialType, DeletedType
+)
+
+
+def expand_type(typ: Type, env: Dict[int, Type]) -> Type:
+    """Substitute any type variable references in a type given by a type
+    environment.
+    """
+
+    return typ.accept(ExpandTypeVisitor(env))
+
+
+def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
+    """Substitute type variables in type using values from an Instance."""
+
+    if instance.args == []:
+        return typ
+    else:
+        variables = {}  # type: Dict[int, Type]
+        for i in range(len(instance.args)):
+            variables[i + 1] = instance.args[i]
+        typ = expand_type(typ, variables)
+        if isinstance(typ, CallableType):
+            bounds = []  # type: List[Tuple[int, Type]]
+            for j in range(len(instance.args)):
+                bounds.append((j + 1, instance.args[j]))
+            typ = update_callable_implicit_bounds(cast(CallableType, typ), bounds)
+        else:
+            pass
+        return typ
+
+
+class ExpandTypeVisitor(TypeVisitor[Type]):
+    """Visitor that substitutes type variables with values."""
+
+    variables = None  # type: Dict[int, Type]  # TypeVar id -> TypeVar value
+
+    def __init__(self, variables: Dict[int, Type]) -> None:
+        self.variables = variables
+
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        return t
+
+    def visit_error_type(self, t: ErrorType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        assert False, 'Not supported'
+
+    def visit_any(self, t: AnyType) -> Type:
+        return t
+
+    def visit_void(self, t: Void) -> Type:
+        return t
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        return t
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        return t
+
+    def visit_erased_type(self, t: ErasedType) -> Type:
+        # Should not get here.
+        raise RuntimeError()
+
+    def visit_instance(self, t: Instance) -> Type:
+        args = self.expand_types(t.args)
+        return Instance(t.type, args, t.line)
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        repl = self.variables.get(t.id, t)
+        if isinstance(repl, Instance):
+            inst = cast(Instance, repl)
+            # Return copy of instance with type erasure flag on.
+            return Instance(inst.type, inst.args, inst.line, True)
+        else:
+            return repl
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        return t.copy_modified(arg_types=self.expand_types(t.arg_types),
+                               ret_type=t.ret_type.accept(self),
+                               bound_vars=self.expand_bound_vars(t.bound_vars))
+
+    def visit_overloaded(self, t: Overloaded) -> Type:
+        items = []  # type: List[CallableType]
+        for item in t.items():
+            items.append(cast(CallableType, item.accept(self)))
+        return Overloaded(items)
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        return TupleType(self.expand_types(t.items), t.fallback, t.line)
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        return UnionType(self.expand_types(t.items), t.line)
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        return t
+
+    def expand_types(self, types: List[Type]) -> List[Type]:
+        a = []  # type: List[Type]
+        for t in types:
+            a.append(t.accept(self))
+        return a
+
+    def expand_bound_vars(
+            self, types: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
+        a = []  # type: List[Tuple[int, Type]]
+        for id, t in types:
+            a.append((id, t.accept(self)))
+        return a
+
+
+def update_callable_implicit_bounds(
+        t: CallableType, arg_types: List[Tuple[int, Type]]) -> CallableType:
+    # FIX what if there are existing bounds?
+    return t.copy_modified(bound_vars=arg_types)
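
expand_type_by_instance() above builds an environment that maps type variable
ids 1..n to the instance's type arguments and then substitutes them via
ExpandTypeVisitor. Below is a self-contained toy model of that substitution
that uses plain tuples instead of mypy Type objects (illustrative only, not
the mypy API):

    def expand(typ, env):
        # typ is either ('var', id) or ('instance', name, [args...]).
        if typ[0] == 'var':
            return env.get(typ[1], typ)
        name, args = typ[1], typ[2]
        return ('instance', name, [expand(a, env) for a in args])

    # For a class C[T1, T2] instantiated as C[int, str] the environment is
    # {1: int, 2: str}, so a member type Dict[T2, T1] expands to Dict[str, int].
    env = {1: ('instance', 'int', []), 2: ('instance', 'str', [])}
    member = ('instance', 'Dict', [('var', 2), ('var', 1)])
    print(expand(member, env))
    # ('instance', 'Dict', [('instance', 'str', []), ('instance', 'int', [])])
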
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
new file mode 100644
index 0000000..4879cec
--- /dev/null
+++ b/mypy/exprtotype.py
@@ -0,0 +1,72 @@
+"""Translate an expression (Node) to a Type value."""
+
+from typing import cast
+
+from mypy.nodes import (
+    Node, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, StrExpr, EllipsisExpr
+)
+from mypy.parsetype import parse_str_as_type, TypeParseError
+from mypy.types import Type, UnboundType, TypeList, EllipsisType
+
+
+class TypeTranslationError(Exception):
+    """Exception raised when an expression is not valid as a type."""
+
+
+def expr_to_unanalyzed_type(expr: Node) -> Type:
+    """Translate an expression to the corresponding type.
+
+    The result is not semantically analyzed. It can be UnboundType or TypeList.
+    Raise TypeTranslationError if the expression cannot represent a type.
+    """
+    if isinstance(expr, NameExpr):
+        name = expr.name
+        return UnboundType(name, line=expr.line)
+    elif isinstance(expr, MemberExpr):
+        fullname = get_member_expr_fullname(expr)
+        if fullname:
+            return UnboundType(fullname, line=expr.line)
+        else:
+            raise TypeTranslationError()
+    elif isinstance(expr, IndexExpr):
+        base = expr_to_unanalyzed_type(expr.base)
+        if isinstance(base, UnboundType):
+            if base.args:
+                raise TypeTranslationError()
+            if isinstance(expr.index, TupleExpr):
+                args = expr.index.items
+            else:
+                args = [expr.index]
+            base.args = [expr_to_unanalyzed_type(arg) for arg in args]
+            return base
+        else:
+            raise TypeTranslationError()
+    elif isinstance(expr, ListExpr):
+        return TypeList([expr_to_unanalyzed_type(t) for t in expr.items],
+                        line=expr.line)
+    elif isinstance(expr, StrExpr):
+        # Parse string literal type.
+        try:
+            result = parse_str_as_type(expr.value, expr.line)
+        except TypeParseError:
+            raise TypeTranslationError()
+        return result
+    elif isinstance(expr, EllipsisExpr):
+        return EllipsisType(expr.line)
+    else:
+        raise TypeTranslationError()
+
+
+def get_member_expr_fullname(expr: MemberExpr) -> str:
+    """Return the qualified name representation of a member expression.
+
+    Return a string of form foo.bar, foo.bar.baz, or similar, or None if the
+    argument cannot be represented in this form.
+    """
+    if isinstance(expr.expr, NameExpr):
+        initial = expr.expr.name
+    elif isinstance(expr.expr, MemberExpr):
+        initial = get_member_expr_fullname(expr.expr)
+    else:
+        return None
+    return '{}.{}'.format(initial, expr.name)
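
get_member_expr_fullname() above flattens nested member accesses into a dotted
name. The same recursion, expressed over the standard library's ast module as
an independent sketch (the real code operates on mypy's own Node classes, not
on ast nodes):

    import ast

    def dotted_name(node):
        # Return 'foo.bar.baz' for nested attribute access, or None otherwise.
        if isinstance(node, ast.Name):
            return node.id
        if isinstance(node, ast.Attribute):
            base = dotted_name(node.value)
            return None if base is None else '{}.{}'.format(base, node.attr)
        return None

    expr = ast.parse('typing.List', mode='eval').body
    print(dotted_name(expr))  # typing.List
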
diff --git a/mypy/git.py b/mypy/git.py
new file mode 100644
index 0000000..508c8e9
--- /dev/null
+++ b/mypy/git.py
@@ -0,0 +1,136 @@
+"""Utilities for verifying git integrity."""
+
+# Used also from setup.py, so don't pull in anything additional here (like mypy or typing):
+import os
+import pipes
+import subprocess
+import sys
+
+
+def is_git_repo(dir: str) -> bool:
+    """Is the given directory version-controlled with git?"""
+    return os.path.exists(os.path.join(dir, ".git"))
+
+
+def have_git() -> bool:
+    """Can we run the git executable?"""
+    try:
+        subprocess.check_output(["git", "--help"])
+        return True
+    except subprocess.CalledProcessError:
+        return False
+    except OSError:
+        return False
+
+
+def get_submodules(dir: str):
+    """Return a list of all git top-level submodules in a given directory."""
+    # It would be nicer to do
+    # "git submodule foreach 'echo MODULE $name $path $sha1 $toplevel'"
+    # but that wouldn't work on Windows.
+    output = subprocess.check_output(["git", "submodule", "status"], cwd=dir)
+    # "<status><sha1> name desc"
+    # status='-': not initialized
+    # status='+': changed
+    # status='u': merge conflicts
+    # status=' ': up-to-date
+    for line in output.splitlines():
+        # Skip the status indicator, as it could be a space, which would confuse the split.
+        line = line[1:]
+        name = line.split(b" ")[1]
+        yield name.decode(sys.getfilesystemencoding())
+
+
+def git_revision(dir: str) -> bytes:
+    """Get the SHA-1 of the HEAD of a git repository."""
+    return subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=dir).strip()
+
+
+def submodule_revision(dir: str, submodule: str) -> bytes:
+    """Get the SHA-1 a submodule is supposed to have."""
+    output = subprocess.check_output(["git", "ls-files", "-s", submodule], cwd=dir).strip()
+    # E.g.: "160000 e4a7edb949e0b920b16f61aeeb19fc3d328f3012 0       typeshed"
+    return output.split()[1]
+
+
+def is_dirty(dir: str) -> bool:
+    """Check whether a git repository has uncommitted changes."""
+    output = subprocess.check_output(["git", "status", "-uno", "--porcelain"], cwd=dir)
+    return output.strip() != b""
+
+
+def has_extra_files(dir: str) -> bool:
+    """Check whether a git repository has untracked files."""
+    output = subprocess.check_output(["git", "clean", "--dry-run", "-d"], cwd=dir)
+    return output.strip() != b""
+
+
+def warn_no_git_executable() -> None:
+    print("Warning: Couldn't check git integrity. "
+          "git executable not in path.", file=sys.stderr)
+
+
+def warn_dirty(dir) -> None:
+    print("Warning: git module '{}' has uncommitted changes.".format(dir),
+          file=sys.stderr)
+    print("Go to the directory", file=sys.stderr)
+    print("  {}".format(dir), file=sys.stderr)
+    print("and commit or reset your changes", file=sys.stderr)
+
+
+def warn_extra_files(dir) -> None:
+    print("Warning: git module '{}' has untracked files.".format(dir),
+          file=sys.stderr)
+    print("Go to the directory", file=sys.stderr)
+    print("  {}".format(dir), file=sys.stderr)
+    print("and add & commit your new files.", file=sys.stderr)
+
+
+def chdir_prefix(dir) -> str:
+    """Return the command to change to the target directory, plus '&&'."""
+    if os.path.relpath(dir) != ".":
+        return "cd " + pipes.quote(dir) + " && "
+    else:
+        return ""
+
+
+def error_submodule_not_initialized(name: str, dir: str) -> None:
+    print("Submodule '{}' not initialized.".format(name), file=sys.stderr)
+    print("Please run:", file=sys.stderr)
+    print("  {}git submodule update --init {}".format(
+        chdir_prefix(dir), name), file=sys.stderr)
+
+
+def error_submodule_not_updated(name: str, dir: str) -> None:
+    print("Submodule '{}' not updated.".format(name), file=sys.stderr)
+    print("Please run:", file=sys.stderr)
+    print("  {}git submodule update {}".format(
+        chdir_prefix(dir), name), file=sys.stderr)
+    print("(If you got this message because you updated {} yourself".format(name), file=sys.stderr)
+    print(" then run \"git add {}\" to silence this check)".format(name), file=sys.stderr)
+
+
+def verify_git_integrity_or_abort(datadir: str) -> None:
+    """Verify the (submodule) integrity of a git repository.
+
+    Potentially output warnings/errors (to stderr), and exit with status 1
+    if we detected a severe problem.
+    """
+    datadir = datadir or '.'
+    if not is_git_repo(datadir):
+        return
+    if not have_git():
+        warn_no_git_executable()
+        return
+    for submodule in get_submodules(datadir):
+        submodule_path = os.path.join(datadir, submodule)
+        if not is_git_repo(submodule_path):
+            error_submodule_not_initialized(submodule, datadir)
+            sys.exit(1)
+        elif submodule_revision(datadir, submodule) != git_revision(submodule_path):
+            error_submodule_not_updated(submodule, datadir)
+            sys.exit(1)
+        elif is_dirty(submodule_path):
+            warn_dirty(submodule)
+        elif has_extra_files(submodule_path):
+            warn_extra_files(submodule)
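
submodule_revision() above relies on the column layout of "git ls-files -s",
where the second whitespace-separated field is the commit a submodule is
pinned to. A small sketch of that parsing, run on the sample output line
quoted in the code above (no git invocation; illustrative only):

    sample = b"160000 e4a7edb949e0b920b16f61aeeb19fc3d328f3012 0       typeshed"
    pinned_sha = sample.split()[1]
    print(pinned_sha.decode('ascii'))
    # e4a7edb949e0b920b16f61aeeb19fc3d328f3012
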
diff --git a/mypy/infer.py b/mypy/infer.py
new file mode 100644
index 0000000..92d1be6
--- /dev/null
+++ b/mypy/infer.py
@@ -0,0 +1,42 @@
+"""Utilities for type argument inference."""
+
+from typing import List
+
+from mypy.constraints import infer_constraints, infer_constraints_for_callable
+from mypy.types import Type, CallableType
+from mypy.solve import solve_constraints
+from mypy.constraints import SUBTYPE_OF
+
+
+def infer_function_type_arguments(callee_type: CallableType,
+                                  arg_types: List[Type],
+                                  arg_kinds: List[int],
+                                  formal_to_actual: List[List[int]],
+                                  strict: bool = True) -> List[Type]:
+    """Infer the type arguments of a generic function.
+
+    Return an array of lower bound types for the type variables -1 (at
+    index 0), -2 (at index 1), etc. A lower bound is None if a value
+    could not be inferred.
+
+    Arguments:
+      callee_type: the target generic function
+      arg_types: argument types at the call site
+      arg_kinds: nodes.ARG_* values for arg_types
+      formal_to_actual: mapping from formal to actual variable indices
+    """
+    # Infer constraints.
+    constraints = infer_constraints_for_callable(
+        callee_type, arg_types, arg_kinds, formal_to_actual)
+
+    # Solve constraints.
+    type_vars = callee_type.type_var_ids()
+    return solve_constraints(type_vars, constraints, strict)
+
+
+def infer_type_arguments(type_var_ids: List[int],
+                         template: Type, actual: Type) -> List[Type]:
+    # Like infer_function_type_arguments, but only match a single type
+    # against a generic type.
+    constraints = infer_constraints(template, actual, SUBTYPE_OF)
+    return solve_constraints(type_var_ids, constraints)
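
infer_function_type_arguments() above first gathers subtype constraints by
matching the actual argument types against the formal types and then solves
for each type variable. A toy rendition with strings in place of mypy types
(the real solver joins multiple lower bounds instead of requiring them to
agree, so this is only a simplified sketch with an ad hoc solve() helper):

    def solve(type_vars, constraints):
        # constraints: (type_var, lower_bound) pairs gathered from a call site.
        solution = []
        for tv in type_vars:
            bounds = {lb for v, lb in constraints if v == tv}
            solution.append(bounds.pop() if len(bounds) == 1 else None)
        return solution

    # def f(x: T, y: T) called as f(1, 2): both arguments constrain T by 'int'.
    print(solve(['T'], [('T', 'int'), ('T', 'int')]))   # ['int']
    # f(1, 'x') yields conflicting bounds, so this sketch cannot pick a value.
    print(solve(['T'], [('T', 'int'), ('T', 'str')]))   # [None]
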
diff --git a/mypy/join.py b/mypy/join.py
new file mode 100644
index 0000000..7489fec
--- /dev/null
+++ b/mypy/join.py
@@ -0,0 +1,321 @@
+"""Calculation of the least upper bound types (joins)."""
+
+from typing import cast, List
+
+from mypy.types import (
+    Type, AnyType, NoneTyp, Void, TypeVisitor, Instance, UnboundType,
+    ErrorType, TypeVarType, CallableType, TupleType, ErasedType, TypeList,
+    UnionType, FunctionLike, Overloaded, PartialType, DeletedType
+)
+from mypy.maptype import map_instance_to_supertype
+from mypy.subtypes import is_subtype, is_equivalent, is_subtype_ignoring_tvars
+
+
+def join_simple(declaration: Type, s: Type, t: Type) -> Type:
+    """Return a simple least upper bound given the declared type."""
+
+    if isinstance(s, AnyType):
+        return s
+
+    if isinstance(s, NoneTyp) and not isinstance(t, Void):
+        return t
+
+    if isinstance(s, ErasedType):
+        return t
+
+    if is_subtype(s, t):
+        return t
+
+    if is_subtype(t, s):
+        return s
+
+    if isinstance(declaration, UnionType):
+        return UnionType.make_simplified_union([s, t])
+
+    value = t.accept(TypeJoinVisitor(s))
+
+    if value is None:
+        # XXX this code path probably should be avoided.
+        # It seems to happen when a line (x = y) is a type error, and
+        # it's not clear that assuming that x is arbitrary afterward
+        # is a good idea.
+        return declaration
+
+    if declaration is None or is_subtype(value, declaration):
+        return value
+
+    return declaration
+
+
+def join_types(s: Type, t: Type) -> Type:
+    """Return the least upper bound of s and t.
+
+    For example, the join of 'int' and 'object' is 'object'.
+
+    If the join does not exist, return an ErrorType instance.
+    """
+    if isinstance(s, AnyType):
+        return s
+
+    if isinstance(s, NoneTyp) and not isinstance(t, Void):
+        return t
+
+    if isinstance(s, ErasedType):
+        return t
+
+    # Use a visitor to handle non-trivial cases.
+    return t.accept(TypeJoinVisitor(s))
+
+
+class TypeJoinVisitor(TypeVisitor[Type]):
+    """Implementation of the least upper bound algorithm.
+
+    Attributes:
+      s: The other (left) type operand.
+    """
+
+    def __init__(self, s: Type) -> None:
+        self.s = s
+
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        if isinstance(self.s, Void) or isinstance(self.s, ErrorType):
+            return ErrorType()
+        else:
+            return AnyType()
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        if is_subtype(self.s, t):
+            return t
+        else:
+            return UnionType(t.items + [self.s])
+
+    def visit_error_type(self, t: ErrorType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        assert False, 'Not supported'
+
+    def visit_any(self, t: AnyType) -> Type:
+        return t
+
+    def visit_void(self, t: Void) -> Type:
+        if isinstance(self.s, Void):
+            return t
+        else:
+            return ErrorType()
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        if not isinstance(self.s, Void):
+            return self.s
+        else:
+            return self.default(self.s)
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        if not isinstance(self.s, Void):
+            return self.s
+        else:
+            return self.default(self.s)
+
+    def visit_erased_type(self, t: ErasedType) -> Type:
+        return self.s
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if isinstance(self.s, TypeVarType) and (cast(TypeVarType, self.s)).id == t.id:
+            return self.s
+        else:
+            return self.default(self.s)
+
+    def visit_instance(self, t: Instance) -> Type:
+        if isinstance(self.s, Instance):
+            return join_instances(t, cast(Instance, self.s))
+        elif isinstance(self.s, FunctionLike):
+            return join_types(t, self.s.fallback)
+        else:
+            return self.default(self.s)
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        # TODO: Consider subtyping instead of just similarity.
+        if isinstance(self.s, CallableType) and is_similar_callables(
+                t, cast(CallableType, self.s)):
+            return combine_similar_callables(t, cast(CallableType, self.s))
+        elif isinstance(self.s, Overloaded):
+            # Switch the order of arguments so that we'll get to visit_overloaded.
+            return join_types(t, self.s)
+        else:
+            return join_types(t.fallback, self.s)
+
+    def visit_overloaded(self, t: Overloaded) -> Type:
+        # This is more complex than most other cases. Here are some
+        # examples that illustrate how this works.
+        #
+        # First let's define a concise notation:
+        #  - Cn are callable types (for n in 1, 2, ...)
+        #  - Ov(C1, C2, ...) is an overloaded type with items C1, C2, ...
+        #  - Callable[[T, ...], S] is written as [T, ...] -> S.
+        #
+        # We want some basic properties to hold (assume Cn are all
+        # unrelated via Any-similarity):
+        #
+        #   join(Ov(C1, C2), C1) == C1
+        #   join(Ov(C1, C2), Ov(C1, C2)) == Ov(C1, C2)
+        #   join(Ov(C1, C2), Ov(C1, C3)) == C1
+        #   join(Ov(C1, C2), C3) == join of fallback types
+        #
+        # The presence of Any types makes things more interesting. The join is the
+        # most general type we can get with respect to Any:
+        #
+        #   join(Ov([int] -> int, [str] -> str), [Any] -> str) == Any -> str
+        #
+        # We could use a simplification step that removes redundancies, but that's not
+        # implemented right now. Consider this example, where we get a redundancy:
+        #
+        #   join(Ov([int, Any] -> Any, [str, Any] -> Any), [Any, int] -> Any) ==
+        #       Ov([Any, int] -> Any, [Any, int] -> Any)
+        #
+        # TODO: Use callable subtyping instead of just similarity.
+        result = []  # type: List[CallableType]
+        s = self.s
+        if isinstance(s, FunctionLike):
+            # The interesting case where both types are function types.
+            for t_item in t.items():
+                for s_item in s.items():
+                    if is_similar_callables(t_item, s_item):
+                        result.append(combine_similar_callables(t_item, s_item))
+            if result:
+                # TODO: Simplify redundancies from the result.
+                if len(result) == 1:
+                    return result[0]
+                else:
+                    return Overloaded(result)
+            return join_types(t.fallback, s.fallback)
+        return join_types(t.fallback, s)
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        if (isinstance(self.s, TupleType) and
+                cast(TupleType, self.s).length() == t.length()):
+            items = []  # type: List[Type]
+            for i in range(t.length()):
+                items.append(self.join(t.items[i],
+                                       (cast(TupleType, self.s)).items[i]))
+            # TODO: What if the fallback types are different?
+            return TupleType(items, t.fallback)
+        else:
+            return self.default(self.s)
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        # We only have partial information so we can't decide the join result. We should
+        # never get here.
+        assert False, "Internal error"
+
+    def join(self, s: Type, t: Type) -> Type:
+        return join_types(s, t)
+
+    def default(self, typ: Type) -> Type:
+        if isinstance(typ, Instance):
+            return object_from_instance(typ)
+        elif isinstance(typ, UnboundType):
+            return AnyType()
+        elif isinstance(typ, Void) or isinstance(typ, ErrorType):
+            return ErrorType()
+        elif isinstance(typ, TupleType):
+            return self.default(typ.fallback)
+        elif isinstance(typ, FunctionLike):
+            return self.default(typ.fallback)
+        elif isinstance(typ, TypeVarType):
+            return self.default(typ.upper_bound)
+        else:
+            return AnyType()
+
+
+def join_instances(t: Instance, s: Instance) -> Type:
+    """Calculate the join of two instance types.
+
+    Return ErrorType if the result is ambiguous.
+    """
+
+    if t.type == s.type:
+        # Simplest case: join two types with the same base type (but
+        # potentially different arguments).
+        if is_subtype(t, s) or is_subtype(s, t):
+            # Compatible; combine type arguments.
+            args = []  # type: List[Type]
+            for i in range(len(t.args)):
+                args.append(join_types(t.args[i], s.args[i]))
+            return Instance(t.type, args)
+        else:
+            # Incompatible; return trivial result object.
+            return object_from_instance(t)
+    elif t.type.bases and is_subtype_ignoring_tvars(t, s):
+        return join_instances_via_supertype(t, s)
+    else:
+        # Now t is not a subtype of s, and t != s. Now s could be a subtype
+        # of t; alternatively, we need to find a common supertype. This works
+        # in both cases.
+        return join_instances_via_supertype(s, t)
+
+
+def join_instances_via_supertype(t: Instance, s: Instance) -> Type:
+    # Give preference to joins via duck typing relationship, so that
+    # join(int, float) == float, for example.
+    if t.type._promote and is_subtype(t.type._promote, s):
+        return join_types(t.type._promote, s)
+    elif s.type._promote and is_subtype(s.type._promote, t):
+        return join_types(t, s.type._promote)
+    res = s
+    mapped = map_instance_to_supertype(t, t.type.bases[0].type)
+    join = join_instances(mapped, res)
+    # If the join failed, fail. This is a defensive measure (this might
+    # never happen).
+    if isinstance(join, ErrorType):
+        return join
+    # Now the result must be an Instance, so the cast below cannot fail.
+    res = cast(Instance, join)
+    return res
+
+
+def is_similar_callables(t: CallableType, s: CallableType) -> bool:
+    """Return True if t and s are equivalent and have identical numbers of
+    arguments, default arguments and varargs.
+    """
+
+    return (len(t.arg_types) == len(s.arg_types) and t.min_args == s.min_args
+            and t.is_var_arg == s.is_var_arg and is_equivalent(t, s))
+
+
+def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType:
+    arg_types = []  # type: List[Type]
+    for i in range(len(t.arg_types)):
+        arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
+    # TODO kinds and argument names
+    # The fallback type can be either 'function' or 'type'. The result should have 'type' as
+    # fallback only if both operands have it as 'type'.
+    if t.fallback.type.fullname() != 'builtins.type':
+        fallback = t.fallback
+    else:
+        fallback = s.fallback
+    return t.copy_modified(arg_types=arg_types,
+                           ret_type=join_types(t.ret_type, s.ret_type),
+                           fallback=fallback,
+                           name=None)
+
+
+def object_from_instance(instance: Instance) -> Instance:
+    """Construct the type 'builtins.object' from an instance type."""
+    # Use the fact that 'object' is always the last class in the mro.
+    res = Instance(instance.type.mro[-1], [])
+    return res
+
+
+def join_type_list(types: List[Type]) -> Type:
+    if not types:
+        # This is a little arbitrary but reasonable. Any empty tuple should be compatible
+        # with all variable length tuples, and this makes it possible. A better approach
+        # would be to use a special bottom type.
+        return NoneTyp()
+    joined = types[0]
+    for t in types[1:]:
+        joined = join_types(joined, t)
+    return joined
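
When two instance types are unrelated, join_instances() above climbs the base
classes until a common supertype is found, with 'builtins.object' as the last
resort. The same idea on plain Python classes via the MRO (a standalone
sketch; it ignores type arguments, promotions and the other cases handled
above):

    def join(a, b):
        # Return the first class in a's MRO that is also a superclass of b.
        for base in a.__mro__:
            if issubclass(b, base):
                return base
        return object

    class A: pass
    class B(A): pass
    class C(A): pass

    print(join(B, C).__name__)    # A
    print(join(B, int).__name__)  # object
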
diff --git a/mypy/lex.py b/mypy/lex.py
new file mode 100644
index 0000000..d948785
--- /dev/null
+++ b/mypy/lex.py
@@ -0,0 +1,888 @@
+"""Lexical analyzer for mypy.
+
+Translate a string that represents a file or a compilation unit to a list of
+tokens.
+
+This module can be run as a script (lex.py FILE).
+"""
+
+import re
+
+from mypy.util import short_type, find_python_encoding
+from mypy import defaults
+from typing import List, Callable, Dict, Any, Match, Pattern, Set, Union, Tuple
+
+
+class Token:
+    """Base class for all tokens."""
+
+    def __init__(self, string: str, pre: str = '') -> None:
+        """Initialize a token.
+
+        Arguments:
+          string: Token string in program text
+          pre:    Space, comments etc. before token
+        """
+
+        self.string = string
+        self.pre = pre
+        self.line = 0
+
+    def __repr__(self) -> str:
+        """The representation is of form 'Keyword(  if)'."""
+        t = short_type(self)
+        return t + '(' + self.fix(self.pre) + self.fix(self.string) + ')'
+
+    def rep(self) -> str:
+        return self.pre + self.string
+
+    def fix(self, s: str) -> str:
+        """Replace common non-printable chars with escape sequences.
+
+        Do not use repr() since we don't want to duplicate backslashes.
+        """
+        return s.replace('\n', '\\n').replace('\t', '\\t').replace('\r', '\\r')
+
+
+# Token classes
+
+
+class Break(Token):
+    """Statement break (line break or semicolon)"""
+
+
+class Indent(Token):
+    """Increase block indent level."""
+
+
+class Dedent(Token):
+    """Decrease block indent level."""
+
+
+class Eof(Token):
+    """End of file"""
+
+
+class Keyword(Token):
+    """Reserved word (other than keyword operators; they use Op).
+
+    Examples: if, class, while, def.
+    """
+
+
+class Name(Token):
+    """An alphanumeric identifier"""
+
+
+class IntLit(Token):
+    """Integer literal"""
+
+
+class StrLit(Token):
+    """String literal"""
+
+    def parsed(self) -> str:
+        """Return the parsed contents of the literal."""
+        return _parse_str_literal(self.string)
+
+
+class BytesLit(Token):
+    """Bytes literal"""
+
+    def parsed(self) -> str:
+        """Return the parsed contents of the literal."""
+        return _parse_str_literal(self.string)
+
+
+class UnicodeLit(Token):
+    """Unicode literal (Python 2.x)"""
+
+    def parsed(self) -> str:
+        """Return the parsed contents of the literal."""
+        return _parse_str_literal(self.string)
+
+
+class FloatLit(Token):
+    """Float literal"""
+
+
+class ComplexLit(Token):
+    """Complex literal"""
+
+
+class Punct(Token):
+    """Punctuator (e.g. comma, '(' or '=')"""
+
+
+class Colon(Token):
+    pass
+
+
+class EllipsisToken(Token):
+    pass
+
+
+class Op(Token):
+    """Operator (e.g. '+' or 'in')"""
+
+
+class Bom(Token):
+    """Byte order mark (at the start of a file)"""
+
+
+class LexError(Token):
+    """Lexer error token"""
+
+    def __init__(self, string: str, type: int, message: str = None) -> None:
+        """Initialize token.
+
+        The type argument is one of the error types below.
+        """
+        super().__init__(string)
+        self.type = type
+        self.message = message
+
+    def __str__(self):
+        if self.message:
+            return 'LexError(%s)' % self.message
+        else:
+            return super().__str__()
+
+
+# Lexer error types
+NUMERIC_LITERAL_ERROR = 0
+UNTERMINATED_STRING_LITERAL = 1
+INVALID_CHARACTER = 2
+DECODE_ERROR = 3
+INVALID_BACKSLASH = 4
+INVALID_DEDENT = 5
+
+
+def lex(string: Union[str, bytes], first_line: int = 1,
+        pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+        is_stub_file: bool = False) -> Tuple[List[Token], Set[int]]:
+    """Analyze string, and return an array of token objects and the lines to ignore.
+
+    The last token is always Eof. The intention is to ignore any
+    semantic and type check errors on the ignored lines.
+    """
+    l = Lexer(pyversion, is_stub_file=is_stub_file)
+    l.lex(string, first_line)
+    return l.tok, l.ignored_lines
+
+
+# Reserved words (not including operators)
+keywords_common = set([
+    'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',
+    'else', 'except', 'finally', 'from', 'for', 'global', 'if', 'import',
+    'lambda', 'pass', 'raise', 'return', 'try', 'while', 'with',
+    'yield'])
+
+# Reserved words specific for Python version 2
+# TODO (jukka): 'print' should be here, but it breaks the parsing of Python 2
+#               builtins, since they also define the function 'print'.
+keywords2 = set([])  # type: Set[str]
+
+# Reserved words specific for Python version 3
+keywords3 = set(['nonlocal'])
+
+# Alphabetical operators (reserved words)
+alpha_operators = set(['in', 'is', 'not', 'and', 'or'])
+
+# String literal prefixes
+str_prefixes = set(['r', 'b', 'br', 'rb', 'u', 'ur', 'R', 'B', 'U'])
+
+# List of regular expressions that match non-alphabetical operators
+operators = [re.compile('[-+*/<>.%&|^~]'),
+             re.compile('==|!=|<=|>=|\\*\\*|//|<<|>>|<>')]
+
+# List of regular expressions that match punctuator tokens
+punctuators = [re.compile('[=,()@`]|(->)'),
+               re.compile('\\['),
+               re.compile(']'),
+               re.compile('([-+*/%&|^]|\\*\\*|//|<<|>>)=')]
+
+
+# Map single-character string escape sequences to corresponding characters.
+escape_map = {'a': '\x07',
+              'b': '\x08',
+              'f': '\x0c',
+              'n': '\x0a',
+              'r': '\x0d',
+              't': '\x09',
+              'v': '\x0b',
+              '"': '"',
+              "'": "'"}
+
+
+# Matches the optional prefix of a string literal, e.g. the 'r' in r"foo".
+str_prefix_re = re.compile('[rRbBuU]*')
+
+# Matches an escape sequence in a string, e.g. \n or \x4F.
+escape_re = re.compile(
+    "\\\\([abfnrtv'\"]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|[0-7]{1,3})")
+
+
+def _parse_str_literal(string: str) -> str:
+    """Translate escape sequences in str literal to the corresponding chars.
+
+    For example, \t is translated to the tab character (ascii 9).
+
+    Return the translated contents of the literal.  Also handle raw and
+    triple-quoted string literals.
+    """
+
+    prefix = str_prefix_re.match(string).group(0).lower()
+    s = string[len(prefix):]
+    if s.startswith("'''") or s.startswith('"""'):
+        return s[3:-3]
+    elif 'r' in prefix or 'R' in prefix:
+        return s[1:-1].replace('\\' + s[0], s[0])
+    else:
+        return escape_re.sub(lambda m: escape_repl(m, prefix), s[1:-1])
+
+
+def escape_repl(m: Match[str], prefix: str) -> str:
+    """Translate a string escape sequence, e.g. \t -> the tab character.
+
+    Assume that the Match object is from escape_re.
+    """
+
+    seq = m.group(1)
+    if len(seq) == 1 and seq in escape_map:
+        # Single-character escape sequence, e.g. \n.
+        return escape_map[seq]
+    elif seq.startswith('x'):
+        # Hexadecimal sequence \xNN.
+        return chr(int(seq[1:], 16))
+    elif seq.startswith('u'):
+        # Unicode sequence \uNNNN.
+        if 'b' not in prefix and 'B' not in prefix:
+            return chr(int(seq[1:], 16))
+        else:
+            return '\\' + seq
+    else:
+        # Octal sequence.
+        ord = int(seq, 8)
+        if 'b' in prefix or 'B' in prefix:
+            # Make sure code is no larger than 255 for bytes literals.
+            ord = ord % 256
+        return chr(ord)
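+
+# Illustrative examples (comments only, not part of the lexer above): the
+# three branches of _parse_str_literal behave roughly as follows:
+#
+#   _parse_str_literal("'''abc'''")  -> 'abc'      (triple quotes stripped)
+#   _parse_str_literal("r'a\\tb'")   -> 'a\\tb'    (raw: backslash kept)
+#   _parse_str_literal("'a\\tb'")    -> 'a<TAB>b'  (escape translated)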
+
+
+class Lexer:
+    """Lexical analyzer."""
+
+    i = 0      # Current string index (into s)
+    s = ''     # The string being analyzed
+    line = 0   # Current line number
+    pre_whitespace = ''     # Whitespace and comments before the next token
+    enc = ''                # Encoding
+
+    # Generated tokens
+    tok = None  # type: List[Token]
+
+    # Table from character to lexer method. E.g. the entry for '0'
+    # contains the method lex_number().
+    map = None  # type: Dict[str, Callable[[], None]]
+
+    # Indent levels of currently open blocks, in spaces.
+    indents = None  # type: List[int]
+
+    # Open ('s, ['s and {'s without matching closing bracket; used for ignoring
+    # newlines within parentheses/brackets.
+    open_brackets = None  # type: List[str]
+
+    pyversion = defaults.PYTHON3_VERSION
+
+    # Ignore errors on these lines (defined using '# type: ignore').
+    ignored_lines = None  # type: Set[int]
+
+    def __init__(self, pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+                 is_stub_file: bool = False) -> None:
+        self.map = {}
+        self.tok = []
+        self.indents = [0]
+        self.open_brackets = []
+        self.pyversion = pyversion
+        self.is_stub_file = is_stub_file
+        self.ignored_lines = set()
+        # Fill in the map from valid character codes to relevant lexer methods.
+        extra_misc = '' if pyversion[0] >= 3 else '`'
+        for seq, method in [('ABCDEFGHIJKLMNOPQRSTUVWXYZ', self.lex_name),
+                            ('abcdefghijklmnopqrstuvwxyz_', self.lex_name),
+                            ('0123456789', self.lex_number),
+                            ('.', self.lex_number_or_dot),
+                            (' ' + '\t' + '\x0c', self.lex_space),
+                            ('"', self.lex_str_double),
+                            ("'", self.lex_str_single),
+                            ('\r' + '\n', self.lex_break),
+                            (';', self.lex_semicolon),
+                            (':', self.lex_colon),
+                            ('#', self.lex_comment),
+                            ('\\', self.lex_backslash),
+                            ('([{', self.lex_open_bracket),
+                            (')]}', self.lex_close_bracket),
+                            ('-+*/<>%&|^~=!,@' + extra_misc, self.lex_misc)]:
+            for c in seq:
+                self.map[c] = method
+        if pyversion[0] == 2:
+            self.keywords = keywords_common | keywords2
+            # Decimal/hex/octal/binary literal or integer complex literal
+            self.number_exp1 = re.compile('(0[xXoObB][0-9a-fA-F]+|[0-9]+)[lL]?')
+
+        if pyversion[0] == 3:
+            self.keywords = keywords_common | keywords3
+            self.number_exp1 = re.compile('0[xXoObB][0-9a-fA-F]+|[0-9]+')
+
+    def lex(self, text: Union[str, bytes], first_line: int) -> None:
+        """Lexically analyze a string, storing the tokens at the tok list."""
+        self.i = 0
+        self.line = first_line
+
+        if isinstance(text, bytes):
+            if text.startswith(b'\xef\xbb\xbf'):
+                self.enc = 'utf8'
+                bom = True
+            else:
+                self.enc, enc_line = find_python_encoding(text, self.pyversion)
+                bom = False
+            try:
+                decoded_text = text.decode(self.enc)
+            except UnicodeDecodeError as err:
+                self.report_unicode_decode_error(err, text)
+                return
+            except LookupError:
+                self.report_unknown_encoding(enc_line)
+                return
+            text = decoded_text
+            if bom:
+                self.add_token(Bom(text[0]))
+        self.s = text
+
+        # Parse initial indent; otherwise first-line indent would not generate
+        # an error.
+        self.lex_indent()
+
+        # Use some local variables as a simple optimization.
+        map = self.map
+        default = self.unknown_character
+
+        # Lex the file. Repeatedly call the lexer method for the current char.
+        while self.i < len(text):
+            # Get the character code of the next character to lex.
+            c = text[self.i]
+            # Dispatch to the relevant lexer method. This will consume some
+            # characters in the text, add a token to self.tok and increment
+            # self.i.
+            map.get(c, default)()
+
+        # Append a break if there is no statement/block terminator at the end
+        # of input.
+        if len(self.tok) > 0 and (not isinstance(self.tok[-1], Break) and
+                                  not isinstance(self.tok[-1], Dedent)):
+            self.add_token(Break(''))
+
+        # Attach any dangling comments/whitespace to a final Break token.
+        if self.tok and isinstance(self.tok[-1], Break):
+            self.tok[-1].string += self.pre_whitespace
+            self.pre_whitespace = ''
+
+        # Close remaining open blocks with Dedent tokens.
+        self.lex_indent()
+
+        self.add_token(Eof(''))
+
+    def report_unicode_decode_error(self, exc: UnicodeDecodeError, text: bytes) -> None:
+        lines = text.splitlines()
+        for line in lines:
+            try:
+                line.decode(self.enc)
+            except UnicodeDecodeError as new_exc:
+                exc = new_exc
+                break
+            self.line += 1
+        else:
+            self.line = 1
+        self.add_token(
+            LexError('', DECODE_ERROR,
+                     "%r codec can't decode byte %d in column %d" % (
+                         self.enc, line[exc.start], exc.start + 1)))
+        self.add_token(Break(''))
+        self.add_token(Eof(''))
+
+    def report_unknown_encoding(self, encoding_line: int) -> None:
+        self.line = encoding_line
+        self.add_token(
+            LexError('', DECODE_ERROR,
+                     "Unknown encoding %r" % self.enc))
+        self.add_token(Break(''))
+        self.add_token(Eof(''))
+
+    def lex_number_or_dot(self) -> None:
+        """Analyse a token starting with a dot.
+
+        It can be the member access operator, a float literal such as '.123',
+        or an ellipsis (for Python 3 and for all stub files).
+        """
+        if self.is_at_number():
+            self.lex_number()
+        elif self.is_at_ellipsis():
+            # '...' is valid in Python 2 as a token but its use is limited to indexing.
+            # Example: Tuple[int, ...] is valid in Python 2.
+            self.lex_ellipsis()
+        else:
+            self.lex_misc()
+
+    number_exp = re.compile(r'[0-9]|\.[0-9]')
+
+    def is_at_number(self) -> bool:
+        """Is the current location at a numeric literal?"""
+        return self.match(self.number_exp) != ''
+
+    ellipsis_exp = re.compile(r'\.\.\.')
+
+    def is_at_ellipsis(self) -> bool:
+        """Is the current location at a ellipsis '...'"""
+        return self.match(self.ellipsis_exp) != ''
+
+    # Regexps used by lex_number
+
+    # NOTE: number_exp1 depends on Python version and is defined in __init__.
+
+    # Float literal, e.g. '1.23' or '12e+34' or '1.2j'
+    number_exp2 = re.compile(
+        r'[0-9]*\.[0-9]*([eE][-+]?[0-9]+)?|[0-9]+[eE][-+]?[0-9]+')
+
+    # Complex literal, e.g. '3j' or '1.5e2J'
+    number_complex = re.compile(
+        r'([0-9]*\.[0-9]*([eE][-+]?[0-9]+)?|[0-9]+([eE][-+]?[0-9]+)?)[jJ]')
+
+    # These characters must not appear after a number literal.
+    name_char_exp = re.compile('[a-zA-Z0-9_]')
+    octal_int = re.compile('0+[1-9]')
+
+    def lex_number(self) -> None:
+        """Analyse an int or float literal.
+
+        Assume that the current location points to one of them.
+        """
+        s1 = self.match(self.number_exp1)
+        s2 = self.match(self.number_exp2)
+        sc = self.match(self.number_complex)
+
+        maxlen = max(len(s1), len(s2), len(sc))
+        if self.name_char_exp.match(
+                self.s[self.i + maxlen:self.i + maxlen + 1]) is not None:
+            # Error: alphanumeric character after number literal.
+            s3 = self.match(re.compile('[0-9][0-9a-zA-Z_]*'))
+            maxlen = max(maxlen, len(s3))
+            self.add_token(LexError(' ' * maxlen, NUMERIC_LITERAL_ERROR))
+        elif len(s1) == maxlen:
+            # Integer literal.
+            if self.pyversion[0] >= 3 and self.octal_int.match(s1):
+                # Python 2 style octal literal such as 0377 not supported in Python 3.
+                self.add_token(LexError(s1, NUMERIC_LITERAL_ERROR))
+            else:
+                self.add_token(IntLit(s1))
+        elif len(s2) == maxlen:
+            # Float literal.
+            self.add_token(FloatLit(s2))
+        else:
+            # Complex literal.
+            self.add_token(ComplexLit(sc))
+
+    def lex_ellipsis(self) -> None:
+        self.add_token(EllipsisToken('...'))
+
+    name_exp = re.compile('[a-zA-Z_][a-zA-Z0-9_]*')
+
+    def lex_name(self) -> None:
+        """Analyse a name.
+
+        A name can be an identifier, a keyword or an alphabetical operator.
+        Also deal with prefixed string literals such as r'...'.
+        """
+        s = self.match(self.name_exp)
+        if s in self.keywords:
+            self.add_token(Keyword(s))
+        elif s in alpha_operators:
+            self.add_token(Op(s))
+        elif s in str_prefixes and self.match(re.compile('[a-zA-Z]+[\'"]')) != '':
+            self.lex_prefixed_str(s)
+        else:
+            self.add_token(Name(s))
+
+    # Regexps representing components of string literals
+
+    # Initial part of a single-quoted literal, e.g. b'foo' or b'foo\\\n
+    str_exp_single = re.compile(
+        r"[a-zA-Z]*'([^'\\\r\n]|\\[^\r\n])*('|\\(\n|\r\n?))")
+    # Non-initial part of a multiline single-quoted literal, e.g. foo'
+    str_exp_single_multi = re.compile(
+        r"([^'\\\r\n]|\\[^\r\n])*('|\\(\n|\r\n?))")
+    # Initial part of a single-quoted raw literal, e.g. r'foo' or r'foo\\\n
+    str_exp_raw_single = re.compile(
+        r"[a-zA-Z]*'([^'\r\n\\]|\\'|\\[^\n\r])*('|\\(\n|\r\n?))")
+    # Non-initial part of a raw multiline single-quoted literal, e.g. foo'
+    str_exp_raw_single_multi = re.compile(
+        r"([^'\r\n]|'')*('|\\(\n|\r\n?))")
+
+    # Start of a ''' literal, e.g. b'''
+    str_exp_single3 = re.compile("[a-z]*'''")
+    # End of a ''' literal, e.g. foo'''
+    str_exp_single3end = re.compile(r"([^\n\r\\]|\\[^\n\r])*?'''")
+
+    # The following are similar to above (but use double quotes).
+
+    str_exp_double = re.compile(
+        r'[a-z]*"([^"\\\r\n]|\\[^\r\n])*("|\\(\n|\r\n?))')
+    str_exp_double_multi = re.compile(
+        r'([^"\\\r\n]|\\[^\r\n])*("|\\(\n|\r\n?))')
+    str_exp_raw_double = re.compile(
+        r'[a-z]*"([^"\r\n\\]|\\"|\\[^\n\r])*("|\\(\n|\r\n?))')
+    str_exp_raw_double_multi = re.compile(
+        r'([^"\r\n]|"")*("|\\(\n|\r\n?))')
+
+    str_exp_double3 = re.compile('[a-z]*"""')
+    str_exp_double3end = re.compile(r'([^\n\r\\]|\\[^\n\r])*?"""')
+
+    def lex_str_single(self) -> None:
+        """Analyse single-quoted string literal"""
+        self.lex_str(self.str_exp_single, self.str_exp_single_multi,
+                     self.str_exp_single3, self.str_exp_single3end)
+
+    def lex_str_double(self) -> None:
+        """Analyse double-quoted string literal"""
+        self.lex_str(self.str_exp_double, self.str_exp_double_multi,
+                     self.str_exp_double3, self.str_exp_double3end)
+
+    def lex_prefixed_str(self, prefix: str) -> None:
+        """Analyse a string literal with a prefix, such as r'...'."""
+        s = self.match(re.compile('[a-zA-Z]+[\'"]'))
+        if s.endswith("'"):
+            re1 = self.str_exp_single
+            re2 = self.str_exp_single_multi
+            if 'r' in prefix or 'R' in prefix:
+                re1 = self.str_exp_raw_single
+                re2 = self.str_exp_raw_single_multi
+            self.lex_str(re1, re2, self.str_exp_single3,
+                         self.str_exp_single3end, prefix)
+        else:
+            re1 = self.str_exp_double
+            re2 = self.str_exp_double_multi
+            if 'r' in prefix or 'R' in prefix:
+                re1 = self.str_exp_raw_double
+                re2 = self.str_exp_raw_double_multi
+            self.lex_str(re1, re2, self.str_exp_double3,
+                         self.str_exp_double3end, prefix)
+
+    def lex_str(self, regex: Pattern[str], re2: Pattern[str],
+                re3: Pattern[str], re3end: Pattern[str],
+                prefix: str = '') -> None:
+        """Analyse a string literal described by regexps.
+
+        Assume that the current location is at the beginning of the
+        literal. The arguments re3 and re3end describe the
+        corresponding triple-quoted literals.
+        """
+        s3 = self.match(re3)
+        if s3 != '':
+            # Triple-quoted string literal.
+            self.lex_triple_quoted_str(re3end, prefix)
+        else:
+            # Single or double quoted string literal.
+            s = self.match(regex)
+            if s != '':
+                if s.endswith('\n') or s.endswith('\r'):
+                    self.lex_multiline_string_literal(re2, s)
+                else:
+                    if 'b' in prefix or 'B' in prefix:
+                        self.add_token(BytesLit(s))
+                    elif 'u' in prefix or 'U' in prefix:
+                        self.add_token(UnicodeLit(s))
+                    else:
+                        self.add_token(StrLit(s))
+            else:
+                # Unterminated string literal.
+                s = self.match(re.compile('[^\\n\\r]*'))
+                self.add_token(LexError(s, UNTERMINATED_STRING_LITERAL))
+
+    def lex_triple_quoted_str(self, re3end: Pattern[str], prefix: str) -> None:
+        line = self.line
+        ss = self.s[self.i:self.i + len(prefix) + 3]
+        self.i += len(prefix) + 3
+        while True:
+            m = re3end.match(self.s, self.i)
+            if m is not None:
+                break
+            m = re.match('[^\\n\\r]*(\\n|\\r\\n?)', self.s[self.i:])
+            if m is None:
+                self.add_special_token(
+                    LexError(ss, UNTERMINATED_STRING_LITERAL), line, 0)
+                return
+            s = m.group(0)
+            ss += s
+            self.line += 1
+            self.i += len(s)
+        lit = None  # type: Token
+        if 'b' in prefix or 'B' in prefix:
+            lit = BytesLit(ss + m.group(0))
+        elif 'u' in prefix or 'U' in prefix:
+            lit = UnicodeLit(ss + m.group(0))
+        else:
+            lit = StrLit(ss + m.group(0))
+        self.add_special_token(lit, line, len(m.group(0)))
+
+    def lex_multiline_string_literal(self, re_end: Pattern[str],
+                                     prefix: str) -> None:
+        """Analyze multiline single/double-quoted string literal.
+
+        Use explicit \ for line continuation.
+        """
+        line = self.line
+        self.i += len(prefix)
+        ss = prefix
+        while True:
+            m = self.match(re_end)
+            if m == '':
+                self.add_special_token(
+                    LexError(ss, UNTERMINATED_STRING_LITERAL), line, 0)
+                return
+            ss += m
+            self.line += 1
+            self.i += len(m)
+            if not m.endswith('\n') and not m.endswith('\r'): break
+        self.add_special_token(StrLit(ss), line, 0)  # TODO bytes
+
+    comment_exp = re.compile(r'#[^\n\r]*')
+
+    def lex_comment(self) -> None:
+        """Analyze a comment."""
+        s = self.match(self.comment_exp)
+        self.add_pre_whitespace(s)
+
+    backslash_exp = re.compile(r'\\(\n|\r\n?)')
+
+    def lex_backslash(self) -> None:
+        s = self.match(self.backslash_exp)
+        if s != '':
+            self.add_pre_whitespace(s)
+            self.line += 1
+        else:
+            self.add_token(LexError('\\', INVALID_BACKSLASH))
+
+    space_exp = re.compile(r'[ \t\x0c]*')
+    indent_exp = re.compile(r'[ \t]*[#\n\r]?')
+
+    def lex_space(self) -> None:
+        """Analyze a run of whitespace characters (within a line, not indents).
+
+        Only store them in self.pre_whitespace.
+        """
+        s = self.match(self.space_exp)
+        self.add_pre_whitespace(s)
+
+    comment_or_newline = '#' + '\n' + '\r'  # type: str
+
+    def lex_indent(self) -> None:
+        """Analyze whitespace chars at the beginning of a line (indents)."""
+        s = self.match(self.indent_exp)
+        if s != '' and s[-1] in self.comment_or_newline:
+            # Empty line (whitespace only or comment only).
+            self.add_pre_whitespace(s[:-1])
+            if s[-1] == '#':
+                self.lex_comment()
+            else:
+                self.lex_break()
+            self.lex_indent()
+            return
+        indent = self.calc_indent(s)
+        if indent == self.indents[-1]:
+            # No change in indent: just whitespace.
+            self.add_pre_whitespace(s)
+        elif indent > self.indents[-1]:
+            # An increased indent (new block).
+            self.indents.append(indent)
+            self.add_token(Indent(s))
+        else:
+            # Decreased indent (end of one or more blocks).
+            pre = self.pre_whitespace
+            self.pre_whitespace = ''
+            while indent < self.indents[-1]:
+                self.add_token(Dedent(''))
+                self.indents.pop()
+            self.pre_whitespace = pre
+            self.add_pre_whitespace(s)
+            if indent != self.indents[-1]:
+                # Error: indent level does not match a previous indent level.
+                self.add_token(LexError('', INVALID_DEDENT))
+
+    def calc_indent(self, s: str) -> int:
+        indent = 0
+        for ch in s:
+            if ch == ' ':
+                indent += 1
+            else:
+                # Tab: 8 spaces (rounded to a multiple of 8).
+                indent += 8 - indent % 8
+        return indent
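+
+    # Worked example (illustration only): calc_indent('  \t') returns 8, since
+    # the two spaces count as 2 and the tab then rounds up to the next
+    # multiple of 8; calc_indent('\t    ') returns 12.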
+
+    break_exp = re.compile(r'\r\n|\r|\n|;')
+
+    def lex_break(self) -> None:
+        """Analyse a line break."""
+        s = self.match(self.break_exp)
+        last_tok = self.tok[-1] if self.tok else None
+        if isinstance(last_tok, Break):
+            was_semicolon = last_tok.string == ';'
+            last_tok.string += self.pre_whitespace + s
+            self.i += len(s)
+            self.line += 1
+            self.pre_whitespace = ''
+            if was_semicolon:
+                self.lex_indent()
+        elif self.ignore_break():
+            self.add_pre_whitespace(s)
+            self.line += 1
+        else:
+            self.add_token(Break(s))
+            self.line += 1
+            self.lex_indent()
+
+    def lex_semicolon(self) -> None:
+        self.add_token(Break(';'))
+
+    def lex_colon(self) -> None:
+        self.add_token(Colon(':'))
+
+    open_bracket_exp = re.compile('[[({]')
+
+    def lex_open_bracket(self) -> None:
+        s = self.match(self.open_bracket_exp)
+        self.open_brackets.append(s)
+        self.add_token(Punct(s))
+
+    close_bracket_exp = re.compile('[])}]')
+
+    open_bracket = {')': '(', ']': '[', '}': '{'}
+
+    def lex_close_bracket(self) -> None:
+        s = self.match(self.close_bracket_exp)
+        if (self.open_brackets != []
+                and self.open_bracket[s] == self.open_brackets[-1]):
+            self.open_brackets.pop()
+        self.add_token(Punct(s))
+
+    def lex_misc(self) -> None:
+        """Analyze a non-alphabetical operator or a punctuator."""
+        s = ''
+        t = None  # type: Any
+        for re_list, type in [(operators, Op), (punctuators, Punct)]:
+            for regexp in re_list:
+                s2 = self.match(regexp)
+                if len(s2) > len(s):
+                    t = type
+                    s = s2
+        if s == '':
+            # Could not match any token; report an invalid character. This is
+            # reached at least if the current character is '!' not followed by
+            # '='.
+            self.add_token(LexError(self.s[self.i], INVALID_CHARACTER))
+        else:
+            if s == '<>':
+                if self.pyversion[0] == 2:
+                    s = '!='
+                else:
+                    self.add_token(Op('<'))
+                    s = '>'
+            self.add_token(t(s))
+
+    def unknown_character(self) -> None:
+        """Report an unknown character as a lexical analysis error."""
+        self.add_token(LexError(self.s[self.i], INVALID_CHARACTER))
+
+    # Utility methods
+
+    def match(self, pattern: Pattern[str]) -> str:
+        """Try to match a regular expression at current location.
+
+        If the argument regexp is matched at the current location,
+        return the matched string; otherwise return the empty string.
+        """
+        m = pattern.match(self.s, self.i)
+        if m is not None:
+            return m.group(0)
+        else:
+            return ''
+
+    def add_pre_whitespace(self, s: str) -> None:
+        """Record whitespace and comments before the next token.
+
+        The accumulated whitespace/comments will be stored in the next token
+        and then it will be cleared.
+
+        This is needed for pretty-printing the original source code while
+        preserving comments, indentation, whitespace etc.
+        """
+        self.pre_whitespace += s
+        self.i += len(s)
+
+    type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')
+
+    def add_token(self, tok: Token) -> None:
+        """Store a token.
+
+        Update its line number and record preceding whitespace
+        characters and comments.
+        """
+        if (tok.string == '' and not isinstance(tok, Eof)
+                and not isinstance(tok, Break)
+                and not isinstance(tok, LexError)
+                and not isinstance(tok, Dedent)):
+            raise ValueError('Empty token')
+        tok.pre = self.pre_whitespace
+        if self.type_ignore_exp.match(tok.pre):
+            delta = 0
+            if '\n' in tok.pre or '\r' in tok.pre:
+                delta += 1
+            self.ignored_lines.add(self.line - delta)
+        tok.line = self.line
+        self.tok.append(tok)
+        self.i += len(tok.string)
+        self.pre_whitespace = ''
+
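add_token matches type_ignore_exp against the accumulated pre-whitespace, so a "# type: ignore" comment is recorded only when it leads that whitespace run, and the delta adjustment attributes it to the previous line when the run already crossed a newline. A quick sketch of what the pattern accepts:

    import re

    type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')

    assert type_ignore_exp.match('  # type: ignore')
    assert type_ignore_exp.match('\t#type:ignore  # reason')   # spaces are optional
    assert not type_ignore_exp.match('# type: ignored')        # \b rejects longer words
    assert not type_ignore_exp.match('x = 1  # type: ignore')  # must start with the whitespace/comment run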
+    def add_special_token(self, tok: Token, line: int, skip: int) -> None:
+        """Like add_token, but caller sets the number of chars to skip."""
+        if (tok.string == '' and not isinstance(tok, Eof)
+                and not isinstance(tok, Break)
+                and not isinstance(tok, LexError)
+                and not isinstance(tok, Dedent)):
+            raise ValueError('Empty token')
+        tok.pre = self.pre_whitespace
+        tok.line = line
+        self.tok.append(tok)
+        self.i += skip
+        self.pre_whitespace = ''
+
+    def ignore_break(self) -> bool:
+        """If the next token is a break, can we ignore it?"""
+        if len(self.open_brackets) > 0 or len(self.tok) == 0:
+            # Ignore break after open ( [ or { or at the beginning of file.
+            return True
+        else:
+            # Ignore break after another break or dedent.
+            t = self.tok[-1]
+            return isinstance(t, Break) or isinstance(t, Dedent)
+
+
+if __name__ == '__main__':
+    # Lexically analyze a file and dump the tokens to stdout.
+    import sys
+    if len(sys.argv) != 2:
+        print('Usage: lex.py FILE')
+        sys.exit(2)
+    fnam = sys.argv[1]
+    s = open(fnam, 'rb').read()
+    for t in lex(s):
+        print(t)
diff --git a/mypy/main.py b/mypy/main.py
new file mode 100644
index 0000000..6942da9
--- /dev/null
+++ b/mypy/main.py
@@ -0,0 +1,340 @@
+"""Mypy type checker command line tool."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import typing
+from typing import Optional, Dict, List, Tuple
+
+from mypy import build
+from mypy import defaults
+from mypy import git
+from mypy.build import BuildSource, PYTHON_EXTENSIONS
+from mypy.errors import CompileError, set_drop_into_pdb
+
+from mypy.version import __version__
+
+PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS)
+
+
+class Options:
+    def __init__(self) -> None:
+        # Set default options.
+        self.target = build.TYPE_CHECK
+        self.build_flags = []  # type: List[str]
+        self.pyversion = defaults.PYTHON3_VERSION
+        self.custom_typing_module = None  # type: str
+        self.implicit_any = False
+        self.report_dirs = {}  # type: Dict[str, str]
+        self.python_path = False
+        self.dirty_stubs = False
+        self.pdb = False
+
+
+def main(script_path: str) -> None:
+    """Main entry point to the type checker.
+
+    Args:
+        script_path: Path to the 'mypy' script (used for finding data files).
+    """
+    if script_path:
+        bin_dir = find_bin_directory(script_path)
+    else:
+        bin_dir = None
+    sources, options = process_options(sys.argv[1:])
+    if options.pdb:
+        set_drop_into_pdb(True)
+    if not options.dirty_stubs:
+        git.verify_git_integrity_or_abort(build.default_data_dir(bin_dir))
+    try:
+        if options.target == build.TYPE_CHECK:
+            type_check_only(sources, bin_dir, options)
+        else:
+            raise RuntimeError('unsupported target %d' % options.target)
+    except CompileError as e:
+        for m in e.messages:
+            sys.stdout.write(m + '\n')
+        sys.exit(1)
+
+
+def find_bin_directory(script_path: str) -> str:
+    """Find the directory that contains this script.
+
+    This is used by build to find stubs and other data files.
+    """
+    # Follow up to 5 symbolic links (cap to avoid cycles).
+    for i in range(5):
+        if os.path.islink(script_path):
+            script_path = readlinkabs(script_path)
+        else:
+            break
+    return os.path.dirname(script_path)
+
+
+def readlinkabs(link: str) -> str:
+    """Return an absolute path to symbolic link destination."""
+    # Adapted from code by Greg Smith.
+    assert os.path.islink(link)
+    path = os.readlink(link)
+    if os.path.isabs(path):
+        return path
+    return os.path.join(os.path.dirname(link), path)
+
+
+def type_check_only(sources: List[BuildSource],
+        bin_dir: str, options: Options) -> None:
+    # Type-check the program and dependencies and translate to Python.
+    build.build(sources=sources,
+                target=build.TYPE_CHECK,
+                bin_dir=bin_dir,
+                pyversion=options.pyversion,
+                custom_typing_module=options.custom_typing_module,
+                implicit_any=options.implicit_any,
+                report_dirs=options.report_dirs,
+                flags=options.build_flags,
+                python_path=options.python_path)
+
+
+def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
+    """Process command line arguments.
+
+    Return a tuple of (list of build sources to check, parsed options).
+    """
+    # TODO: Rewrite using argparse.
+    options = Options()
+    help = False
+    ver = False
+    while args and args[0].startswith('-'):
+        if args[0] in ('--verbose', '-v'):
+            options.build_flags.append(build.VERBOSE)
+            args = args[1:]
+        elif args[0] == '--py2':
+            # Use Python 2 mode.
+            options.pyversion = defaults.PYTHON2_VERSION
+            args = args[1:]
+        elif args[0] == '--python-version':
+            version_components = args[1].split(".")[0:2]
+            if len(version_components) != 2:
+                fail("Invalid python version {} (expected format: 'x.y')".format(
+                    repr(args[1])))
+            if not all(item.isdigit() for item in version_components):
+                fail("Found non-digit in python version: {}".format(
+                    args[1]))
+            options.pyversion = (int(version_components[0]), int(version_components[1]))
+            args = args[2:]
+        elif args[0] == '-f' or args[0] == '--dirty-stubs':
+            options.dirty_stubs = True
+            args = args[1:]
+        elif args[0] == '-m' and args[1:]:
+            if args[2:]:
+                fail("No extra argument should appear after '-m mod'")
+            options.build_flags.append(build.MODULE)
+            return [BuildSource(None, args[1], None)], options
+        elif args[0] == '--package' and args[1:]:
+            if args[2:]:
+                fail("No extra argument should appear after '--package dir'")
+            options.build_flags.append(build.MODULE)
+            lib_path = [os.getcwd()] + build.mypy_path()
+            targets = build.find_modules_recursive(args[1], lib_path)
+            if not targets:
+                fail("Can't find package '{}'".format(args[1]))
+            return targets, options
+        elif args[0] == '-c' and args[1:]:
+            if args[2:]:
+                fail("No extra argument should appear after '-c string'")
+            options.build_flags.append(build.PROGRAM_TEXT)
+            return [BuildSource(None, None, args[1])], options
+        elif args[0] in ('-h', '--help'):
+            help = True
+            args = args[1:]
+        elif args[0] == '--stats':
+            options.build_flags.append(build.DUMP_TYPE_STATS)
+            args = args[1:]
+        elif args[0] == '--inferstats':
+            options.build_flags.append(build.DUMP_INFER_STATS)
+            args = args[1:]
+        elif args[0] == '--custom-typing' and args[1:]:
+            options.custom_typing_module = args[1]
+            args = args[2:]
+        elif is_report(args[0]) and args[1:]:
+            report_type = args[0][2:-7]
+            report_dir = args[1]
+            options.report_dirs[report_type] = report_dir
+            args = args[2:]
+        elif args[0] == '--use-python-path':
+            options.python_path = True
+            args = args[1:]
+        elif args[0] in ('--silent-imports', '--silent', '-s'):
+            options.build_flags.append(build.SILENT_IMPORTS)
+            args = args[1:]
+        elif args[0] == '--pdb':
+            options.pdb = True
+            args = args[1:]
+        elif args[0] == '--implicit-any':
+            options.implicit_any = True
+            args = args[1:]
+        elif args[0] in ('--version', '-V'):
+            ver = True
+            args = args[1:]
+        else:
+            usage('Unknown option: {}'.format(args[0]))
+
+    if help:
+        usage()
+
+    if ver:
+        version()
+
+    if not args:
+        usage('Missing target file or module')
+
+    if options.python_path and options.pyversion[0] == 2:
+        usage('Python version 2 (or --py2) specified, '
+              'but --use-python-path will search in sys.path of Python 3')
+
+    targets = []
+    for arg in args:
+        if arg.endswith(PY_EXTENSIONS):
+            targets.append(BuildSource(arg, crawl_up(arg)[1], None))
+        elif os.path.isdir(arg):
+            targets.extend(expand_dir(arg))
+        else:
+            targets.append(BuildSource(arg, None, None))
+    return targets, options
+
+
+def expand_dir(arg: str) -> List[BuildSource]:
+    """Convert a directory name to a list of sources to build."""
+    dir, mod = crawl_up(arg)
+    if not mod:
+        # It's a directory without an __init__.py[i].
+        # List all the .py[i] files (but not recursively).
+        targets = []  # type: List[BuildSource]
+        for name in os.listdir(dir):
+            stripped = strip_py(name)
+            if stripped:
+                path = os.path.join(dir, name)
+                targets.append(BuildSource(path, stripped, None))
+        if not targets:
+            fail("There are no .py[i] files in directory '{}'".format(arg))
+        return targets
+
+    else:
+        lib_path = [dir]
+        targets = build.find_modules_recursive(mod, lib_path)
+        if not targets:
+            fail("Found no modules in package '{}'".format(arg))
+        return targets
+
+
+def crawl_up(arg: str) -> Tuple[str, str]:
+    """Given a .py[i] filename, return (root directory, module).
+
+    We crawl up the path until we find a directory without __init__.py[i].
+    """
+    dir, mod = os.path.split(arg)
+    mod = strip_py(mod) or mod
+    assert '.' not in mod
+    while dir and has_init_file(dir):
+        dir, base = os.path.split(dir)
+        if not base:
+            break
+        if mod == '__init__' or not mod:
+            mod = base
+        else:
+            mod = base + '.' + mod
+    return dir, mod
+
+
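crawl_up turns a file path into a dotted module name by walking up through package directories (those containing __init__.py[i]) and prepending each directory name. A rough sketch of the same walk against an in-memory set of package directories instead of the real filesystem (crawl_up_demo and package_dirs are illustrative names, not part of this module):

    import os
    from typing import Set, Tuple

    def crawl_up_demo(path: str, package_dirs: Set[str]) -> Tuple[str, str]:
        dir, mod = os.path.split(path)
        mod = mod[:-3] if mod.endswith('.py') else mod
        while dir in package_dirs:
            dir, base = os.path.split(dir)
            if not base:
                break
            mod = base if mod == '__init__' else base + '.' + mod
        return dir, mod

    # pkg/ and pkg/sub/ contain __init__.py; src/ does not.
    pkgs = {'src/pkg', 'src/pkg/sub'}
    assert crawl_up_demo('src/pkg/sub/mod.py', pkgs) == ('src', 'pkg.sub.mod')
    assert crawl_up_demo('src/pkg/__init__.py', pkgs) == ('src', 'pkg')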
+def strip_py(arg: str) -> Optional[str]:
+    """Strip a trailing .py or .pyi suffix.
+
+    Return None if no such suffix is found.
+    """
+    for ext in PY_EXTENSIONS:
+        if arg.endswith(ext):
+            return arg[:-len(ext)]
+    return None
+
+
+def has_init_file(dir: str) -> bool:
+    """Return whether a directory contains a file named __init__.py[i]."""
+    for ext in PY_EXTENSIONS:
+        if os.path.isfile(os.path.join(dir, '__init__' + ext)):
+            return True
+    return False
+
+
+# Don't generate this from mypy.reports, not all are meant to be public.
+REPORTS = [
+    'html',
+    'old-html',
+    'xslt-html',
+    'xml',
+    'txt',
+    'xslt-txt',
+]
+
+
+def is_report(arg: str) -> bool:
+    if arg.startswith('--') and arg.endswith('-report'):
+        report_type = arg[2:-7]
+        return report_type in REPORTS
+    return False
+
+
+def usage(msg: str = None) -> None:
+    if msg:
+        sys.stderr.write('%s\n' % msg)
+        sys.stderr.write("""\
+usage: mypy [option ...] [-c string | -m mod | file_or_dir ...]
+Try 'mypy -h' for more information.
+""")
+        sys.exit(2)
+    else:
+        sys.stdout.write("""\
+usage: mypy [option ...] [-c string | -m mod | file_or_dir ...]
+
+Options:
+  -h, --help         print this help message and exit
+  -V, --version      show the current version information and exit
+  -v, --verbose      more verbose messages
+  --py2              use Python 2 mode
+  --python-version x.y  use Python x.y
+  -s, --silent-imports  don't follow imports to .py files
+  --implicit-any     behave as though all functions were annotated with Any
+  -f, --dirty-stubs  don't warn if typeshed is out of sync
+  --pdb              invoke pdb on fatal error
+  --use-python-path  search for modules in sys.path of running Python
+  --stats            dump stats
+  --inferstats       dump type inference stats
+  --custom-typing mod  use a custom typing module
+  --<fmt>-report dir generate a <fmt> report of type precision under dir/
+                     <fmt> may be one of: %s
+
+How to specify the code to type-check:
+  -m mod             type-check module (may be a dotted name)
+  -c string          type-check program passed in as string
+  --package dir      type-check all files in a directory
+  file ...           type-check given files
+  dir ...            type-check all files in given directories
+
+Environment variables:
+  MYPYPATH     additional module search path
+""" % ', '.join(REPORTS))
+        sys.exit(0)
+
+
+def version() -> None:
+    sys.stdout.write("mypy {}\n".format(__version__))
+    sys.exit(0)
+
+
+def fail(msg: str) -> None:
+    sys.stderr.write('%s\n' % msg)
+    sys.exit(1)
diff --git a/mypy/maptype.py b/mypy/maptype.py
new file mode 100644
index 0000000..5eb6a0d
--- /dev/null
+++ b/mypy/maptype.py
@@ -0,0 +1,96 @@
+from typing import Dict, List, cast
+
+from mypy.expandtype import expand_type
+from mypy.nodes import TypeInfo
+from mypy.types import Type, Instance, AnyType
+
+
+def map_instance_to_supertype(instance: Instance,
+                              superclass: TypeInfo) -> Instance:
+    """Produce a supertype of `instance` that is an Instance
+    of `superclass`, mapping type arguments up the chain of bases.
+
+    `superclass` is required to be a superclass of `instance.type`.
+    """
+    if instance.type == superclass:
+        # Fast path: `instance` already belongs to `superclass`.
+        return instance
+
+    if not superclass.type_vars:
+        # Fast path: `superclass` has no type variables to map to.
+        return Instance(superclass, [])
+
+    return map_instance_to_supertypes(instance, superclass)[0]
+
+
+def map_instance_to_supertypes(instance: Instance,
+                               supertype: TypeInfo) -> List[Instance]:
+    # FIX: Currently we should only have one supertype per interface, so no
+    #      need to return an array
+    result = []  # type: List[Instance]
+    for path in class_derivation_paths(instance.type, supertype):
+        types = [instance]
+        for sup in path:
+            a = []  # type: List[Instance]
+            for t in types:
+                a.extend(map_instance_to_direct_supertypes(t, sup))
+            types = a
+        result.extend(types)
+    return result
+
+
+def class_derivation_paths(typ: TypeInfo,
+                           supertype: TypeInfo) -> List[List[TypeInfo]]:
+    """Return an array of non-empty paths of direct base classes from
+    type to supertype.  Return [] if no such path could be found.
+
+      InterfaceImplementationPaths(A, B) == [[B]] if A inherits B
+      InterfaceImplementationPaths(A, C) == [[B, C]] if A inherits B and
+                                                        B inherits C
+    """
+    # FIX: Currently we might only ever have a single path, so this could be
+    #      simplified
+    result = []  # type: List[List[TypeInfo]]
+
+    for base in typ.bases:
+        if base.type == supertype:
+            result.append([base.type])
+        else:
+            # Try constructing a longer path via the base class.
+            for path in class_derivation_paths(base.type, supertype):
+                result.append([base.type] + path)
+
+    return result
+
+
+def map_instance_to_direct_supertypes(instance: Instance,
+                                      supertype: TypeInfo) -> List[Instance]:
+    # FIX: There should only be one supertype, always.
+    typ = instance.type
+    result = []  # type: List[Instance]
+
+    for b in typ.bases:
+        if b.type == supertype:
+            env = instance_to_type_environment(instance)
+            result.append(cast(Instance, expand_type(b, env)))
+
+    if result:
+        return result
+    else:
+        # Relationship with the supertype not specified explicitly. Use dynamic
+        # type arguments implicitly.
+        return [Instance(supertype, [AnyType()] * len(supertype.type_vars))]
+
+
+def instance_to_type_environment(instance: Instance) -> Dict[int, Type]:
+    """Given an Instance, produce the resulting type environment for type
+    variables bound by the Instance's class definition.
+
+    An Instance is a type application of a class (a TypeInfo) to its
+    required number of type arguments.  So this environment consists
+    of the class's type variables mapped to the Instance's actual
+    arguments.  The type variables are mapped by their `id`.
+
+    """
+    # Type variables bound by a class have `id` of 1, 2, etc.
+    return {i + 1: instance.args[i] for i in range(len(instance.args))}
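The environment built by instance_to_type_environment simply pairs type variable ids 1..n with the instance's type arguments in order; expand_type then substitutes those ids inside each base class type. A toy illustration of the mapping, using strings in place of mypy Type objects:

    from typing import Dict, List

    def to_type_environment(args: List[str]) -> Dict[int, str]:
        # Type variables bound by a class have ids 1, 2, ... in order.
        return {i + 1: args[i] for i in range(len(args))}

    # For an instance like Dict[str, int]: KT (id 1) -> str, VT (id 2) -> int.
    assert to_type_environment(['str', 'int']) == {1: 'str', 2: 'int'}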
diff --git a/mypy/meet.py b/mypy/meet.py
new file mode 100644
index 0000000..1a3c8e2
--- /dev/null
+++ b/mypy/meet.py
@@ -0,0 +1,200 @@
+from typing import cast, List
+
+from mypy.join import is_similar_callables, combine_similar_callables
+from mypy.types import (
+    Type, AnyType, TypeVisitor, UnboundType, Void, ErrorType, NoneTyp, TypeVarType,
+    Instance, CallableType, TupleType, ErasedType, TypeList, UnionType, PartialType, DeletedType
+)
+from mypy.sametypes import is_same_type
+from mypy.subtypes import is_subtype
+from mypy.nodes import TypeInfo
+
+# TODO Describe this module.
+
+
+def meet_types(s: Type, t: Type) -> Type:
+    """Return the greatest lower bound of two types."""
+    if isinstance(s, ErasedType):
+        return s
+    if isinstance(s, AnyType):
+        return t
+    if isinstance(s, UnionType) and not isinstance(t, UnionType):
+        s, t = t, s
+    return t.accept(TypeMeetVisitor(s))
+
+
+def meet_simple(s: Type, t: Type, default_right: bool = True) -> Type:
+    if s == t:
+        return s
+    if isinstance(s, UnionType):
+        return UnionType.make_simplified_union([meet_types(x, t) for x in s.items])
+    elif not is_overlapping_types(s, t, use_promotions=True):
+        return NoneTyp()
+    else:
+        if default_right:
+            return t
+        else:
+            return s
+
+
+def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool:
+    """Can a value of type t be a value of type s, or vice versa?
+
+    Note that this effectively checks against erased types, since X[Any] is always
+    compatible with X[T].
+
+    If use_promotions is True, also consider type promotions (int and
+    float would only be overlapping if it's True).
+    """
+    if isinstance(t, Instance):
+        if isinstance(s, Instance):
+            # Only consider two classes non-disjoint if one is included in the mro
+            # of another.
+            if use_promotions:
+                # Consider cases like int vs float to be overlapping where
+                # there is only a type promotion relationship but not proper
+                # subclassing.
+                if t.type._promote and is_overlapping_types(t.type._promote, s):
+                    return True
+                if s.type._promote and is_overlapping_types(s.type._promote, t):
+                    return True
+            return t.type in s.type.mro or s.type in t.type.mro
+    if isinstance(t, UnionType):
+        return any(is_overlapping_types(item, s)
+                   for item in t.items)
+    if isinstance(s, UnionType):
+        return any(is_overlapping_types(t, item)
+                   for item in s.items)
+    # We conservatively assume that non-instance, non-union types can overlap any other
+    # types.
+    return True
+
+
+def nearest_builtin_ancestor(type: TypeInfo) -> TypeInfo:
+    for base in type.mro:
+        if base.defn.is_builtinclass:
+            return base
+    else:
+        return None
+
+
+class TypeMeetVisitor(TypeVisitor[Type]):
+    def __init__(self, s: Type) -> None:
+        self.s = s
+
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        if isinstance(self.s, Void) or isinstance(self.s, ErrorType):
+            return ErrorType()
+        elif isinstance(self.s, NoneTyp):
+            return self.s
+        else:
+            return AnyType()
+
+    def visit_error_type(self, t: ErrorType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        assert False, 'Not supported'
+
+    def visit_any(self, t: AnyType) -> Type:
+        return self.s
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        if isinstance(self.s, UnionType):
+            meets = []  # type: List[Type]
+            for x in t.items:
+                for y in self.s.items:
+                    meets.append(meet_types(x, y))
+        else:
+            meets = [meet_types(x, self.s)
+                     for x in t.items]
+        return UnionType.make_simplified_union(meets)
+
+    def visit_void(self, t: Void) -> Type:
+        if isinstance(self.s, Void):
+            return t
+        else:
+            return ErrorType()
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
+            return t
+        else:
+            return ErrorType()
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
+            if isinstance(self.s, NoneTyp):
+                return self.s
+            else:
+                return t
+        else:
+            return ErrorType()
+
+    def visit_erased_type(self, t: ErasedType) -> Type:
+        return self.s
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if isinstance(self.s, TypeVarType) and (cast(TypeVarType, self.s)).id == t.id:
+            return self.s
+        else:
+            return self.default(self.s)
+
+    def visit_instance(self, t: Instance) -> Type:
+        if isinstance(self.s, Instance):
+            si = cast(Instance, self.s)
+            if t.type == si.type:
+                if is_subtype(t, self.s) or is_subtype(self.s, t):
+                    # Combine type arguments. We could have used join below
+                    # equivalently.
+                    args = []  # type: List[Type]
+                    for i in range(len(t.args)):
+                        args.append(self.meet(t.args[i], si.args[i]))
+                    return Instance(t.type, args)
+                else:
+                    return NoneTyp()
+            else:
+                if is_subtype(t, self.s):
+                    return t
+                elif is_subtype(self.s, t):
+                    # See also above comment.
+                    return self.s
+                else:
+                    return NoneTyp()
+        else:
+            return self.default(self.s)
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        if isinstance(self.s, CallableType) and is_similar_callables(
+                t, cast(CallableType, self.s)):
+            return combine_similar_callables(t, cast(CallableType, self.s))
+        else:
+            return self.default(self.s)
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        if isinstance(self.s, TupleType) and (
+                cast(TupleType, self.s).length() == t.length()):
+            items = []  # type: List[Type]
+            for i in range(t.length()):
+                items.append(self.meet(t.items[i],
+                                       (cast(TupleType, self.s)).items[i]))
+            # TODO: What if the fallbacks are different?
+            return TupleType(items, t.fallback)
+        else:
+            return self.default(self.s)
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        # We can't determine the meet of partial types. We should never get here.
+        assert False, 'Internal error'
+
+    def meet(self, s, t):
+        return meet_types(s, t)
+
+    def default(self, typ):
+        if isinstance(typ, UnboundType):
+            return AnyType()
+        elif isinstance(typ, Void) or isinstance(typ, ErrorType):
+            return ErrorType()
+        else:
+            return NoneTyp()
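meet_types computes a greatest lower bound: Any meets to the other type, unions distribute item by item, and instances of unrelated classes fall through to NoneTyp, since no value can inhabit both. As a loose analogy for the plain (non-generic) Instance case, the same rule over ordinary Python classes looks like this (meet_classes is an illustrative helper, not mypy API):

    from typing import Optional

    def meet_classes(a: type, b: type) -> Optional[type]:
        # The more specific class wins; unrelated classes share no values.
        if issubclass(a, b):
            return a
        if issubclass(b, a):
            return b
        return None  # analogous to NoneTyp above

    assert meet_classes(bool, int) is bool   # bool is a subclass of int
    assert meet_classes(int, str) is None    # disjoint: meet is the bottom type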
diff --git a/mypy/messages.py b/mypy/messages.py
new file mode 100644
index 0000000..83344b4
--- /dev/null
+++ b/mypy/messages.py
@@ -0,0 +1,874 @@
+"""Facilities and constants for generating error messages during type checking.
+
+The type checker itself does not deal with message string literals to
+improve code clarity and to simplify localization (in the future)."""
+
+import re
+import difflib
+
+from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple
+
+from mypy.errors import Errors
+from mypy.types import (
+    Type, CallableType, Instance, TypeVarType, TupleType, UnionType, Void, NoneTyp, AnyType,
+    Overloaded, FunctionLike, DeletedType, PartialType
+)
+from mypy.nodes import TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases
+
+
+# Constants that represent simple type checker error messages, i.e. messages
+# that do not have any parameters.
+
+NO_RETURN_VALUE_EXPECTED = 'No return value expected'
+INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type'
+RETURN_VALUE_EXPECTED = 'Return value expected'
+BOOLEAN_VALUE_EXPECTED = 'Boolean value expected'
+BOOLEAN_EXPECTED_FOR_IF = 'Boolean value expected for if condition'
+BOOLEAN_EXPECTED_FOR_WHILE = 'Boolean value expected for while condition'
+BOOLEAN_EXPECTED_FOR_UNTIL = 'Boolean value expected for until condition'
+BOOLEAN_EXPECTED_FOR_NOT = 'Boolean value expected for not operand'
+INVALID_EXCEPTION = 'Exception must be derived from BaseException'
+INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException'
+INVALID_RETURN_TYPE_FOR_GENERATOR = \
+    'The return type of a generator function should be "Generator" or one of its supertypes'
+INVALID_GENERATOR_RETURN_ITEM_TYPE = \
+    'The return type of a generator function must be None in its third type parameter in Python 2'
+YIELD_VALUE_EXPECTED = 'Yield value expected'
+INCOMPATIBLE_TYPES = 'Incompatible types'
+INCOMPATIBLE_TYPES_IN_ASSIGNMENT = 'Incompatible types in assignment'
+INCOMPATIBLE_REDEFINITION = 'Incompatible redefinition'
+INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in yield'
+INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"'
+INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = 'Incompatible types in string interpolation'
+INIT_MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "__init__" must be None'
+GETTER_TYPE_INCOMPATIBLE_WITH_SETTER = \
+    'Type of getter incompatible with setter'
+TUPLE_INDEX_MUST_BE_AN_INT_LITERAL = 'Tuple index must be an integer literal'
+TUPLE_SLICE_MUST_BE_AN_INT_LITERAL = 'Tuple slice must be an integer literal'
+TUPLE_INDEX_OUT_OF_RANGE = 'Tuple index out of range'
+TYPE_CONSTANT_EXPECTED = 'Type "Constant" or initializer expected'
+INCOMPATIBLE_PAIR_ITEM_TYPE = 'Incompatible Pair item type'
+INVALID_TYPE_APPLICATION_TARGET_TYPE = 'Invalid type application target type'
+INCOMPATIBLE_TUPLE_ITEM_TYPE = 'Incompatible tuple item type'
+INCOMPATIBLE_KEY_TYPE = 'Incompatible dictionary key type'
+INCOMPATIBLE_VALUE_TYPE = 'Incompatible dictionary value type'
+NEED_ANNOTATION_FOR_VAR = 'Need type annotation for variable'
+ITERABLE_EXPECTED = 'Iterable expected'
+INCOMPATIBLE_TYPES_IN_FOR = 'Incompatible types in for statement'
+INCOMPATIBLE_ARRAY_VAR_ARGS = 'Incompatible variable arguments in call'
+INVALID_SLICE_INDEX = 'Slice index must be an integer or None'
+CANNOT_INFER_LAMBDA_TYPE = 'Cannot infer type of lambda'
+CANNOT_INFER_ITEM_TYPE = 'Cannot infer iterable item type'
+CANNOT_ACCESS_INIT = 'Cannot access "__init__" directly'
+CANNOT_ASSIGN_TO_METHOD = 'Cannot assign to a method'
+CANNOT_ASSIGN_TO_TYPE = 'Cannot assign to a type'
+INCONSISTENT_ABSTRACT_OVERLOAD = \
+    'Overloaded method has both abstract and non-abstract variants'
+READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \
+    'Read-only property cannot override read-write property'
+INSTANCE_LAYOUT_CONFLICT = 'Instance layout conflict in multiple inheritance'
+FORMAT_REQUIRES_MAPPING = 'Format requires a mapping'
+GENERIC_TYPE_NOT_VALID_AS_EXPRESSION = \
+    "Generic type not valid as an expression any more (use '# type:' comment instead)"
+RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = "Cannot use a contravariant type variable as return type"
+FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = "Cannot use a covariant type variable as a parameter"
+INCOMPATIBLE_IMPORT_OF = "Incompatible import of"
+
+
+class MessageBuilder:
+    """Helper class for reporting type checker error messages with parameters.
+
+    The methods of this class need to be provided with the context within a
+    file; the errors member manages the wider context.
+
+    IDEA: Support a 'verbose mode' that includes full information about types
+          in error messages and that may otherwise produce more detailed error
+          messages.
+    """
+
+    # Report errors using this instance. It knows about the current file and
+    # import context.
+    errors = None  # type: Errors
+
+    modules = None  # type: Dict[str, MypyFile]
+
+    # Number of times errors have been disabled.
+    disable_count = 0
+
+    # Hack to deduplicate error messages from union types
+    disable_type_names = 0
+
+    def __init__(self, errors: Errors, modules: Dict[str, MypyFile]) -> None:
+        self.errors = errors
+        self.modules = modules
+        self.disable_count = 0
+        self.disable_type_names = 0
+
+    #
+    # Helpers
+    #
+
+    def copy(self) -> 'MessageBuilder':
+        new = MessageBuilder(self.errors.copy(), self.modules)
+        new.disable_count = self.disable_count
+        new.disable_type_names = self.disable_type_names
+        return new
+
+    def add_errors(self, messages: 'MessageBuilder') -> None:
+        """Add errors in messages to this builder."""
+        if self.disable_count <= 0:
+            for info in messages.errors.error_info:
+                self.errors.add_error_info(info)
+
+    def disable_errors(self) -> None:
+        self.disable_count += 1
+
+    def enable_errors(self) -> None:
+        self.disable_count -= 1
+
+    def is_errors(self) -> bool:
+        return self.errors.is_errors()
+
+    def report(self, msg: str, context: Context, severity: str, file: str = None) -> None:
+        """Report an error or note (unless disabled)."""
+        if self.disable_count <= 0:
+            self.errors.report(context.get_line(), msg.strip(), severity=severity, file=file)
+
+    def fail(self, msg: str, context: Context, file: str = None) -> None:
+        """Report an error message (unless disabled)."""
+        self.report(msg, context, 'error', file=file)
+
+    def note(self, msg: str, context: Context, file: str = None) -> None:
+        """Report an error message (unless disabled)."""
+        self.report(msg, context, 'note', file=file)
+
+    def format(self, typ: Type, verbosity: int = 0) -> str:
+        """Convert a type to a relatively short string that is suitable for error messages.
+
+        Mostly behave like format_simple below, but never return an empty string.
+        """
+        s = self.format_simple(typ, verbosity)
+        if s != '':
+            # If format_simple returns a non-trivial result, use that.
+            return s
+        elif isinstance(typ, FunctionLike):
+            func = cast(FunctionLike, typ)
+            if func.is_type_obj():
+                # The type of a type object type can be derived from the
+                # return type (this always works).
+                itype = cast(Instance, func.items()[0].ret_type)
+                result = self.format(itype)
+                if verbosity >= 1:
+                    # In some contexts we want to be explicit about the distinction
+                    # between type X and the type of type object X.
+                    result += ' (type object)'
+                return result
+            elif isinstance(func, CallableType):
+                return_type = strip_quotes(self.format(func.ret_type))
+                if func.is_ellipsis_args:
+                    return 'Callable[..., {}]'.format(return_type)
+                arg_types = [strip_quotes(self.format(t)) for t in func.arg_types]
+                return 'Callable[[{}], {}]'.format(", ".join(arg_types), return_type)
+            else:
+                # Use a simple representation for function types; proper
+                # function types may result in long and difficult-to-read
+                # error messages.
+                return 'overloaded function'
+        else:
+            # Default case; we simply have to return something meaningful here.
+            return 'object'
+
+    def format_simple(self, typ: Type, verbosity: int = 0) -> str:
+        """Convert simple types to string that is suitable for error messages.
+
+        Return "" for complex types. Try to keep the length of the result
+        relatively short to avoid overly long error messages.
+
+        Examples:
+          builtins.int -> 'int'
+          Any type -> 'Any'
+          None -> None
+          callable type -> "" (empty string)
+        """
+        if isinstance(typ, Instance):
+            itype = cast(Instance, typ)
+            # Get the short name of the type.
+            if verbosity >= 2:
+                base_str = itype.type.fullname()
+            else:
+                base_str = itype.type.name()
+            if itype.args == []:
+                # No type arguments. Place the type name in quotes to avoid
+                # potential for confusion: otherwise, the type name could be
+                # interpreted as a normal word.
+                return '"{}"'.format(base_str)
+            elif itype.type.fullname() == 'builtins.tuple':
+                item_type = strip_quotes(self.format(itype.args[0]))
+                return 'Tuple[{}, ...]'.format(item_type)
+            elif itype.type.fullname() in reverse_type_aliases:
+                alias = reverse_type_aliases[itype.type.fullname()]
+                alias = alias.split('.')[-1]
+                items = [strip_quotes(self.format(arg)) for arg in itype.args]
+                return '{}[{}]'.format(alias, ', '.join(items))
+            else:
+                # There are type arguments. Convert the arguments to strings
+                # (using format() instead of format_simple() to avoid empty
+                # strings). If the result is too long, replace arguments
+                # with [...].
+                a = []  # type: List[str]
+                for arg in itype.args:
+                    a.append(strip_quotes(self.format(arg)))
+                s = ', '.join(a)
+                if len((base_str + s)) < 25:
+                    return '{}[{}]'.format(base_str, s)
+                else:
+                    return '{}[...]'.format(base_str)
+        elif isinstance(typ, TypeVarType):
+            # This is similar to non-generic instance types.
+            return '"{}"'.format((cast(TypeVarType, typ)).name)
+        elif isinstance(typ, TupleType):
+            # Prefer the name of the fallback class (if not tuple), as it's more informative.
+            if typ.fallback.type.fullname() != 'builtins.tuple':
+                return self.format_simple(typ.fallback)
+            items = []
+            for t in (cast(TupleType, typ)).items:
+                items.append(strip_quotes(self.format(t)))
+            s = '"Tuple[{}]"'.format(', '.join(items))
+            if len(s) < 40:
+                return s
+            else:
+                return 'tuple(length {})'.format(len(items))
+        elif isinstance(typ, UnionType):
+            items = []
+            for t in (cast(UnionType, typ)).items:
+                items.append(strip_quotes(self.format(t)))
+            s = '"Union[{}]"'.format(', '.join(items))
+            if len(s) < 40:
+                return s
+            else:
+                return 'union type ({} items)'.format(len(items))
+        elif isinstance(typ, Void):
+            return 'None'
+        elif isinstance(typ, NoneTyp):
+            return 'None'
+        elif isinstance(typ, AnyType):
+            return '"Any"'
+        elif isinstance(typ, DeletedType):
+            return '<deleted>'
+        elif typ is None:
+            raise RuntimeError('Type is None')
+        else:
+            # No simple representation for this type that would convey very
+            # useful information. No need to mention the type explicitly in a
+            # message.
+            return ''
+
+    def format_distinctly(self, type1: Type, type2: Type) -> Tuple[str, str]:
+        """Jointly format a pair of types to distinct strings.
+
+        Increase the verbosity of the type strings until they become distinct.
+        """
+        verbosity = 0
+        for verbosity in range(3):
+            str1 = self.format(type1, verbosity=verbosity)
+            str2 = self.format(type2, verbosity=verbosity)
+            if str1 != str2:
+                return (str1, str2)
+        return (str1, str2)
+
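format_distinctly raises the verbosity until the two type strings differ, so that, for example, two unrelated classes that share a short name end up rendered with their fully qualified names (format_simple switches to fullname() at verbosity >= 2). A minimal sketch of the loop, with a hypothetical formatter standing in for self.format:

    from typing import Tuple

    # Hypothetical formatter: short name below verbosity 2, full name at 2.
    fmt = lambda fullname, verbosity: fullname if verbosity >= 2 else fullname.split('.')[-1]

    def distinct(t1: str, t2: str) -> Tuple[str, str]:
        for verbosity in range(3):
            s1, s2 = fmt(t1, verbosity), fmt(t2, verbosity)
            if s1 != s2:
                return s1, s2
        return s1, s2

    assert distinct('mod_a.C', 'mod_b.C') == ('mod_a.C', 'mod_b.C')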
+    #
+    # Specific operations
+    #
+
+    # The following operations are for generating specific error messages. They
+    # get some information as arguments, and they build an error message based
+    # on them.
+
+    def has_no_attr(self, typ: Type, member: str, context: Context) -> Type:
+        """Report a missing or non-accessible member.
+
+        The type argument is the base type. If member corresponds to
+        an operator, use the corresponding operator name in the
+        messages. Return type Any.
+        """
+        if (isinstance(typ, Instance) and
+                (cast(Instance, typ)).type.has_readable_member(member)):
+            self.fail('Member "{}" is not assignable'.format(member), context)
+        elif isinstance(typ, Void):
+            self.check_void(typ, context)
+        elif member == '__contains__':
+            self.fail('Unsupported right operand type for in ({})'.format(
+                self.format(typ)), context)
+        elif member in op_methods.values():
+            # Access to a binary operator member (e.g. _add). This case does
+            # not handle indexing operations.
+            for op, method in op_methods.items():
+                if method == member:
+                    self.unsupported_left_operand(op, typ, context)
+                    break
+        elif member == '__neg__':
+            self.fail('Unsupported operand type for unary - ({})'.format(
+                self.format(typ)), context)
+        elif member == '__pos__':
+            self.fail('Unsupported operand type for unary + ({})'.format(
+                self.format(typ)), context)
+        elif member == '__invert__':
+            self.fail('Unsupported operand type for ~ ({})'.format(
+                self.format(typ)), context)
+        elif member == '__getitem__':
+            # Indexed get.
+            self.fail('Value of type {} is not indexable'.format(
+                self.format(typ)), context)
+        elif member == '__setitem__':
+            # Indexed set.
+            self.fail('Unsupported target for indexed assignment', context)
+        elif member == '__call__':
+            self.fail('{} not callable'.format(self.format(typ)), context)
+        else:
+            # The non-special case: a missing ordinary attribute.
+            if not self.disable_type_names:
+                failed = False
+                if isinstance(typ, Instance) and cast(Instance, typ).type.names:
+                    typ = cast(Instance, typ)
+                    alternatives = set(typ.type.names.keys())
+                    matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives]
+                    matches.extend(best_matches(member, alternatives)[:3])
+                    if matches:
+                        self.fail('{} has no attribute "{}"; maybe {}?'.format(
+                            self.format(typ), member, pretty_or(matches)), context)
+                        failed = True
+                if not failed:
+                    self.fail('{} has no attribute "{}"'.format(self.format(typ),
+                                                                member), context)
+            else:
+                self.fail('Some element of union has no attribute "{}"'.format(
+                    member), context)
+        return AnyType()
+
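The 'maybe ...?' suggestions above combine the COMMON_MISTAKES table with best_matches over the type's attribute names; neither helper appears in this hunk. A hedged sketch of a similar fuzzy lookup built on difflib (suggest is an illustrative stand-in, not the real best_matches):

    import difflib
    from typing import List

    def suggest(member: str, alternatives: List[str]) -> List[str]:
        # Close-enough attribute names, best match first.
        return difflib.get_close_matches(member, alternatives, n=3, cutoff=0.6)

    assert suggest('apend', ['append', 'extend', 'insert']) == ['append']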
+    def unsupported_operand_types(self, op: str, left_type: Any,
+                                  right_type: Any, context: Context) -> None:
+        """Report unsupported operand types for a binary operation.
+
+        Types can be Type objects or strings.
+        """
+        if isinstance(left_type, Void) or isinstance(right_type, Void):
+            self.check_void(left_type, context)
+            self.check_void(right_type, context)
+            return
+        left_str = ''
+        if isinstance(left_type, str):
+            left_str = left_type
+        else:
+            left_str = self.format(left_type)
+
+        right_str = ''
+        if isinstance(right_type, str):
+            right_str = right_type
+        else:
+            right_str = self.format(right_type)
+
+        if self.disable_type_names:
+            msg = 'Unsupported operand types for {} (likely involving Union)'.format(op)
+        else:
+            msg = 'Unsupported operand types for {} ({} and {})'.format(
+                op, left_str, right_str)
+        self.fail(msg, context)
+
+    def unsupported_left_operand(self, op: str, typ: Type,
+                                 context: Context) -> None:
+        if not self.check_void(typ, context):
+            if self.disable_type_names:
+                msg = 'Unsupported left operand type for {} (some union)'.format(op)
+            else:
+                msg = 'Unsupported left operand type for {} ({})'.format(
+                    op, self.format(typ))
+            self.fail(msg, context)
+
+    def type_expected_as_right_operand_of_is(self, context: Context) -> None:
+        self.fail('Type expected as right operand of "is"', context)
+
+    def not_callable(self, typ: Type, context: Context) -> Type:
+        self.fail('{} not callable'.format(self.format(typ)), context)
+        return AnyType()
+
+    def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type,
+                              context: Context) -> None:
+        """Report an error about an incompatible argument type.
+
+        The argument type is arg_type, argument number is n and the
+        callee type is 'callee'. If the callee represents a method
+        that corresponds to an operator, use the corresponding
+        operator name in the messages.
+        """
+        target = ''
+        if callee.name:
+            name = callee.name
+            base = extract_type(name)
+
+            for op, method in op_methods.items():
+                for variant in method, '__r' + method[2:]:
+                    if name.startswith('"{}" of'.format(variant)):
+                        if op == 'in' or variant != method:
+                            # Reversed order of base/argument.
+                            self.unsupported_operand_types(op, arg_type, base,
+                                                           context)
+                        else:
+                            self.unsupported_operand_types(op, base, arg_type,
+                                                           context)
+                        return
+
+            if name.startswith('"__getitem__" of'):
+                self.invalid_index_type(arg_type, base, context)
+                return
+
+            if name.startswith('"__setitem__" of'):
+                if n == 1:
+                    self.invalid_index_type(arg_type, base, context)
+                else:
+                    self.fail(INCOMPATIBLE_TYPES_IN_ASSIGNMENT, context)
+                return
+
+            target = 'to {} '.format(name)
+
+        msg = ''
+        if callee.name == '<list>':
+            name = callee.name[1:-1]
+            n -= 1
+            msg = '{} item {} has incompatible type {}'.format(
+                name[0].upper() + name[1:], n, self.format_simple(arg_type))
+        elif callee.name == '<list-comprehension>':
+            msg = 'List comprehension has incompatible type List[{}]'.format(
+                strip_quotes(self.format(arg_type)))
+        elif callee.name == '<set-comprehension>':
+            msg = 'Set comprehension has incompatible type Set[{}]'.format(
+                strip_quotes(self.format(arg_type)))
+        elif callee.name == '<dictionary-comprehension>':
+            msg = ('{} expression in dictionary comprehension has incompatible type {}; '
+                   'expected type {}').format(
+                'Key' if n == 1 else 'Value',
+                self.format(arg_type),
+                self.format(callee.arg_types[n - 1]))
+        elif callee.name == '<generator>':
+            msg = 'Generator has incompatible item type {}'.format(
+                self.format_simple(arg_type))
+        else:
+            try:
+                expected_type = callee.arg_types[m - 1]
+            except IndexError:  # Varargs callees
+                expected_type = callee.arg_types[-1]
+            arg_type_str, expected_type_str = self.format_distinctly(arg_type, expected_type)
+            msg = 'Argument {} {}has incompatible type {}; expected {}'.format(
+                n, target, arg_type_str, expected_type_str)
+        self.fail(msg, context)
+
+    def invalid_index_type(self, index_type: Type, base_str: str,
+                           context: Context) -> None:
+        self.fail('Invalid index type {} for {}'.format(
+            self.format(index_type), base_str), context)
+
+    def too_few_arguments(self, callee: CallableType, context: Context,
+                          argument_names: List[str]) -> None:
+        if (argument_names is not None and not all(k is None for k in argument_names)
+                and len(argument_names) >= 1):
+            diff = [k for k in callee.arg_names if k not in argument_names]
+            if len(diff) == 1:
+                msg = 'Missing positional argument'
+            else:
+                msg = 'Missing positional arguments'
+            if callee.name and diff:
+                msg += ' "{}" in call to {}'.format('", "'.join(diff), callee.name)
+        else:
+            msg = 'Too few arguments'
+            if callee.name:
+                msg += ' for {}'.format(callee.name)
+        self.fail(msg, context)
+
+    def too_many_arguments(self, callee: CallableType, context: Context) -> None:
+        msg = 'Too many arguments'
+        if callee.name:
+            msg += ' for {}'.format(callee.name)
+        self.fail(msg, context)
+
+    def too_many_positional_arguments(self, callee: CallableType,
+                                      context: Context) -> None:
+        msg = 'Too many positional arguments'
+        if callee.name:
+            msg += ' for {}'.format(callee.name)
+        self.fail(msg, context)
+
+    def unexpected_keyword_argument(self, callee: CallableType, name: str,
+                                    context: Context) -> None:
+        msg = 'Unexpected keyword argument "{}"'.format(name)
+        if callee.name:
+            msg += ' for {}'.format(callee.name)
+        self.fail(msg, context)
+        if callee.definition:
+            fullname = callee.definition.fullname()
+            if fullname is not None and '.' in fullname:
+                module_name = fullname.rsplit('.', 1)[0]
+                path = self.modules[module_name].path
+                self.note('{} defined here'.format(callee.name), callee.definition, file=path)
+
+    def duplicate_argument_value(self, callee: CallableType, index: int,
+                                 context: Context) -> None:
+        self.fail('{} gets multiple values for keyword argument "{}"'.
+                  format(capitalize(callable_name(callee)),
+                         callee.arg_names[index]), context)
+
+    def does_not_return_value(self, void_type: Type, context: Context) -> None:
+        """Report an error about a void type in a non-void context.
+
+        The first argument must be a void type. If the void type has a
+        source in it, report it in the error message. This allows
+        giving messages such as 'Foo does not return a value'.
+        """
+        if (cast(Void, void_type)).source is None:
+            self.fail('Function does not return a value', context)
+        else:
+            self.fail('{} does not return a value'.format(
+                capitalize((cast(Void, void_type)).source)), context)
+
+    def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None:
+        """Report an error about using an deleted type as an rvalue."""
+        if typ.source is None:
+            s = ""
+        else:
+            s = " '{}'".format(typ.source)
+        self.fail('Trying to read deleted variable{}'.format(s), context)
+
+    def deleted_as_lvalue(self, typ: DeletedType, context: Context) -> None:
+        """Report an error about using an deleted type as an lvalue.
+
+        Currently, this only occurs when trying to assign to an
+        exception variable outside the local except: block.
+        """
+        if typ.source is None:
+            s = ""
+        else:
+            s = " '{}'".format(typ.source)
+        self.fail('Assignment to variable{} outside except: block'.format(s), context)
+
+    def no_variant_matches_arguments(self, overload: Overloaded, arg_types: List[Type],
+                                     context: Context) -> None:
+        if overload.name():
+            self.fail('No overload variant of {} matches argument types {}'
+                      .format(overload.name(), arg_types), context)
+        else:
+            self.fail('No overload variant matches argument types {}'.format(arg_types), context)
+
+    def function_variants_overlap(self, n1: int, n2: int,
+                                  context: Context) -> None:
+        self.fail('Function signature variants {} and {} overlap'.format(
+            n1 + 1, n2 + 1), context)
+
+    def invalid_cast(self, target_type: Type, source_type: Type,
+                     context: Context) -> None:
+        if not self.check_void(source_type, context):
+            self.fail('Cannot cast from {} to {}'.format(
+                self.format(source_type), self.format(target_type)), context)
+
+    def wrong_number_values_to_unpack(self, provided: int, expected: int,
+                                      context: Context) -> None:
+        if provided < expected:
+            if provided == 1:
+                self.fail('Need more than 1 value to unpack ({} expected)'.format(expected),
+                          context)
+            else:
+                self.fail('Need more than {} values to unpack ({} expected)'.format(
+                    provided, expected), context)
+        elif provided > expected:
+            self.fail('Too many values to unpack ({} expected, {} provided)'.format(
+                expected, provided), context)
+
+    def type_not_iterable(self, type: Type, context: Context) -> None:
+        self.fail('\'{}\' object is not iterable'.format(type), context)
+
+    def incompatible_operator_assignment(self, op: str,
+                                         context: Context) -> None:
+        self.fail('Result type of {} incompatible in assignment'.format(op),
+                  context)
+
+    def incompatible_value_count_in_assignment(self, lvalue_count: int,
+                                               rvalue_count: int,
+                                               context: Context) -> None:
+        if rvalue_count < lvalue_count:
+            self.fail('Need {} values to assign'.format(lvalue_count), context)
+        elif rvalue_count > lvalue_count:
+            self.fail('Too many values to assign', context)
+
+    def type_incompatible_with_supertype(self, name: str, supertype: TypeInfo,
+                                         context: Context) -> None:
+        self.fail('Type of "{}" incompatible with supertype "{}"'.format(
+            name, supertype.name), context)
+
+    def signature_incompatible_with_supertype(
+            self, name: str, name_in_super: str, supertype: str,
+            context: Context) -> None:
+        target = self.override_target(name, name_in_super, supertype)
+        self.fail('Signature of "{}" incompatible with {}'.format(
+            name, target), context)
+
+    def argument_incompatible_with_supertype(
+            self, arg_num: int, name: str, name_in_supertype: str,
+            supertype: str, context: Context) -> None:
+        target = self.override_target(name, name_in_supertype, supertype)
+        self.fail('Argument {} of "{}" incompatible with {}'
+                  .format(arg_num, name, target), context)
+
+    def return_type_incompatible_with_supertype(
+            self, name: str, name_in_supertype: str, supertype: str,
+            context: Context) -> None:
+        target = self.override_target(name, name_in_supertype, supertype)
+        self.fail('Return type of "{}" incompatible with {}'
+                  .format(name, target), context)
+
+    def override_target(self, name: str, name_in_super: str,
+                        supertype: str) -> str:
+        target = 'supertype "{}"'.format(supertype)
+        if name_in_super != name:
+            target = '"{}" of {}'.format(name_in_super, target)
+        return target
+
+    def boolean_return_value_expected(self, method: str,
+                                      context: Context) -> None:
+        self.fail('Boolean return value expected for method "{}"'.format(
+            method), context)
+
+    def incompatible_type_application(self, expected_arg_count: int,
+                                      actual_arg_count: int,
+                                      context: Context) -> None:
+        if expected_arg_count == 0:
+            self.fail('Type application targets a non-generic function',
+                      context)
+        elif actual_arg_count > expected_arg_count:
+            self.fail('Type application has too many types ({} expected)'
+                      .format(expected_arg_count), context)
+        else:
+            self.fail('Type application has too few types ({} expected)'
+                      .format(expected_arg_count), context)
+
+    def incompatible_array_item_type(self, typ: Type, index: int,
+                                     context: Context) -> None:
+        self.fail('Array item {} has incompatible type {}'.format(
+            index, self.format(typ)), context)
+
+    def could_not_infer_type_arguments(self, callee_type: CallableType, n: int,
+                                       context: Context) -> None:
+        if callee_type.name and n > 0:
+            self.fail('Cannot infer type argument {} of {}'.format(
+                n, callee_type.name), context)
+        else:
+            self.fail('Cannot infer function type argument', context)
+
+    def invalid_var_arg(self, typ: Type, context: Context) -> None:
+        self.fail('List or tuple expected as variable arguments', context)
+
+    def invalid_keyword_var_arg(self, typ: Type, context: Context) -> None:
+        if isinstance(typ, Instance) and (
+                (cast(Instance, typ)).type.fullname() == 'builtins.dict'):
+            self.fail('Keywords must be strings', context)
+        else:
+            self.fail('Argument after ** must be a dictionary',
+                      context)
+
+    def incomplete_type_var_match(self, member: str, context: Context) -> None:
+        self.fail('"{}" has incomplete match to supertype type variable'
+                  .format(member), context)
+
+    def not_implemented(self, msg: str, context: Context) -> Type:
+        self.fail('Feature not implemented yet ({})'.format(msg), context)
+        return AnyType()
+
+    def undefined_in_superclass(self, member: str, context: Context) -> None:
+        self.fail('"{}" undefined in superclass'.format(member), context)
+
+    def check_void(self, typ: Type, context: Context) -> bool:
+        """If type is void, report an error such as '.. does not
+        return a value' and return True. Otherwise, return False.
+        """
+        if isinstance(typ, Void):
+            self.does_not_return_value(typ, context)
+            return True
+        else:
+            return False
+
+    def too_few_string_formatting_arguments(self, context: Context) -> None:
+        self.fail('Not enough arguments for format string', context)
+
+    def too_many_string_formatting_arguments(self, context: Context) -> None:
+        self.fail('Not all arguments converted during string formatting', context)
+
+    def incomplete_conversion_specifier_format(self, context: Context) -> None:
+        self.fail('Incomplete format', context)
+
+    def unsupported_placeholder(self, placeholder: str, context: Context) -> None:
+        self.fail('Unsupported format character \'%s\'' % placeholder, context)
+
+    def string_interpolation_with_star_and_key(self, context: Context) -> None:
+        self.fail('String interpolation contains both stars and mapping keys', context)
+
+    def requires_int_or_char(self, context: Context) -> None:
+        self.fail('%c requires int or char', context)
+
+    def key_not_in_mapping(self, key: str, context: Context) -> None:
+        self.fail('Key \'%s\' not found in mapping' % key, context)
+
+    def string_interpolation_mixing_key_and_non_keys(self, context: Context) -> None:
+        self.fail('String interpolation mixes specifier with and without mapping keys', context)
+
+    def cannot_determine_type(self, name: str, context: Context) -> None:
+        self.fail("Cannot determine type of '%s'" % name, context)
+
+    def cannot_determine_type_in_base(self, name: str, base: str, context: Context) -> None:
+        self.fail("Cannot determine type of '%s' in base class '%s'" % (name, base), context)
+
+    def invalid_method_type(self, sig: CallableType, context: Context) -> None:
+        self.fail('Invalid method type', context)
+
+    def incompatible_conditional_function_def(self, defn: FuncDef) -> None:
+        self.fail('All conditional function variants must have identical '
+                  'signatures', defn)
+
+    def cannot_instantiate_abstract_class(self, class_name: str,
+                                          abstract_attributes: List[str],
+                                          context: Context) -> None:
+        attrs = format_string_list("'%s'" % a for a in abstract_attributes[:5])
+        self.fail("Cannot instantiate abstract class '%s' with abstract "
+                  "attribute%s %s" % (class_name, plural_s(abstract_attributes),
+                                   attrs),
+                  context)
+
+    def base_class_definitions_incompatible(self, name: str, base1: TypeInfo,
+                                            base2: TypeInfo,
+                                            context: Context) -> None:
+        self.fail('Definition of "{}" in base class "{}" is incompatible '
+                  'with definition in base class "{}"'.format(
+                      name, base1.name(), base2.name()), context)
+
+    def cant_assign_to_method(self, context: Context) -> None:
+        self.fail(CANNOT_ASSIGN_TO_METHOD, context)
+
+    def read_only_property(self, name: str, type: TypeInfo,
+                           context: Context) -> None:
+        self.fail('Property "{}" defined in "{}" is read-only'.format(
+            name, type.name()), context)
+
+    def incompatible_typevar_value(self, callee: CallableType, index: int,
+                                   type: Type, context: Context) -> None:
+        self.fail('Type argument {} of {} has incompatible value {}'.format(
+            index, callable_name(callee), self.format(type)), context)
+
+    def overloaded_signatures_overlap(self, index1: int, index2: int,
+                                      context: Context) -> None:
+        self.fail('Overloaded function signatures {} and {} overlap with '
+                  'incompatible return types'.format(index1, index2), context)
+
+    def invalid_reverse_operator_signature(self, reverse: str, other: str,
+                                           context: Context) -> None:
+        self.fail('"Any" return type expected since argument to {} '
+                  'does not support {}'.format(reverse, other), context)
+
+    def reverse_operator_method_with_any_arg_must_return_any(
+            self, method: str, context: Context) -> None:
+        self.fail('"Any" return type expected since argument to {} '
+                  'has type "Any"'.format(method), context)
+
+    def operator_method_signatures_overlap(
+            self, reverse_class: str, reverse_method: str, forward_class: str,
+            forward_method: str, context: Context) -> None:
+        self.fail('Signatures of "{}" of "{}" and "{}" of "{}" '
+                  'are unsafely overlapping'.format(
+                      reverse_method, reverse_class,
+                      forward_method, forward_class),
+                  context)
+
+    def forward_operator_not_callable(
+            self, forward_method: str, context: Context) -> None:
+        self.fail('Forward operator "{}" is not callable'.format(
+            forward_method), context)
+
+    def signatures_incompatible(self, method: str, other_method: str,
+                                context: Context) -> None:
+        self.fail('Signatures of "{}" and "{}" are incompatible'.format(
+            method, other_method), context)
+
+    def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type:
+        text = self.format(expr) if self.format(expr) != 'object' else expr
+        self.fail('"yield from" can\'t be applied to {}'.format(text), context)
+        return AnyType()
+
+    def invalid_signature(self, func_type: Type, context: Context) -> None:
+        self.fail('Invalid signature "{}"'.format(func_type), context)
+
+
+def capitalize(s: str) -> str:
+    """Capitalize the first character of a string."""
+    if s == '':
+        return ''
+    else:
+        return s[0].upper() + s[1:]
+
+
+def extract_type(name: str) -> str:
+    """If the argument is the name of a method (of form C.m), return
+    the type portion in quotes (e.g. "y"). Otherwise, return the string
+    unmodified.
+    """
+    name = re.sub('^"[a-zA-Z0-9_]+" of ', '', name)
+    return name
+
+
+def strip_quotes(s: str) -> str:
+    """Strip a double quote at the beginning and end of the string, if any."""
+    s = re.sub('^"', '', s)
+    s = re.sub('"$', '', s)
+    return s
+
+
+def plural_s(s: Sequence[Any]) -> str:
+    if len(s) > 1:
+        return 's'
+    else:
+        return ''
+
+
+def format_string_list(s: Iterable[str]) -> str:
+    l = list(s)
+    assert len(l) > 0
+    if len(l) == 1:
+        return l[0]
+    else:
+        return '%s and %s' % (', '.join(l[:-1]), l[-1])
+
+
+def callable_name(type: CallableType) -> str:
+    if type.name:
+        return type.name
+    else:
+        return 'function'
+
+
+def temp_message_builder() -> MessageBuilder:
+    """Return a message builder usable for throwaway errors (which may not format properly)."""
+    return MessageBuilder(Errors(), {})
+
+
+# For hard-coding suggested missing member alternatives.
+COMMON_MISTAKES = {
+    'add': ('append', 'extend'),
+}  # type: Dict[str, Sequence[str]]
+
+
+def best_matches(current: str, options: Iterable[str]) -> List[str]:
+    ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options}
+    return sorted((o for o in options if ratios[o] > 0.75),
+                  reverse=True, key=lambda v: (ratios[v], v))
+
+
+def pretty_or(args: List[str]) -> str:
+    quoted = ['"' + a + '"' for a in args]
+    if len(quoted) == 1:
+        return quoted[0]
+    if len(quoted) == 2:
+        return "{} or {}".format(quoted[0], quoted[1])
+    return ", ".join(quoted[:-1]) + ", or " + quoted[-1]
diff --git a/mypy/moduleinfo.py b/mypy/moduleinfo.py
new file mode 100644
index 0000000..e0d3246
--- /dev/null
+++ b/mypy/moduleinfo.py
@@ -0,0 +1,560 @@
+"""Collection of names of notable Python library modules.
+
+Both standard library and third party modules are included. The
+selection criteria for third party modules are somewhat arbitrary.
+
+For packages we usually just include the top-level package name, but
+sometimes some or all submodules are enumerated. In the latter case, if
+the top-level name is included, we include all possible submodules
+(this is an implementation limitation).
+
+These are used to give more useful error messages when there is
+no stub for a module.
+"""
+
+from typing import Set
+
+
+third_party_modules = {
+    # From Python 3 Wall of Superpowers (https://python3wos.appspot.com/)
+    'simplejson',
+    'requests',
+    'six',
+    'pip',
+    'virtualenv',
+    'boto',
+    'setuptools',
+    'dateutil',
+    'pytz',
+    'pyasn1',
+    'nose',
+    'yaml',
+    'lxml',
+    'jinja2',
+    'docutils',
+    'markupsafe',
+    'paramiko',
+    'Crypto',
+    'rsa',
+    'django',
+    'coverage',
+    'pika',
+    'colorama',
+    'ecdsa',
+    'psycopg2',
+    'httplib2',
+    'mock',
+    'pycparser',
+    'pep8',
+    'redis',
+    'pymongo',
+    'sqlalchemy',
+    'pygments',
+    'werkzeug',
+    'selenium',
+    'supervisor',
+    'zc.buildout',
+    'meld3',
+    'PIL',
+    'paste',
+    'flask',
+    '_mysql',
+    'MySQLdb',
+    'greenlet',
+    'carbon',
+    'psutil',
+    'paste.deploy',
+    'kombu',
+    'babel',
+    'anyjson',
+    'py',
+    'OpenSSL',
+    'numpy',
+    'sphinx',
+    'tornado',
+    'zope.interface',
+    'itsdangerous',
+    'fabric',
+    'amqp',
+    'mako',
+    'pyflakes',
+    'concurrent.futures',
+    'pyparsing',
+    'celery',
+    'unittest2',
+    'setuptools_git',
+    'decorator',
+    'ordereddict',
+    'urllib3',
+    'iso8601',
+    'gunicorn',
+    'prettytable',
+    'webob',
+    'flake8',
+    'pytest',
+    'billiard',
+    'backports.ssl_match_hostname',
+    'south',
+    'gevent',
+    'netaddr',
+    'pylint',
+    'logilab.common',
+    'twisted',
+    'msgpack',
+    'blessings',
+    'oauth2client',
+    'ujson',
+    'mozrunner',
+    'googleapiclient',
+    'markdown',
+    'html5lib',
+    'isodate',
+    'tox',
+    'mozdevice',
+    'cython',
+    'raven',
+    'mozprocess',
+    'mozprofile',
+    'mozinfo',
+    'mozlog',
+    'pandas',
+    'lockfile',
+    'pycurl',
+    'novaclient',
+    'ply',
+    'eventlet',
+    'suds',
+    'zmq',
+    'memcache',
+    'netifaces',
+    'repoze.lru',
+    'testtools',
+    'cups',
+    'oauth2',
+    'scipy',
+    'thrift',
+    'statsd',
+    'BeautifulSoup',
+    'bs4',
+    'sklearn',
+    'cmd2',
+    'alembic',
+    'gflags',
+    'gflags_validators',
+    'sqlparse',
+    'debug_toolbar',
+    'cov_core',
+    'unidecode',
+    'websocket',
+    'webtest',
+    'django_extensions',
+    'networkx',
+    'newrelic',
+    'pymysql',
+    'pytest_cov',
+    'matplotlib',
+    'djcelery',
+    'google.protobuf',
+    'compressor',
+    'aspen',
+    'subunit',
+    'libcloud',
+    'versiontools',
+    'chardet',
+    'blinker',
+    'pystache',
+    'passlib',
+    'xlrd',
+    'pygeoip',
+    'configobj',
+    'cjson',
+    'testrepository',
+    'zc.recipe.egg',
+    'jsonpointer',
+    'amqplib',
+    'rdflib',
+    'SPARQLWrapper',
+    'jsonpatch',
+    'mrjob',
+    'pexpect',
+    'google.apputils',
+    'ansible',
+    'django_nose',
+    'd2to1',
+    'routes',
+    'tempita',
+    'testscenarios',
+    'demjson',
+    'openid',
+    'uwsgidecorators',
+    'wtforms',
+    'dns',
+    'alabaster',
+    'M2Crypto',
+    'reportlab',
+    'feedparser',
+    'ldap',
+    'hgtools',
+    'whoosh',
+    'path',
+    'nosexcover',
+    'iptools',
+    'xlwt',
+    'keyring',
+    'termcolor',
+    'flask_sqlalchemy',
+    'httpretty',
+    'magic',
+    'leaderboard',
+    'sqlobject',
+    'nltk',
+
+    # Skipped (name considered too generic):
+    # - fixtures
+    # - migrate (from sqlalchemy-migrate)
+    # - git (GitPython)
+
+    # Other
+    'formencode',
+    'pkg_resources',
+    'wx',
+    'gi.repository',
+    'pygtk',
+    'gtk',
+    'PyQt4',
+    'PyQt5',
+}
+
+# Modules and packages common to Python 2.7 and 3.x.
+common_std_lib_modules = {
+    'abc',
+    'aifc',
+    'antigravity',
+    'argparse',
+    'array',
+    'ast',
+    'asynchat',
+    'asyncore',
+    'audioop',
+    'base64',
+    'bdb',
+    'binascii',
+    'binhex',
+    'bisect',
+    'bz2',
+    'cProfile',
+    'calendar',
+    'cgi',
+    'cgitb',
+    'chunk',
+    'cmath',
+    'cmd',
+    'code',
+    'codecs',
+    'codeop',
+    'collections',
+    'colorsys',
+    'compileall',
+    'contextlib',
+    'copy',
+    'crypt',
+    'csv',
+    'ctypes',
+    'curses',
+    'datetime',
+    'decimal',
+    'difflib',
+    'dis',
+    'doctest',
+    'dummy_threading',
+    'email',
+    'fcntl',
+    'filecmp',
+    'fileinput',
+    'fnmatch',
+    'formatter',
+    'fractions',
+    'ftplib',
+    'functools',
+    'genericpath',
+    'getopt',
+    'getpass',
+    'gettext',
+    'glob',
+    'grp',
+    'gzip',
+    'hashlib',
+    'heapq',
+    'hmac',
+    'imaplib',
+    'imghdr',
+    'importlib',
+    'inspect',
+    'io',
+    'json',
+    'keyword',
+    'linecache',
+    'locale',
+    'logging',
+    'macpath',
+    'macurl2path',
+    'mailbox',
+    'mailcap',
+    'math',
+    'mimetypes',
+    'mmap',
+    'modulefinder',
+    'msilib',
+    'multiprocessing',
+    'netrc',
+    'nis',
+    'nntplib',
+    'ntpath',
+    'nturl2path',
+    'numbers',
+    'opcode',
+    'operator',
+    'optparse',
+    'os',
+    'ossaudiodev',
+    'parser',
+    'pdb',
+    'pickle',
+    'pickletools',
+    'pipes',
+    'pkgutil',
+    'platform',
+    'plistlib',
+    'poplib',
+    'posixpath',
+    'pprint',
+    'profile',
+    'pstats',
+    'pty',
+    'py_compile',
+    'pyclbr',
+    'pydoc',
+    'pydoc_data',
+    'pyexpat',
+    'quopri',
+    'random',
+    're',
+    'resource',
+    'rlcompleter',
+    'runpy',
+    'sched',
+    'select',
+    'shelve',
+    'shlex',
+    'shutil',
+    'site',
+    'smtpd',
+    'smtplib',
+    'sndhdr',
+    'socket',
+    'spwd',
+    'sqlite3',
+    'sqlite3.dbapi2',
+    'sqlite3.dump',
+    'sre_compile',
+    'sre_constants',
+    'sre_parse',
+    'ssl',
+    'stat',
+    'string',
+    'stringprep',
+    'struct',
+    'subprocess',
+    'sunau',
+    'symbol',
+    'symtable',
+    'sysconfig',
+    'syslog',
+    'tabnanny',
+    'tarfile',
+    'telnetlib',
+    'tempfile',
+    'termios',
+    'textwrap',
+    'this',
+    'threading',
+    'timeit',
+    'token',
+    'tokenize',
+    'trace',
+    'traceback',
+    'tty',
+    'types',
+    'unicodedata',
+    'unittest',
+    'urllib',
+    'uu',
+    'uuid',
+    'warnings',
+    'wave',
+    'weakref',
+    'webbrowser',
+    'wsgiref',
+    'xdrlib',
+    'xml.dom',
+    'xml.dom.NodeFilter',
+    'xml.dom.domreg',
+    'xml.dom.expatbuilder',
+    'xml.dom.minicompat',
+    'xml.dom.minidom',
+    'xml.dom.pulldom',
+    'xml.dom.xmlbuilder',
+    'xml.etree',
+    'xml.etree.ElementInclude',
+    'xml.etree.ElementPath',
+    'xml.etree.ElementTree',
+    'xml.etree.cElementTree',
+    'xml.parsers',
+    'xml.parsers.expat',
+    'xml.sax',
+    'xml.sax._exceptions',
+    'xml.sax.expatreader',
+    'xml.sax.handler',
+    'xml.sax.saxutils',
+    'xml.sax.xmlreader',
+    'zipfile',
+    'zlib',
+}
+
+# Python 2 standard library modules.
+python2_std_lib_modules = common_std_lib_modules | {
+    'BaseHTTPServer',
+    'Bastion',
+    'CGIHTTPServer',
+    'ConfigParser',
+    'Cookie',
+    'DocXMLRPCServer',
+    'HTMLParser',
+    'MimeWriter',
+    'Queue',
+    'SimpleHTTPServer',
+    'SimpleXMLRPCServer',
+    'SocketServer',
+    'StringIO',
+    'UserDict',
+    'UserList',
+    'UserString',
+    'anydbm',
+    'atexit',
+    'audiodev',
+    'bsddb',
+    'cPickle',
+    'cStringIO',
+    'commands',
+    'cookielib',
+    'copy_reg',
+    'curses.wrapper',
+    'dbhash',
+    'dircache',
+    'dumbdbm',
+    'dummy_thread',
+    'fpformat',
+    'future_builtins',
+    'hotshot',
+    'htmlentitydefs',
+    'htmllib',
+    'httplib',
+    'ihooks',
+    'imputil',
+    'itertools',
+    'linuxaudiodev',
+    'markupbase',
+    'md5',
+    'mhlib',
+    'mimetools',
+    'mimify',
+    'multifile',
+    'multiprocessing.forking',
+    'mutex',
+    'new',
+    'os2emxpath',
+    'popen2',
+    'posixfile',
+    'repr',
+    'rexec',
+    'rfc822',
+    'robotparser',
+    'sets',
+    'sgmllib',
+    'sha',
+    'sre',
+    'statvfs',
+    'stringold',
+    'strop',
+    'sunaudio',
+    'time',
+    'toaiff',
+    'urllib2',
+    'urlparse',
+    'user',
+    'whichdb',
+    'xmllib',
+    'xmlrpclib',
+}
+
+# Python 3 standard library modules (based on Python 3.5.0).
+python3_std_lib_modules = common_std_lib_modules | {
+    'asyncio',
+    'collections.abc',
+    'concurrent',
+    'concurrent.futures',
+    'configparser',
+    'copyreg',
+    'dbm',
+    'ensurepip',
+    'enum',
+    'html',
+    'http',
+    'imp',
+    'ipaddress',
+    'lzma',
+    'pathlib',
+    'queue',
+    'readline',
+    'reprlib',
+    'selectors',
+    'signal',
+    'socketserver',
+    'statistics',
+    'tkinter',
+    'tracemalloc',
+    'turtle',
+    'turtledemo',
+    'typing',
+    'unittest.mock',
+    'urllib.error',
+    'urllib.parse',
+    'urllib.request',
+    'urllib.response',
+    'urllib.robotparser',
+    'venv',
+    'xmlrpc',
+    'xxlimited',
+    'zipapp',
+}
+
+
+def is_third_party_module(id: str) -> bool:
+    return is_in_module_collection(third_party_modules, id)
+
+
+def is_py2_std_lib_module(id: str) -> bool:
+    return is_in_module_collection(python2_std_lib_modules, id)
+
+
+def is_py3_std_lib_module(id: str) -> bool:
+    return is_in_module_collection(python3_std_lib_modules, id)
+
+
+def is_in_module_collection(collection: Set[str], id: str) -> bool:
+    components = id.split('.')
+    for prefix_length in range(1, len(components) + 1):
+        if '.'.join(components[:prefix_length]) in collection:
+            return True
+    return False
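+
+
+# A small illustration of the prefix matching above: is_third_party_module(
+# 'django.db.models') returns True because the prefix 'django' is listed in
+# third_party_modules, while an unlisted top-level name (say, a hypothetical
+# 'djangox') returns False.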
diff --git a/mypy/nodes.py b/mypy/nodes.py
new file mode 100644
index 0000000..6857a16
--- /dev/null
+++ b/mypy/nodes.py
@@ -0,0 +1,1819 @@
+"""Abstract syntax tree node classes (i.e. parse tree)."""
+
+import os
+import re
+from abc import abstractmethod, ABCMeta
+
+from typing import (
+    Any, overload, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional
+)
+
+from mypy.lex import Token
+import mypy.strconv
+from mypy.visitor import NodeVisitor
+from mypy.util import dump_tagged, short_type
+
+
+class Context(metaclass=ABCMeta):
+    """Base type for objects that are valid as error message locations."""
+    @abstractmethod
+    def get_line(self) -> int: pass
+
+
+if False:
+    # Break an import cycle; this import is only needed for type checking.
+    import mypy.types
+
+
+T = TypeVar('T')
+
+
+# Symbol table node kinds
+#
+# TODO rename to use more descriptive names
+
+LDEF = 0  # type: int
+GDEF = 1  # type: int
+MDEF = 2  # type: int
+MODULE_REF = 3  # type: int
+# A type variable declared using TypeVar(...) has kind UNBOUND_TVAR. It's not
+# valid as a type. A type variable is valid as a type (kind BOUND_TVAR) within
+# (1) a generic class that uses the type variable as a type argument or
+# (2) a generic function that refers to the type variable in its signature.
+UNBOUND_TVAR = 4  # type: int
+BOUND_TVAR = 5  # type: int
+TYPE_ALIAS = 6  # type: int
+# Placeholder for a name imported via 'from ... import'. The second phase of
+# semantic analysis will replace this with the actual imported reference. This
+# is needed so that we can detect whether a name has been imported.
+UNBOUND_IMPORTED = 7  # type: int
+
+
+LITERAL_YES = 2
+LITERAL_TYPE = 1
+LITERAL_NO = 0
+
+node_kinds = {
+    LDEF: 'Ldef',
+    GDEF: 'Gdef',
+    MDEF: 'Mdef',
+    MODULE_REF: 'ModuleRef',
+    UNBOUND_TVAR: 'UnboundTvar',
+    BOUND_TVAR: 'Tvar',
+    TYPE_ALIAS: 'TypeAlias',
+    UNBOUND_IMPORTED: 'UnboundImported',
+}
+
+
+implicit_module_attrs = {'__name__': '__builtins__.str',
+                         '__doc__': '__builtins__.str',
+                         '__file__': '__builtins__.str',
+                         '__package__': '__builtins__.str'}
+
+
+type_aliases = {
+    'typing.List': '__builtins__.list',
+    'typing.Dict': '__builtins__.dict',
+    'typing.Set': '__builtins__.set',
+}
+
+reverse_type_aliases = dict((name.replace('__builtins__', 'builtins'), alias)
+                            for alias, name in type_aliases.items())  # type: Dict[str, str]
+
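+# For example, since type_aliases maps 'typing.List' to '__builtins__.list',
+# reverse_type_aliases maps 'builtins.list' back to 'typing.List'.
+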
+
+class Node(Context):
+    """Common base class for all non-type parse tree nodes."""
+
+    line = -1
+
+    literal = LITERAL_NO
+    literal_hash = None  # type: Any
+
+    def __str__(self) -> str:
+        ans = self.accept(mypy.strconv.StrConv())
+        if ans is None:
+            return repr(self)
+        return ans
+
+    def set_line(self, target: Union[Token, 'Node', int]) -> 'Node':
+        if isinstance(target, int):
+            self.line = target
+        else:
+            self.line = target.line
+        return self
+
+    def get_line(self) -> int:
+        # TODO this should be just 'line'
+        return self.line
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        raise RuntimeError('Not implemented')
+
+
+class SymbolNode(Node):
+    # Nodes that can be stored in a symbol table.
+
+    # TODO do not use methods for these
+
+    @abstractmethod
+    def name(self) -> str: pass
+
+    @abstractmethod
+    def fullname(self) -> str: pass
+
+
+class MypyFile(SymbolNode):
+    """The abstract syntax tree of a single source file."""
+
+    # Module name ('__main__' for initial file)
+    _name = None      # type: str
+    # Fully qualified module name
+    _fullname = None  # type: str
+    # Path to the file (None if not known)
+    path = ''
+    # Top-level definitions and statements
+    defs = None  # type: List[Node]
+    # Is there a UTF-8 BOM at the start?
+    is_bom = False
+    names = None  # type: SymbolTable
+    # All import nodes within the file (also ones within functions etc.)
+    imports = None  # type: List[ImportBase]
+    # Lines to ignore when checking
+    ignored_lines = None  # type: Set[int]
+    # Is this file represented by a stub file (.pyi)?
+    is_stub = False
+    # Do weak typing globally in the file?
+    weak_opts = None  # type: Set[str]
+
+    def __init__(self,
+                 defs: List[Node],
+                 imports: List['ImportBase'],
+                 is_bom: bool = False,
+                 ignored_lines: Set[int] = None,
+                 weak_opts: Set[str] = None) -> None:
+        self.defs = defs
+        self.line = 1  # Dummy line number
+        self.imports = imports
+        self.is_bom = is_bom
+        self.weak_opts = weak_opts
+        if ignored_lines:
+            self.ignored_lines = ignored_lines
+        else:
+            self.ignored_lines = set()
+
+    def name(self) -> str:
+        return self._name
+
+    def fullname(self) -> str:
+        return self._fullname
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_mypy_file(self)
+
+    def is_package_init_file(self) -> bool:
+        return not (self.path is None) and len(self.path) != 0 \
+            and os.path.basename(self.path).startswith('__init__.')
+
+
+class ImportBase(Node):
+    """Base class for all import statements."""
+    is_unreachable = False
+    # If an import replaces existing definitions, we construct dummy assignment
+    # statements that assign the imported names to the names in the current scope,
+    # for type checking purposes. Example:
+    #
+    #     x = 1
+    #     from m import x   <-- add assignment representing "x = m.x"
+    assignments = None  # type: List[AssignmentStmt]
+
+    def __init__(self) -> None:
+        self.assignments = []
+
+
+class Import(ImportBase):
+    """import m [as n]"""
+
+    ids = None  # type: List[Tuple[str, Optional[str]]]     # (module id, as id)
+
+    def __init__(self, ids: List[Tuple[str, Optional[str]]]) -> None:
+        super().__init__()
+        self.ids = ids
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_import(self)
+
+
+class ImportFrom(ImportBase):
+    """from m import x [as y], ..."""
+
+    names = None  # type: List[Tuple[str, Optional[str]]]  # Tuples (name, as name)
+
+    def __init__(self, id: str, relative: int, names: List[Tuple[str, Optional[str]]]) -> None:
+        super().__init__()
+        self.id = id
+        self.names = names
+        self.relative = relative
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_import_from(self)
+
+
+class ImportAll(ImportBase):
+    """from m import *"""
+
+    def __init__(self, id: str, relative: int) -> None:
+        super().__init__()
+        self.id = id
+        self.relative = relative
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_import_all(self)
+
+
+class FuncBase(SymbolNode):
+    """Abstract base class for function-like nodes"""
+
+    # Type signature. This is usually CallableType or Overloaded, but it can
+    # be something else for decorated functions.
+    type = None  # type: mypy.types.Type
+    # If method, reference to TypeInfo
+    info = None  # type: TypeInfo
+    is_property = False
+    _fullname = None  # type: str       # Name with module prefix
+
+    @abstractmethod
+    def name(self) -> str: pass
+
+    def fullname(self) -> str:
+        return self._fullname
+
+    def is_method(self) -> bool:
+        return bool(self.info)
+
+
+class OverloadedFuncDef(FuncBase):
+    """A logical node representing all the variants of an overloaded function.
+
+    This node has no explicit representation in the source program.
+    Overloaded variants must be consecutive in the source file.
+    """
+
+    items = None  # type: List[Decorator]
+
+    def __init__(self, items: List['Decorator']) -> None:
+        self.items = items
+        self.set_line(items[0].line)
+
+    def name(self) -> str:
+        return self.items[0].func.name()
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_overloaded_func_def(self)
+
+
+class Argument(Node):
+    """A single argument in a FuncItem."""
+
+    def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]',
+            initializer: Optional[Node], kind: int,
+            initialization_statement: Optional['AssignmentStmt'] = None) -> None:
+        self.variable = variable
+
+        self.type_annotation = type_annotation
+        self.initializer = initializer
+
+        self.initialization_statement = initialization_statement
+        if not self.initialization_statement:
+            self.initialization_statement = self._initialization_statement()
+
+        self.kind = kind
+
+    def _initialization_statement(self) -> Optional['AssignmentStmt']:
+        """Convert the initializer into an assignment statement.
+        """
+        if not self.initializer:
+            return None
+
+        rvalue = self.initializer
+        lvalue = NameExpr(self.variable.name())
+        assign = AssignmentStmt([lvalue], rvalue)
+        return assign
+
+    def set_line(self, target: Union[Token, Node, int]) -> Node:
+        super().set_line(target)
+
+        if self.initializer:
+            self.initializer.set_line(self.line)
+
+        self.variable.set_line(self.line)
+
+        if self.initialization_statement:
+            self.initialization_statement.set_line(self.line)
+            self.initialization_statement.lvalues[0].set_line(self.line)
+        return self
+
+
+class FuncItem(FuncBase):
+    arguments = []  # type: List[Argument]
+    # Minimum number of arguments
+    min_args = 0
+    # Maximum number of positional arguments, -1 if no explicit limit (*args not included)
+    max_pos = 0
+    body = None  # type: Block
+    is_implicit = False    # Implicit dynamic types?
+    # Is this an overload variant of a function with more than one overload variant?
+    is_overload = False
+    is_generator = False   # Contains a yield statement?
+    is_static = False      # Uses @staticmethod?
+    is_class = False       # Uses @classmethod?
+    # Variants of function with type variables with values expanded
+    expanded = None  # type: List[FuncItem]
+
+    def __init__(self, arguments: List[Argument], body: 'Block',
+                 typ: 'mypy.types.FunctionLike' = None) -> None:
+        self.arguments = arguments
+        arg_kinds = [arg.kind for arg in self.arguments]
+        self.max_pos = arg_kinds.count(ARG_POS) + arg_kinds.count(ARG_OPT)
+        self.body = body
+        self.type = typ
+        self.expanded = []
+
+        self.min_args = 0
+        for i in range(len(self.arguments)):
+            if self.arguments[i] is None and i < self.max_fixed_argc():
+                self.min_args = i + 1
+
+    def max_fixed_argc(self) -> int:
+        return self.max_pos
+
+    def set_line(self, target: Union[Token, Node, int]) -> Node:
+        super().set_line(target)
+        for arg in self.arguments:
+            arg.set_line(self.line)
+        return self
+
+    def is_dynamic(self) -> bool:
+        return self.type is None
+
+
+class FuncDef(FuncItem):
+    """Function definition.
+
+    This is a non-lambda function defined using 'def'.
+    """
+
+    is_decorated = False
+    is_conditional = False             # Defined conditionally (within block)?
+    is_abstract = False
+    is_property = False
+    original_def = None  # type: Union[FuncDef, Var]  # Original conditional definition
+
+    def __init__(self,
+                 name: str,              # Function name
+                 arguments: List[Argument],
+                 body: 'Block',
+                 typ: 'mypy.types.FunctionLike' = None) -> None:
+        super().__init__(arguments, body, typ)
+        self._name = name
+
+    def name(self) -> str:
+        return self._name
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_func_def(self)
+
+    def is_constructor(self) -> bool:
+        return self.info is not None and self._name == '__init__'
+
+
+class Decorator(SymbolNode):
+    """A decorated function.
+
+    A single Decorator object can include any number of function decorators.
+    """
+
+    func = None  # type: FuncDef           # Decorated function
+    decorators = None  # type: List[Node]  # Decorators, at least one
+    var = None  # type: Var              # Represents the decorated function obj
+    is_overload = False
+
+    def __init__(self, func: FuncDef, decorators: List[Node],
+                 var: 'Var') -> None:
+        self.func = func
+        self.decorators = decorators
+        self.var = var
+        self.is_overload = False
+
+    def name(self) -> str:
+        return self.func.name()
+
+    def fullname(self) -> str:
+        return self.func.fullname()
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_decorator(self)
+
+
+class Var(SymbolNode):
+    """A variable.
+
+    It can refer to global/local variable or a data attribute.
+    """
+
+    _name = None      # type: str   # Name without module prefix
+    _fullname = None  # type: str   # Name with module prefix
+    info = None  # type: TypeInfo   # Defining class (for member variables)
+    type = None  # type: mypy.types.Type # Declared or inferred type, or None
+    # Is this the first argument to an ordinary method (usually "self")?
+    is_self = False
+    is_ready = False  # If inferred, is the inferred type available?
+    # Is this initialized explicitly to a non-None value in class body?
+    is_initialized_in_class = False
+    is_staticmethod = False
+    is_classmethod = False
+    is_property = False
+    is_settable_property = False
+
+    def __init__(self, name: str, type: 'mypy.types.Type' = None) -> None:
+        self._name = name
+        self.type = type
+        self.is_self = False
+        self.is_ready = True
+        self.is_initialized_in_class = False
+
+    def name(self) -> str:
+        return self._name
+
+    def fullname(self) -> str:
+        return self._fullname
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_var(self)
+
+
+class ClassDef(Node):
+    """Class definition"""
+
+    name = None  # type: str       # Name of the class without module prefix
+    fullname = None  # type: str   # Fully qualified name of the class
+    defs = None  # type: Block
+    type_vars = None  # type: List[mypy.types.TypeVarDef]
+    # Base class expressions (not semantically analyzed -- can be arbitrary expressions)
+    base_type_exprs = None  # type: List[Node]
+    # Semantically analyzed base types, derived from base_type_exprs during semantic analysis
+    base_types = None  # type: List[mypy.types.Instance]
+    info = None  # type: TypeInfo  # Related TypeInfo
+    metaclass = ''
+    decorators = None  # type: List[Node]
+    # Built-in/extension class? (single implementation inheritance only)
+    is_builtinclass = False
+
+    def __init__(self, name: str, defs: 'Block',
+                 type_vars: List['mypy.types.TypeVarDef'] = None,
+                 base_type_exprs: List[Node] = None,
+                 metaclass: str = None) -> None:
+        if not base_type_exprs:
+            base_type_exprs = []
+        self.name = name
+        self.defs = defs
+        self.type_vars = type_vars or []
+        self.base_type_exprs = base_type_exprs
+        self.base_types = []  # Not yet semantically analyzed --> don't know base types
+        self.metaclass = metaclass
+        self.decorators = []
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_class_def(self)
+
+    def is_generic(self) -> bool:
+        return self.info.is_generic()
+
+
+class GlobalDecl(Node):
+    """Declaration global x, y, ..."""
+
+    names = None  # type: List[str]
+
+    def __init__(self, names: List[str]) -> None:
+        self.names = names
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_global_decl(self)
+
+
+class NonlocalDecl(Node):
+    """Declaration nonlocal x, y, ..."""
+
+    names = None  # type: List[str]
+
+    def __init__(self, names: List[str]) -> None:
+        self.names = names
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_nonlocal_decl(self)
+
+
+class Block(Node):
+    body = None  # type: List[Node]
+    # True if we can determine that this block is not executed. For example,
+    # this applies to blocks that are protected by something like "if PY3:"
+    # when using Python 2.
+    is_unreachable = False
+
+    def __init__(self, body: List[Node]) -> None:
+        self.body = body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_block(self)
+
+
+# Statements
+
+
+class ExpressionStmt(Node):
+    """An expression as a statament, such as print(s)."""
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_expression_stmt(self)
+
+
+class AssignmentStmt(Node):
+    """Assignment statement
+    The same node class is used for single assignment, multiple assignment
+    (e.g. x, y = z) and chained assignment (e.g. x = y = z), assignments
+    that define new names, and assignments with explicit types (# type).
+
+    An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, IndexExpr.
+    """
+
+    lvalues = None  # type: List[Node]
+    rvalue = None  # type: Node
+    # Declared type in a comment, may be None.
+    type = None  # type: mypy.types.Type
+
+    def __init__(self, lvalues: List[Node], rvalue: Node,
+                 type: 'mypy.types.Type' = None) -> None:
+        self.lvalues = lvalues
+        self.rvalue = rvalue
+        self.type = type
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_assignment_stmt(self)
+
+
+class OperatorAssignmentStmt(Node):
+    """Operator assignment statement such as x += 1"""
+
+    op = ''
+    lvalue = None  # type: Node
+    rvalue = None  # type: Node
+
+    def __init__(self, op: str, lvalue: Node, rvalue: Node) -> None:
+        self.op = op
+        self.lvalue = lvalue
+        self.rvalue = rvalue
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_operator_assignment_stmt(self)
+
+
+class WhileStmt(Node):
+    expr = None  # type: Node
+    body = None  # type: Block
+    else_body = None  # type: Block
+
+    def __init__(self, expr: Node, body: Block, else_body: Block) -> None:
+        self.expr = expr
+        self.body = body
+        self.else_body = else_body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_while_stmt(self)
+
+
+class ForStmt(Node):
+    # Index variables
+    index = None  # type: Node
+    # Expression to iterate
+    expr = None  # type: Node
+    body = None  # type: Block
+    else_body = None  # type: Block
+
+    def __init__(self, index: Node, expr: Node, body: Block,
+                 else_body: Block) -> None:
+        self.index = index
+        self.expr = expr
+        self.body = body
+        self.else_body = else_body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_for_stmt(self)
+
+
+class ReturnStmt(Node):
+    expr = None  # type: Node   # Expression or None
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_return_stmt(self)
+
+
+class AssertStmt(Node):
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_assert_stmt(self)
+
+
+class DelStmt(Node):
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_del_stmt(self)
+
+
+class BreakStmt(Node):
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_break_stmt(self)
+
+
+class ContinueStmt(Node):
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_continue_stmt(self)
+
+
+class PassStmt(Node):
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_pass_stmt(self)
+
+
+class IfStmt(Node):
+    expr = None  # type: List[Node]
+    body = None  # type: List[Block]
+    else_body = None  # type: Block
+
+    def __init__(self, expr: List[Node], body: List[Block],
+                 else_body: Block) -> None:
+        self.expr = expr
+        self.body = body
+        self.else_body = else_body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_if_stmt(self)
+
+
+class RaiseStmt(Node):
+    expr = None  # type: Node
+    from_expr = None  # type: Node
+
+    def __init__(self, expr: Node, from_expr: Node = None) -> None:
+        self.expr = expr
+        self.from_expr = from_expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_raise_stmt(self)
+
+
+class TryStmt(Node):
+    body = None  # type: Block                # Try body
+    types = None  # type: List[Node]          # Except type expressions
+    vars = None  # type: List[NameExpr]     # Except variable names
+    handlers = None  # type: List[Block]      # Except bodies
+    else_body = None  # type: Block
+    finally_body = None  # type: Block
+
+    def __init__(self, body: Block, vars: List['NameExpr'], types: List[Node],
+                 handlers: List[Block], else_body: Block,
+                 finally_body: Block) -> None:
+        self.body = body
+        self.vars = vars
+        self.types = types
+        self.handlers = handlers
+        self.else_body = else_body
+        self.finally_body = finally_body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_try_stmt(self)
+
+
+class WithStmt(Node):
+    expr = None  # type: List[Node]
+    target = None  # type: List[Node]
+    body = None  # type: Block
+
+    def __init__(self, expr: List[Node], target: List[Node],
+                 body: Block) -> None:
+        self.expr = expr
+        self.target = target
+        self.body = body
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_with_stmt(self)
+
+
+class PrintStmt(Node):
+    """Python 2 print statement"""
+
+    args = None  # type: List[Node]
+    newline = False
+    # The file-like target object (given using >>).
+    target = None  # type: Optional[Node]
+
+    def __init__(self, args: List[Node], newline: bool, target: Node = None) -> None:
+        self.args = args
+        self.newline = newline
+        self.target = target
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_print_stmt(self)
+
+
+class ExecStmt(Node):
+    """Python 2 exec statement"""
+
+    expr = None  # type: Node
+    variables1 = None  # type: Optional[Node]
+    variables2 = None  # type: Optional[Node]
+
+    def __init__(self, expr: Node, variables1: Optional[Node], variables2: Optional[Node]) -> None:
+        self.expr = expr
+        self.variables1 = variables1
+        self.variables2 = variables2
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_exec_stmt(self)
+
+
+# Expressions
+
+
+class IntExpr(Node):
+    """Integer literal"""
+
+    value = 0
+    literal = LITERAL_YES
+
+    def __init__(self, value: int) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_int_expr(self)
+
+
+class StrExpr(Node):
+    """String literal"""
+
+    value = ''
+    literal = LITERAL_YES
+
+    def __init__(self, value: str) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_str_expr(self)
+
+
+class BytesExpr(Node):
+    """Bytes literal"""
+
+    value = ''  # TODO use bytes
+    literal = LITERAL_YES
+
+    def __init__(self, value: str) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_bytes_expr(self)
+
+
+class UnicodeExpr(Node):
+    """Unicode literal (Python 2.x)"""
+
+    value = ''  # TODO use bytes
+    literal = LITERAL_YES
+
+    def __init__(self, value: str) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_unicode_expr(self)
+
+
+class FloatExpr(Node):
+    """Float literal"""
+
+    value = 0.0
+    literal = LITERAL_YES
+
+    def __init__(self, value: float) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_float_expr(self)
+
+
+class ComplexExpr(Node):
+    """Complex literal"""
+
+    value = 0.0j
+    literal = LITERAL_YES
+
+    def __init__(self, value: complex) -> None:
+        self.value = value
+        self.literal_hash = value
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_complex_expr(self)
+
+
+class EllipsisExpr(Node):
+    """Ellipsis (...)"""
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_ellipsis(self)
+
+
+class StarExpr(Node):
+    """Star expression"""
+
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+        self.literal = self.expr.literal
+        self.literal_hash = ('Star', expr.literal_hash,)
+
+        # Whether this starred expression is used in a tuple/list and as lvalue
+        self.valid = False
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_star_expr(self)
+
+
+class RefExpr(Node):
+    """Abstract base class for name-like constructs"""
+
+    kind = None  # type: int      # LDEF/GDEF/MDEF/... (None if not available)
+    node = None  # type: Node        # Var, FuncDef or TypeInfo that describes this
+    fullname = None  # type: str  # Fully qualified name (or name if not global)
+
+    # Does this define a new name with inferred type?
+    #
+    # For members, after semantic analysis, this does not take base
+    # classes into consideration at all; the type checker deals with these.
+    is_def = False
+
+
+class NameExpr(RefExpr):
+    """Name expression
+
+    This refers to a local name, global name or a module.
+    """
+
+    name = None  # type: str      # Name referred to (may be qualified)
+    # TypeInfo of class surrounding expression (may be None)
+    info = None  # type: TypeInfo
+
+    literal = LITERAL_TYPE
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+        self.literal_hash = ('Var', name,)
+
+    def type_node(self):
+        return cast(TypeInfo, self.node)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_name_expr(self)
+
+
+class MemberExpr(RefExpr):
+    """Member access expression x.y"""
+
+    expr = None  # type: Node
+    name = None  # type: str
+    # The variable node related to a definition.
+    def_var = None  # type: Var
+
+    def __init__(self, expr: Node, name: str) -> None:
+        self.expr = expr
+        self.name = name
+        self.literal = self.expr.literal
+        self.literal_hash = ('Member', expr.literal_hash, name)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_member_expr(self)
+
+
+# Kinds of arguments
+
+# Positional argument
+ARG_POS = 0  # type: int
+# Positional, optional argument (functions only, not calls)
+ARG_OPT = 1  # type: int
+# *arg argument
+ARG_STAR = 2  # type: int
+# Keyword argument x=y in call, or keyword-only function arg
+ARG_NAMED = 3  # type: int
+# **arg argument
+ARG_STAR2 = 4  # type: int
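+
+# For instance, the signature 'def f(a, b=1, *args, c, **kw)' corresponds to
+# the argument kinds [ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2]
+# (an illustrative mapping based on the kind descriptions above).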
+
+
+class CallExpr(Node):
+    """Call expression.
+
+    This can also represent several special forms that are syntactically calls
+    such as cast(...) and None  # type: ....
+    """
+
+    callee = None  # type: Node
+    args = None  # type: List[Node]
+    arg_kinds = None  # type: List[int]  # ARG_ constants
+    # Each name can be None if not a keyword argument.
+    arg_names = None  # type: List[str]
+    # If not None, the node that represents the meaning of the CallExpr. For
+    # cast(...) this is a CastExpr.
+    analyzed = None  # type: Node
+
+    def __init__(self, callee: Node, args: List[Node], arg_kinds: List[int],
+                 arg_names: List[str] = None, analyzed: Node = None) -> None:
+        if not arg_names:
+            arg_names = [None] * len(args)
+        self.callee = callee
+        self.args = args
+        self.arg_kinds = arg_kinds
+        self.arg_names = arg_names
+        self.analyzed = analyzed
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_call_expr(self)
+
+
+class YieldFromExpr(Node):
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_yield_from_expr(self)
+
+
+class YieldExpr(Node):
+    expr = None  # type: Optional[Node]
+
+    def __init__(self, expr: Optional[Node]) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_yield_expr(self)
+
+
+class IndexExpr(Node):
+    """Index expression x[y].
+
+    Also wraps type application such as List[int] as a special form.
+    """
+
+    base = None  # type: Node
+    index = None  # type: Node
+    # Inferred __getitem__ method type
+    method_type = None  # type: mypy.types.Type
+    # If not None, this is actually semantically a type application
+    # Class[type, ...] or a type alias initializer.
+    analyzed = None  # type: Union[TypeApplication, TypeAliasExpr]
+
+    def __init__(self, base: Node, index: Node) -> None:
+        self.base = base
+        self.index = index
+        self.analyzed = None
+        if self.index.literal == LITERAL_YES:
+            self.literal = self.base.literal
+            self.literal_hash = ('Member', base.literal_hash,
+                                 index.literal_hash)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_index_expr(self)
+
+
+class UnaryExpr(Node):
+    """Unary operation"""
+
+    op = ''
+    expr = None  # type: Node
+    # Inferred operator method type
+    method_type = None  # type: mypy.types.Type
+
+    def __init__(self, op: str, expr: Node) -> None:
+        self.op = op
+        self.expr = expr
+        self.literal = self.expr.literal
+        self.literal_hash = ('Unary', op, expr.literal_hash)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_unary_expr(self)
+
+
+# Map from binary operator id to related method name (in Python 3).
+op_methods = {
+    '+': '__add__',
+    '-': '__sub__',
+    '*': '__mul__',
+    '/': '__truediv__',
+    '%': '__mod__',
+    '//': '__floordiv__',
+    '**': '__pow__',
+    '&': '__and__',
+    '|': '__or__',
+    '^': '__xor__',
+    '<<': '__lshift__',
+    '>>': '__rshift__',
+    '==': '__eq__',
+    '!=': '__ne__',
+    '<': '__lt__',
+    '>=': '__ge__',
+    '>': '__gt__',
+    '<=': '__le__',
+    'in': '__contains__',
+}  # type: Dict[str, str]
+
+ops_with_inplace_method = {
+    '+', '-', '*', '/', '%', '//', '**', '&', '|', '^', '<<', '>>'}
+
+inplace_operator_methods = set(
+    '__i' + op_methods[op][2:] for op in ops_with_inplace_method)
+
+reverse_op_methods = {
+    '__add__': '__radd__',
+    '__sub__': '__rsub__',
+    '__mul__': '__rmul__',
+    '__truediv__': '__rtruediv__',
+    '__mod__': '__rmod__',
+    '__floordiv__': '__rfloordiv__',
+    '__pow__': '__rpow__',
+    '__and__': '__rand__',
+    '__or__': '__ror__',
+    '__xor__': '__rxor__',
+    '__lshift__': '__rlshift__',
+    '__rshift__': '__rrshift__',
+    '__eq__': '__eq__',
+    '__ne__': '__ne__',
+    '__lt__': '__gt__',
+    '__ge__': '__le__',
+    '__gt__': '__lt__',
+    '__le__': '__ge__',
+}
+
+normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items())
+reverse_op_method_set = set(reverse_op_methods.values())
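+
+# For example, reverse_op_methods['__add__'] is '__radd__', and comparison
+# methods map to their mirrored counterparts ('__lt__' -> '__gt__'), matching
+# Python's reflected-operand fallback where 'x < y' may call 'y.__gt__(x)'.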
+
+
+class OpExpr(Node):
+    """Binary operation (other than . or [] or comparison operators,
+    which have specific nodes)."""
+
+    op = ''
+    left = None  # type: Node
+    right = None  # type: Node
+    # Inferred type for the operator method type (when relevant).
+    method_type = None  # type: mypy.types.Type
+
+    def __init__(self, op: str, left: Node, right: Node) -> None:
+        self.op = op
+        self.left = left
+        self.right = right
+        self.literal = min(self.left.literal, self.right.literal)
+        self.literal_hash = ('Binary', op, left.literal_hash, right.literal_hash)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_op_expr(self)
+
+
+class ComparisonExpr(Node):
+    """Comparison expression (e.g. a < b > c < d)."""
+
+    operators = None  # type: List[str]
+    operands = None  # type: List[Node]
+    # Inferred type for the operator methods (when relevant; None for 'is').
+    method_types = None  # type: List[mypy.types.Type]
+
+    def __init__(self, operators: List[str], operands: List[Node]) -> None:
+        self.operators = operators
+        self.operands = operands
+        self.method_types = []
+        self.literal = min(o.literal for o in self.operands)
+        self.literal_hash = (('Comparison',) + tuple(operators) +
+                             tuple(o.literal_hash for o in operands))
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_comparison_expr(self)
+
+
+class SliceExpr(Node):
+    """Slice expression (e.g. 'x:y', 'x:', '::2' or ':').
+
+    This is only valid as index in index expressions.
+    """
+
+    begin_index = None  # type: Node  # May be None
+    end_index = None  # type: Node    # May be None
+    stride = None  # type: Node       # May be None
+
+    def __init__(self, begin_index: Node, end_index: Node,
+                 stride: Node) -> None:
+        self.begin_index = begin_index
+        self.end_index = end_index
+        self.stride = stride
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_slice_expr(self)
+
+
+class CastExpr(Node):
+    """Cast expression cast(type, expr)."""
+
+    expr = None  # type: Node
+    type = None  # type: mypy.types.Type
+
+    def __init__(self, expr: Node, typ: 'mypy.types.Type') -> None:
+        self.expr = expr
+        self.type = typ
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_cast_expr(self)
+
+
+class SuperExpr(Node):
+    """Expression super().name"""
+
+    name = ''
+    info = None  # type: TypeInfo  # Type that contains this super expression
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_super_expr(self)
+
+
+class FuncExpr(FuncItem):
+    """Lambda expression"""
+
+    def name(self) -> str:
+        return '<lambda>'
+
+    def expr(self) -> Node:
+        """Return the expression (the body) of the lambda."""
+        ret = cast(ReturnStmt, self.body.body[0])
+        return ret.expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_func_expr(self)
+
+
+class ListExpr(Node):
+    """List literal expression [...]."""
+
+    items = None  # type: List[Node]
+
+    def __init__(self, items: List[Node]) -> None:
+        self.items = items
+        if all(x.literal == LITERAL_YES for x in items):
+            self.literal = LITERAL_YES
+            self.literal_hash = ('List',) + tuple(x.literal_hash for x in items)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_list_expr(self)
+
+
+class DictExpr(Node):
+    """Dictionary literal expression {key: value, ...}."""
+
+    items = None  # type: List[Tuple[Node, Node]]
+
+    def __init__(self, items: List[Tuple[Node, Node]]) -> None:
+        self.items = items
+        if all(x[0].literal == LITERAL_YES and x[1].literal == LITERAL_YES
+               for x in items):
+            self.literal = LITERAL_YES
+            self.literal_hash = ('Dict',) + tuple((x[0].literal_hash, x[1].literal_hash)
+                                                  for x in items)  # type: ignore
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_dict_expr(self)
+
+
+class TupleExpr(Node):
+    """Tuple literal expression (..., ...)"""
+
+    items = None  # type: List[Node]
+
+    def __init__(self, items: List[Node]) -> None:
+        self.items = items
+        if all(x.literal == LITERAL_YES for x in items):
+            self.literal = LITERAL_YES
+            self.literal_hash = ('Tuple',) + tuple(x.literal_hash for x in items)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_tuple_expr(self)
+
+
+class SetExpr(Node):
+    """Set literal expression {value, ...}."""
+
+    items = None  # type: List[Node]
+
+    def __init__(self, items: List[Node]) -> None:
+        self.items = items
+        if all(x.literal == LITERAL_YES for x in items):
+            self.literal = LITERAL_YES
+            self.literal_hash = ('Set',) + tuple(x.literal_hash for x in items)
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_set_expr(self)
+
+
+class GeneratorExpr(Node):
+    """Generator expression ... for ... in ... [ for ...  in ... ] [ if ... ]."""
+
+    left_expr = None  # type: Node
+    sequences = None  # type: List[Node]
+    condlists = None  # type: List[List[Node]]
+    indices = None  # type: List[Node]
+
+    def __init__(self, left_expr: Node, indices: List[Node],
+                 sequences: List[Node], condlists: List[List[Node]]) -> None:
+        self.left_expr = left_expr
+        self.sequences = sequences
+        self.condlists = condlists
+        self.indices = indices
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_generator_expr(self)
+
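+# Roughly how the fields map for a concrete expression (illustrative): for
+# "(x + 1 for x, y in pairs if x > 0 if y)", left_expr holds "x + 1", indices
+# holds the single lvalue "(x, y)", sequences holds "pairs", and condlists
+# holds one condition list per 'for' clause, here [[<x > 0>, <y>]].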
+
+class ListComprehension(Node):
+    """List comprehension (e.g. [x + 1 for x in a])"""
+
+    generator = None  # type: GeneratorExpr
+
+    def __init__(self, generator: GeneratorExpr) -> None:
+        self.generator = generator
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_list_comprehension(self)
+
+
+class SetComprehension(Node):
+    """Set comprehension (e.g. {x + 1 for x in a})"""
+
+    generator = None  # type: GeneratorExpr
+
+    def __init__(self, generator: GeneratorExpr) -> None:
+        self.generator = generator
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_set_comprehension(self)
+
+
+class DictionaryComprehension(Node):
+    """Dictionary comprehension (e.g. {k: v for k, v in a}"""
+
+    key = None  # type: Node
+    value = None  # type: Node
+    sequences = None  # type: List[Node]
+    condlists = None  # type: List[List[Node]]
+    indices = None  # type: List[Node]
+
+    def __init__(self, key: Node, value: Node, indices: List[Node],
+                 sequences: List[Node], condlists: List[List[Node]]) -> None:
+        self.key = key
+        self.value = value
+        self.sequences = sequences
+        self.condlists = condlists
+        self.indices = indices
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_dictionary_comprehension(self)
+
+
+class ConditionalExpr(Node):
+    """Conditional expression (e.g. x if y else z)"""
+
+    cond = None  # type: Node
+    if_expr = None  # type: Node
+    else_expr = None  # type: Node
+
+    def __init__(self, cond: Node, if_expr: Node, else_expr: Node) -> None:
+        self.cond = cond
+        self.if_expr = if_expr
+        self.else_expr = else_expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_conditional_expr(self)
+
+
+class BackquoteExpr(Node):
+    """Python 2 expression `...`."""
+
+    expr = None  # type: Node
+
+    def __init__(self, expr: Node) -> None:
+        self.expr = expr
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_backquote_expr(self)
+
+
+class TypeApplication(Node):
+    """Type application expr[type, ...]"""
+
+    expr = None  # type: Node
+    types = None  # type: List[mypy.types.Type]
+
+    def __init__(self, expr: Node, types: List['mypy.types.Type']) -> None:
+        self.expr = expr
+        self.types = types
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_type_application(self)
+
+
+# Variance of a type variable. For example, T in the definition of
+# List[T] is invariant, so List[int] is not a subtype of List[object],
+# and also List[object] is not a subtype of List[int].
+#
+# The T in Iterable[T] is covariant, so Iterable[int] is a subtype of
+# Iterable[object], but not vice versa.
+#
+# If T is contravariant in Foo[T], Foo[object] is a subtype of
+# Foo[int], but not vice versa.
+INVARIANT = 0  # type: int
+COVARIANT = 1  # type: int
+CONTRAVARIANT = 2  # type: int
+
+
+class TypeVarExpr(SymbolNode):
+    """Type variable expression TypeVar(...)."""
+
+    _name = ''
+    _fullname = ''
+    # Value restriction: only types in the list are valid as values. If the
+    # list is empty, there is no restriction.
+    values = None  # type: List[mypy.types.Type]
+    # Variance of the type variable. Invariant is the default.
+    # TypeVar(..., covariant=True) defines a covariant type variable.
+    # TypeVar(..., contravariant=True) defines a contravariant type
+    # variable.
+    variance = INVARIANT
+
+    def __init__(self, name: str, fullname: str,
+                 values: List['mypy.types.Type'],
+                 variance: int=INVARIANT) -> None:
+        self._name = name
+        self._fullname = fullname
+        self.values = values
+        self.variance = variance
+
+    def name(self) -> str:
+        return self._name
+
+    def fullname(self) -> str:
+        return self._fullname
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_type_var_expr(self)
+
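+# Illustrative sketch (hypothetical module 'm'): the source line
+#     T = TypeVar('T', int, str, covariant=True)
+# would be represented roughly as
+#     TypeVarExpr('T', 'm.T', values=[<int type>, <str type>], variance=COVARIANT)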
+
+class TypeAliasExpr(Node):
+    """Type alias expression (rvalue)."""
+
+    type = None  # type: mypy.types.Type
+
+    def __init__(self, type: 'mypy.types.Type') -> None:
+        self.type = type
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_type_alias_expr(self)
+
+
+class NamedTupleExpr(Node):
+    """Named tuple expression namedtuple(...)."""
+
+    # The class representation of this named tuple (its tuple_type attribute contains
+    # the tuple item types)
+    info = None  # type: TypeInfo
+
+    def __init__(self, info: 'TypeInfo') -> None:
+        self.info = info
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_namedtuple_expr(self)
+
+
+class PromoteExpr(Node):
+    """Ducktype class decorator expression _promote(...)."""
+
+    type = None  # type: mypy.types.Type
+
+    def __init__(self, type: 'mypy.types.Type') -> None:
+        self.type = type
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit__promote_expr(self)
+
+
+# Constants
+
+
+class TempNode(Node):
+    """Temporary dummy node used during type checking.
+
+    This node is not present in the original program; it is just an artifact
+    of the type checker implementation. It only represents an opaque node with
+    some fixed type.
+    """
+
+    type = None  # type: mypy.types.Type
+
+    def __init__(self, typ: 'mypy.types.Type') -> None:
+        self.type = typ
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_temp_node(self)
+
+
+class TypeInfo(SymbolNode):
+    """The type structure of a single class.
+
+    Each TypeInfo corresponds one-to-one to a ClassDef, which
+    represents the AST of the class.
+
+    In type-theory terms, this is a "type constructor", and if the
+    class is generic then it will be a type constructor of higher kind.
+    Where the class is used in an actual type, it's in the form of an
+    Instance, which amounts to a type application of the tycon to
+    the appropriate number of arguments.
+    """
+
+    _fullname = None  # type: str          # Fully qualified name
+    defn = None  # type: ClassDef          # Corresponding ClassDef
+    # Method Resolution Order: the order of looking up attributes. The first
+    # value always refers to this class.
+    mro = None  # type: List[TypeInfo]
+    subtypes = None  # type: Set[TypeInfo] # Direct subclasses encountered so far
+    names = None  # type: SymbolTable      # Names defined directly in this type
+    is_abstract = False                    # Does the class have any abstract attributes?
+    abstract_attributes = None  # type: List[str]
+    is_enum = False
+    # If true, any unknown attributes should have type 'Any' instead
+    # of generating a type error.  This would be true if there is a
+    # base class with type 'Any', but other use cases may be
+    # possible. This is similar to defining __getattr__ (and __setattr__)
+    # methods that return Any, without those methods actually existing.
+    fallback_to_any = False
+
+    # Information related to type annotations.
+
+    # Generic type variable names
+    type_vars = None  # type: List[str]
+
+    # Direct base classes.
+    bases = None  # type: List[mypy.types.Instance]
+
+    # Another type which this type will be treated as a subtype of,
+    # even though it's not a subclass in Python.  The non-standard
+    # `@_promote` decorator introduces this, and there are also
+    # several builtin examples, in particular `int` -> `float`.
+    _promote = None  # type: mypy.types.Type
+
+    # Representation of a Tuple[...] base class, if the class has any
+    # (e.g., for named tuples). If this is not None, the actual Type
+    # object used for this class is not an Instance but a TupleType;
+    # the corresponding Instance is set as the fallback type of the
+    # tuple type.
+    tuple_type = None  # type: mypy.types.TupleType
+
+    # Is this a named tuple type?
+    is_named_tuple = False
+
+    def __init__(self, names: 'SymbolTable', defn: ClassDef) -> None:
+        """Initialize a TypeInfo."""
+        self.names = names
+        self.defn = defn
+        self.subtypes = set()
+        self.type_vars = []
+        self.bases = []
+        # Leave self.mro uninitialized until we compute it for real,
+        # so we don't accidentally try to use it prematurely.
+        self._fullname = defn.fullname
+        self.is_abstract = False
+        self.abstract_attributes = []
+        if defn.type_vars:
+            for vd in defn.type_vars:
+                self.type_vars.append(vd.name)
+
+    def name(self) -> str:
+        """Short name."""
+        return self.defn.name
+
+    def fullname(self) -> str:
+        return self._fullname
+
+    def is_generic(self) -> bool:
+        """Is the type generic (i.e. does it have type variables)?"""
+        return self.type_vars is not None and len(self.type_vars) > 0
+
+    def get(self, name: str) -> 'SymbolTableNode':
+        for cls in self.mro:
+            n = cls.names.get(name)
+            if n:
+                return n
+        return None
+
+    def __getitem__(self, name: str) -> 'SymbolTableNode':
+        n = self.get(name)
+        if n:
+            return n
+        else:
+            raise KeyError(name)
+
+    def __repr__(self) -> str:
+        return '<TypeInfo %s>' % self.fullname()
+
+    # IDEA: Refactor the has* methods to be more consistent and document
+    #       them.
+
+    def has_readable_member(self, name: str) -> bool:
+        return self.get(name) is not None
+
+    def has_writable_member(self, name: str) -> bool:
+        return self.has_var(name)
+
+    def has_var(self, name: str) -> bool:
+        return self.get_var(name) is not None
+
+    def has_method(self, name: str) -> bool:
+        return self.get_method(name) is not None
+
+    def get_var(self, name: str) -> Var:
+        for cls in self.mro:
+            if name in cls.names:
+                node = cls.names[name].node
+                if isinstance(node, Var):
+                    return cast(Var, node)
+                else:
+                    return None
+        return None
+
+    def get_var_or_getter(self, name: str) -> SymbolNode:
+        # TODO getter
+        return self.get_var(name)
+
+    def get_var_or_setter(self, name: str) -> SymbolNode:
+        # TODO setter
+        return self.get_var(name)
+
+    def get_method(self, name: str) -> FuncBase:
+        for cls in self.mro:
+            if name in cls.names:
+                node = cls.names[name].node
+                if isinstance(node, FuncBase):
+                    return node
+                else:
+                    return None
+        return None
+
+    def calculate_mro(self) -> None:
+        """Calculate and set mro (method resolution order).
+
+        Raise MroError if cannot determine mro.
+        """
+        self.mro = linearize_hierarchy(self)
+
+    def has_base(self, fullname: str) -> bool:
+        """Return True if type has a base type with the specified name.
+
+        This can be either via extension or via implementation.
+        """
+        for cls in self.mro:
+            if cls.fullname() == fullname:
+                return True
+        return False
+
+    def all_subtypes(self) -> 'Set[TypeInfo]':
+        """Return TypeInfos of all subtypes, including this type, as a set."""
+        subtypes = set([self])
+        for subt in self.subtypes:
+            for t in subt.all_subtypes():
+                subtypes.add(t)
+        return subtypes
+
+    def all_base_classes(self) -> 'List[TypeInfo]':
+        """Return a list of base classes, including indirect bases."""
+        assert False
+
+    def direct_base_classes(self) -> 'List[TypeInfo]':
+        """Return a direct base classes.
+
+        Omit base classes of other base classes.
+        """
+        return [base.type for base in self.bases]
+
+    def __str__(self) -> str:
+        """Return a string representation of the type.
+
+        This includes the most important information about the type.
+        """
+        base = None  # type: str
+        if self.bases:
+            base = 'Bases({})'.format(', '.join(str(base)
+                                                for base in self.bases))
+        return dump_tagged(['Name({})'.format(self.fullname()),
+                            base,
+                            ('Names', sorted(self.names.keys()))],
+                           'TypeInfo')
+
+
+class SymbolTableNode:
+    # Kind of node. Possible values:
+    #  - LDEF: local definition (of any kind)
+    #  - GDEF: global (module-level) definition
+    #  - MDEF: class member definition
+    #  - UNBOUND_TVAR: TypeVar(...) definition, not bound
+    #  - TVAR: type variable in a bound scope (generic function / generic class)
+    #  - MODULE_REF: reference to a module
+    #  - TYPE_ALIAS: type alias
+    #  - UNBOUND_IMPORTED: temporary kind for imported names
+    kind = None  # type: int
+    # AST node of definition (FuncDef/Var/TypeInfo/Decorator/TypeVarExpr,
+    # or None for a bound type variable).
+    node = None  # type: Optional[SymbolNode]
+    # Type variable id (for bound type variables only)
+    tvar_id = 0
+    # Module id (e.g. "foo.bar") or None
+    mod_id = ''
+    # If this is not None, it overrides the type of the 'node' attribute.
+    type_override = None  # type: mypy.types.Type
+    # If False, this name won't be imported via 'from <module> import *'.
+    # This has no effect on names within classes.
+    module_public = True
+
+    def __init__(self, kind: int, node: SymbolNode, mod_id: str = None,
+                 typ: 'mypy.types.Type' = None, tvar_id: int = 0,
+                 module_public: bool = True) -> None:
+        self.kind = kind
+        self.node = node
+        self.type_override = typ
+        self.mod_id = mod_id
+        self.tvar_id = tvar_id
+        self.module_public = module_public
+
+    @property
+    def fullname(self) -> str:
+        if self.node is not None:
+            return self.node.fullname()
+        else:
+            return None
+
+    @property
+    def type(self) -> 'mypy.types.Type':
+        # IDEA: Get rid of the Any type.
+        node = self.node  # type: Any
+        if self.type_override is not None:
+            return self.type_override
+        elif ((isinstance(node, Var) or isinstance(node, FuncDef))
+              and node.type is not None):
+            return node.type
+        elif isinstance(node, Decorator):
+            return (cast(Decorator, node)).var.type
+        else:
+            return None
+
+    def __str__(self) -> str:
+        s = '{}/{}'.format(node_kinds[self.kind], short_type(self.node))
+        if self.mod_id is not None:
+            s += ' ({})'.format(self.mod_id)
+        # Include declared type of variables and functions.
+        if self.type is not None:
+            s += ' : {}'.format(self.type)
+        return s
+
+
+class SymbolTable(Dict[str, SymbolTableNode]):
+    def __str__(self) -> str:
+        a = []  # type: List[str]
+        for key, value in self.items():
+            # Filter out the implicit import of builtins.
+            if isinstance(value, SymbolTableNode):
+                if (value.fullname != 'builtins' and
+                        (value.fullname or '').split('.')[-1] not in
+                        implicit_module_attrs):
+                    a.append('  ' + str(key) + ' : ' + str(value))
+            else:
+                a.append('  <invalid item>')
+        a = sorted(a)
+        a.insert(0, 'SymbolTable(')
+        a[-1] += ')'
+        return '\n'.join(a)
+
+
+def clean_up(s: str) -> str:
+    # TODO remove
+    return re.sub('.*::', '', s)
+
+
+def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
+    if func.type:
+        assert isinstance(func.type, mypy.types.FunctionLike)
+        return cast(mypy.types.FunctionLike, func.type)
+    else:
+        # Implicit type signature with dynamic types.
+        # Overloaded functions always have a signature, so func must be an ordinary function.
+        fdef = cast(FuncDef, func)
+        name = func.name()
+        if name:
+            name = '"{}"'.format(name)
+        names = []  # type: List[str]
+        for arg in fdef.arguments:
+            names.append(arg.variable.name())
+
+        return mypy.types.CallableType(
+            [mypy.types.AnyType()] * len(fdef.arguments),
+            [arg.kind for arg in fdef.arguments],
+            names,
+            mypy.types.AnyType(),
+            fallback,
+            name,
+        )
+
+
+def method_type_with_fallback(func: FuncBase,
+                              fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
+    """Return the signature of a method (omit self)."""
+    return method_type(function_type(func, fallback))
+
+
+def method_type(sig: 'mypy.types.FunctionLike') -> 'mypy.types.FunctionLike':
+    if isinstance(sig, mypy.types.CallableType):
+        return method_callable(sig)
+    else:
+        sig = cast(mypy.types.Overloaded, sig)
+        items = []  # type: List[mypy.types.CallableType]
+        for c in sig.items():
+            items.append(method_callable(c))
+        return mypy.types.Overloaded(items)
+
+
+def method_callable(c: 'mypy.types.CallableType') -> 'mypy.types.CallableType':
+    return c.copy_modified(arg_types=c.arg_types[1:],
+                           arg_kinds=c.arg_kinds[1:],
+                           arg_names=c.arg_names[1:])
+
+
+class MroError(Exception):
+    """Raised if a consistent mro cannot be determined for a class."""
+
+
+def linearize_hierarchy(info: TypeInfo) -> List[TypeInfo]:
+    # TODO describe
+    if info.mro:
+        return info.mro
+    bases = info.direct_base_classes()
+    return [info] + merge([linearize_hierarchy(base) for base in bases] +
+                          [bases])
+
+
+def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]:
+    seqs = [s[:] for s in seqs]
+    result = []  # type: List[TypeInfo]
+    while True:
+        seqs = [s for s in seqs if s]
+        if not seqs:
+            return result
+        for seq in seqs:
+            head = seq[0]
+            if not [s for s in seqs if head in s[1:]]:
+                break
+        else:
+            raise MroError()
+        result.append(head)
+        for s in seqs:
+            if s[0] is head:
+                del s[0]
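+# Worked example of the linearization above (illustrative): for
+#     class C(A, B)
+# where A and B both derive only from object, linearize_hierarchy(C) computes
+#     [C] + merge([[A, object], [B, object], [A, B]]) == [C, A, B, object],
+# which is the usual C3 method resolution order for that hierarchy.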
diff --git a/mypy/parse.py b/mypy/parse.py
new file mode 100644
index 0000000..f37b04e
--- /dev/null
+++ b/mypy/parse.py
@@ -0,0 +1,1999 @@
+"""Mypy parser.
+
+Constructs a parse tree (abstract syntax tree) based on a string
+representing a source file. Performs only minimal semantic checks.
+"""
+
+import re
+
+from typing import List, Tuple, Any, Set, cast, Union, Optional
+
+from mypy import lex, docstring
+from mypy.lex import (
+    Token, Eof, Bom, Break, Name, Colon, Dedent, IntLit, StrLit, BytesLit,
+    UnicodeLit, FloatLit, Op, Indent, Keyword, Punct, LexError, ComplexLit,
+    EllipsisToken
+)
+import mypy.types
+from mypy.nodes import (
+    MypyFile, Import, Node, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
+    ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
+    ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
+    DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
+    WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, CastExpr,
+    TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
+    DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
+    FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
+    UnaryExpr, FuncExpr, TypeApplication, PrintStmt, ImportBase, ComparisonExpr,
+    StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
+    SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, ExecStmt, Argument,
+    BackquoteExpr
+)
+from mypy import defaults
+from mypy import nodes
+from mypy.errors import Errors, CompileError
+from mypy.types import Void, Type, CallableType, AnyType, UnboundType
+from mypy.parsetype import (
+    parse_type, parse_types, parse_signature, TypeParseError, parse_str_as_signature
+)
+
+
+precedence = {
+    '**': 16,
+    '-u': 15, '+u': 15, '~': 15,   # unary operators (-, + and ~)
+    '<cast>': 14,
+    '*': 13, '/': 13, '//': 13, '%': 13,
+    '+': 12, '-': 12,
+    '>>': 11, '<<': 11,
+    '&': 10,
+    '^': 9,
+    '|': 8,
+    '==': 7, '!=': 7, '<': 7, '>': 7, '<=': 7, '>=': 7, 'is': 7, 'in': 7,
+    '*u': 7,  # unary * for star expressions
+    'not': 6,
+    'and': 5,
+    'or': 4,
+    '<if>': 3,  # conditional expression
+    '<for>': 2,  # list comprehension
+    ',': 1}
+
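+# A larger number means the operator binds more tightly: '*' (13) binds
+# tighter than '+' (12), so "1 + 2 * 3" parses as "1 + (2 * 3)", and the
+# comparison operators (7) bind tighter than 'not' (6), 'and' (5) and 'or' (4).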
+
+op_assign = set([
+    '+=', '-=', '*=', '/=', '//=', '%=', '**=', '|=', '&=', '^=', '>>=',
+    '<<='])
+
+op_comp = set([
+    '>', '<', '==', '>=', '<=', '<>', '!=', 'is', 'is not', 'in', 'not in'])
+
+none = Token('')  # Empty token
+
+
+def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
+          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+          custom_typing_module: str = None, implicit_any: bool = False) -> MypyFile:
+    """Parse a source file, without doing any semantic analysis.
+
+    Return the parse tree. If errors is not provided, raise ParseError
+    on failure. Otherwise, use the errors object to report parse errors.
+
+    The pyversion (major, minor) argument determines the Python syntax variant.
+    """
+    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
+    parser = Parser(fnam,
+                    errors,
+                    pyversion,
+                    custom_typing_module,
+                    is_stub_file=is_stub_file,
+                    implicit_any=implicit_any)
+    tree = parser.parse(source)
+    tree.path = fnam
+    tree.is_stub = is_stub_file
+    return tree
+
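+# Minimal usage sketch (the file name and source text below are made up):
+#
+#     tree = parse('x = 1\n', fnam='example.py')   # returns a MypyFile
+#     tree.is_stub                                  # -> False ('.py', not '.pyi')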
+
+class Parser:
+    """Mypy parser that parses a string into an AST.
+
+    Parses type annotations in addition to basic Python syntax. It supports both Python 2 and 3
+    (though Python 2 support is incomplete).
+
+    The AST classes are defined in mypy.nodes and mypy.types.
+    """
+
+    tok = None  # type: List[Token]
+    ind = 0
+    errors = None  # type: Errors
+    # If True, raise an exception on any parse error. Otherwise, errors are reported via 'errors'.
+    raise_on_error = False
+
+    # Are we currently parsing the body of a class definition?
+    is_class_body = False
+    # All import nodes encountered so far in this parse unit.
+    imports = None  # type: List[ImportBase]
+    # Names imported from __future__.
+    future_options = None  # type: List[str]
+    # Lines to ignore (using # type: ignore).
+    ignored_lines = None  # type: Set[int]
+
+    def __init__(self, fnam: str, errors: Errors, pyversion: Tuple[int, int],
+                 custom_typing_module: str = None, is_stub_file: bool = False,
+                 implicit_any: bool = False) -> None:
+        self.raise_on_error = errors is None
+        self.pyversion = pyversion
+        self.custom_typing_module = custom_typing_module
+        self.is_stub_file = is_stub_file
+        self.implicit_any = implicit_any
+        if errors is not None:
+            self.errors = errors
+        else:
+            self.errors = Errors()
+        if fnam is not None:
+            self.errors.set_file(fnam)
+        else:
+            self.errors.set_file('<input>')
+
+    def parse(self, s: Union[str, bytes]) -> MypyFile:
+        self.tok, self.ignored_lines = lex.lex(s, pyversion=self.pyversion,
+                                               is_stub_file=self.is_stub_file)
+        self.ind = 0
+        self.imports = []
+        self.future_options = []
+        file = self.parse_file()
+        if self.raise_on_error and self.errors.is_errors():
+            self.errors.raise_error()
+        return file
+
+    def weak_opts(self) -> Set[str]:
+        """Do weak typing if any of the first ten tokens is a comment saying so.
+
+        The comment can be one of:
+        # mypy: weak=global
+        # mypy: weak=local
+        # mypy: weak      <- defaults to local
+        """
+        regexp = re.compile(r'^[\s]*# *mypy: *weak(=?)([^\s]*)', re.M)
+        for t in self.tok[:10]:
+            for s in [t.string, t.pre]:
+                m = regexp.search(s)
+                if m:
+                    opts = set(x for x in m.group(2).split(',') if x)
+                    if not opts:
+                        opts.add('local')
+                    return opts
+        return set()
+
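+    # For example (illustrative): a file whose first line is "# mypy: weak"
+    # yields {'local'}, while "# mypy: weak=global" yields {'global'}.
+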
+    def parse_file(self) -> MypyFile:
+        """Parse a mypy source file."""
+        is_bom = self.parse_bom()
+        defs = self.parse_defs()
+        weak_opts = self.weak_opts()
+        self.expect_type(Eof)
+        # Skip imports that have been ignored (so that we can ignore a C extension module without
+        # stub, for example), except for 'from x import *', because we wouldn't be able to
+        # determine which names should be defined unless we process the module. We can still
+        # ignore errors such as redefinitions when using the latter form.
+        imports = [node for node in self.imports
+                   if node.line not in self.ignored_lines or isinstance(node, ImportAll)]
+        node = MypyFile(defs, imports, is_bom, self.ignored_lines,
+                        weak_opts=weak_opts)
+        return node
+
+    # Parse the initial part
+
+    def parse_bom(self) -> bool:
+        """Parse the optional byte order mark at the beginning of a file."""
+        if isinstance(self.current(), Bom):
+            self.expect_type(Bom)
+            if isinstance(self.current(), Break):
+                self.expect_break()
+            return True
+        else:
+            return False
+
+    def parse_import(self) -> Import:
+        self.expect('import')
+        ids = []
+        while True:
+            id = self.parse_qualified_name()
+            translated = self.translate_module_id(id)
+            as_id = None
+            if self.current_str() == 'as':
+                self.expect('as')
+                name_tok = self.expect_type(Name)
+                as_id = name_tok.string
+            elif translated != id:
+                as_id = id
+            ids.append((translated, as_id))
+            if self.current_str() != ',':
+                break
+            self.expect(',')
+        node = Import(ids)
+        self.imports.append(node)
+        return node
+
+    def translate_module_id(self, id: str) -> str:
+        """Return the actual, internal module id for a source text id.
+
+        For example, translate '__builtin__' in Python 2 to 'builtins'.
+        """
+        if id == self.custom_typing_module:
+            return 'typing'
+        elif id == '__builtin__' and self.pyversion[0] == 2:
+            # HACK: __builtin__ in Python 2 is aliased to builtins. However, the implementation
+            #   is named __builtin__.py (there is another layer of translation elsewhere).
+            return 'builtins'
+        return id
+
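+    # For example (illustrative): in Python 2 mode translate_module_id('__builtin__')
+    # returns 'builtins'; any other id is returned unchanged unless it matches the
+    # configured custom typing module.
+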
+    def parse_import_from(self) -> Node:
+        self.expect('from')
+
+        # Build the list of beginning relative tokens.
+        relative = 0
+        while self.current_str() in (".", "..."):
+            relative += len(self.current_str())
+            self.skip()
+
+        # Parse qualified name to actually import from.
+        if self.current_str() == "import":
+            # Empty/default values.
+            name = ""
+        else:
+            name = self.parse_qualified_name()
+
+        name = self.translate_module_id(name)
+
+        # Parse import list
+        self.expect('import')
+        node = None  # type: ImportBase
+        if self.current_str() == '*':
+            if name == '__future__':
+                self.parse_error()
+            # An import all from a module node:
+            self.skip()
+            node = ImportAll(name, relative)
+        else:
+            is_paren = self.current_str() == '('
+            if is_paren:
+                self.expect('(')
+            targets = []  # type: List[Tuple[str, str]]
+            while True:
+                id, as_id = self.parse_import_name()
+                if '%s.%s' % (name, id) == self.custom_typing_module:
+                    if targets or self.current_str() == ',':
+                        self.fail('You cannot import any other modules when you '
+                                  'import a custom typing module',
+                                  self.current().line)
+                    node = Import([('typing', as_id)])
+                    self.skip_until_break()
+                    break
+                targets.append((id, as_id))
+                if self.current_str() != ',':
+                    break
+                self.expect(',')
+                if is_paren and self.current_str() == ')':
+                    break
+            if is_paren:
+                self.expect(')')
+            if node is None:
+                node = ImportFrom(name, relative, targets)
+        self.imports.append(node)
+        if name == '__future__':
+            self.future_options.extend(target[0] for target in targets)
+        return node
+
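+    # Illustrative example: "from ..pkg import a as b, c" produces roughly
+    # ImportFrom('pkg', 2, [('a', 'b'), ('c', None)]), where 2 counts the
+    # leading dots of the relative import.
+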
+    def parse_import_name(self) -> Tuple[str, Optional[str]]:
+        tok = self.expect_type(Name)
+        name = tok.string
+        if self.current_str() == 'as':
+            self.skip()
+            as_name = self.expect_type(Name)
+            return name, as_name.string
+        else:
+            return name, None
+
+    def parse_qualified_name(self) -> str:
+        """Parse a name with an optional module qualifier.
+
+        Return the qualified name as a single dotted string.
+        """
+        tok = self.expect_type(Name)
+        n = tok.string
+        while self.current_str() == '.':
+            self.expect('.')
+            tok = self.expect_type(Name)
+            n += '.' + tok.string
+        return n
+
+    # Parsing global definitions
+
+    def parse_defs(self) -> List[Node]:
+        defs = []  # type: List[Node]
+        while not self.eof():
+            try:
+                defn, is_simple = self.parse_statement()
+                if is_simple:
+                    self.expect_break()
+                if defn is not None:
+                    if not self.try_combine_overloads(defn, defs):
+                        defs.append(defn)
+            except ParseError:
+                pass
+        return defs
+
+    def parse_class_def(self) -> ClassDef:
+        old_is_class_body = self.is_class_body
+        self.is_class_body = True
+
+        self.expect('class')
+        metaclass = None
+
+        try:
+            commas, base_types = [], []  # type: List[Token], List[Node]
+            try:
+                name_tok = self.expect_type(Name)
+                name = name_tok.string
+
+                self.errors.push_type(name)
+
+                if self.current_str() == '(':
+                    self.skip()
+                    while True:
+                        if self.current_str() == ')':
+                            break
+                        if self.current_str() == 'metaclass':
+                            metaclass = self.parse_metaclass()
+                            break
+                        base_types.append(self.parse_super_type())
+                        if self.current_str() != ',':
+                            break
+                        commas.append(self.skip())
+                    self.expect(')')
+            except ParseError:
+                pass
+
+            defs, _ = self.parse_block()
+
+            node = ClassDef(name, defs, None, base_types, metaclass=metaclass)
+            return node
+        finally:
+            self.errors.pop_type()
+            self.is_class_body = old_is_class_body
+
+    def parse_super_type(self) -> Node:
+        return self.parse_expression(precedence[','])
+
+    def parse_metaclass(self) -> str:
+        self.expect('metaclass')
+        self.expect('=')
+        return self.parse_qualified_name()
+
+    def parse_decorated_function_or_class(self) -> Node:
+        decorators = []
+        no_type_checks = False
+        while self.current_str() == '@':
+            self.expect('@')
+            d_exp = self.parse_expression()
+            if self.is_no_type_check_decorator(d_exp):
+                no_type_checks = True
+            decorators.append(d_exp)
+            self.expect_break()
+        if self.current_str() != 'class':
+            func = self.parse_function(no_type_checks)
+            func.is_decorated = True
+            var = Var(func.name())
+            # Types of decorated functions must always be inferred.
+            var.is_ready = False
+            var.set_line(decorators[0].line)
+            node = Decorator(func, decorators, var)
+            return node
+        else:
+            cls = self.parse_class_def()
+            cls.decorators = decorators
+            return cls
+
+    def is_no_type_check_decorator(self, expr: Node) -> bool:
+        if isinstance(expr, NameExpr):
+            return expr.name == 'no_type_check'
+        elif isinstance(expr, MemberExpr):
+            if isinstance(expr.expr, NameExpr):
+                return expr.expr.name == 'typing' and expr.name == 'no_type_check'
+        else:
+            return False
+
+    def parse_function(self, no_type_checks: bool=False) -> FuncDef:
+        def_tok = self.expect('def')
+        is_method = self.is_class_body
+        self.is_class_body = False
+        try:
+            (name, args, typ, is_error, extra_stmts) = self.parse_function_header(no_type_checks)
+
+            arg_kinds = [arg.kind for arg in args]
+            arg_names = [arg.variable.name() for arg in args]
+
+            body, comment_type = self.parse_block(allow_type=True)
+            # Potentially insert extra assignment statements to the beginning of the
+            # body, used to decompose Python 2 tuple arguments.
+            body.body[:0] = extra_stmts
+            if comment_type:
+                # The function has a # type: ... signature.
+                if typ:
+                    self.errors.report(
+                        def_tok.line, 'Function has duplicate type signatures')
+                sig = cast(CallableType, comment_type)
+                if is_method and len(sig.arg_kinds) < len(arg_kinds):
+                    self.check_argument_kinds(arg_kinds,
+                                              [nodes.ARG_POS] + sig.arg_kinds,
+                                              def_tok.line)
+                    # Add implicit 'self' argument to signature.
+                    first_arg = [AnyType()]  # type: List[Type]
+                    typ = CallableType(
+                        first_arg + sig.arg_types,
+                        arg_kinds,
+                        arg_names,
+                        sig.ret_type,
+                        None,
+                        line=def_tok.line)
+                else:
+                    self.check_argument_kinds(arg_kinds, sig.arg_kinds,
+                                              def_tok.line)
+                    typ = CallableType(
+                        sig.arg_types,
+                        arg_kinds,
+                        arg_names,
+                        sig.ret_type,
+                        None,
+                        line=def_tok.line)
+
+            # If there was a serious error, we really cannot build a parse tree
+            # node.
+            if is_error:
+                return None
+
+            # add implicit anys
+            if typ is None and self.implicit_any and not self.is_stub_file:
+                ret_type = None  # type: Type
+                if is_method and name == '__init__':
+                    ret_type = UnboundType('None', [])
+                else:
+                    ret_type = AnyType()
+                typ = CallableType([AnyType() for _ in args],
+                                   arg_kinds,
+                                   [a.variable.name() for a in args],
+                                   ret_type,
+                                   None,
+                                   line=def_tok.line)
+
+            node = FuncDef(name, args, body, typ)
+            node.set_line(def_tok)
+            if typ is not None:
+                typ.definition = node
+            return node
+        finally:
+            self.errors.pop_function()
+            self.is_class_body = is_method
+
+    def check_argument_kinds(self, funckinds: List[int], sigkinds: List[int],
+                             line: int) -> None:
+        """Check that arguments are consistent.
+
+        This verifies that they have the same number and the kinds correspond.
+
+        Arguments:
+          funckinds: kinds of arguments in function definition
+          sigkinds:  kinds of arguments in signature (after # type:)
+        """
+        if len(funckinds) != len(sigkinds):
+            if len(funckinds) > len(sigkinds):
+                self.fail("Type signature has too few arguments", line)
+            else:
+                self.fail("Type signature has too many arguments", line)
+            return
+        for kind, token in [(nodes.ARG_STAR, '*'),
+                            (nodes.ARG_STAR2, '**')]:
+            if ((funckinds.count(kind) != sigkinds.count(kind)) or
+                    (kind in funckinds and sigkinds.index(kind) != funckinds.index(kind))):
+                self.fail(
+                    "Inconsistent use of '{}' in function "
+                    "signature".format(token), line)
+
+    def parse_function_header(
+            self, no_type_checks: bool=False) -> Tuple[str,
+                                                       List[Argument],
+                                                       CallableType,
+                                                       bool,
+                                                       List[AssignmentStmt]]:
+        """Parse function header (a name followed by arguments)
+
+        Return a 5-tuple with the following items:
+          name
+          arguments
+          signature (annotation)
+          error flag (True if error)
+          extra statements needed to decompose arguments (usually empty)
+
+        See parse_arg_list for an explanation of the final tuple item.
+        """
+        name = ''
+
+        try:
+            name_tok = self.expect_type(Name)
+            name = name_tok.string
+
+            self.errors.push_function(name)
+
+            args, typ, extra_stmts = self.parse_args(no_type_checks)
+        except ParseError:
+            if not isinstance(self.current(), Break):
+                self.ind -= 1  # Kludge: go back to the Break token
+            # Resynchronise parsing by going back over :, if present.
+            if isinstance(self.tok[self.ind - 1], Colon):
+                self.ind -= 1
+            return (name, [], None, True, [])
+
+        return (name, args, typ, False, extra_stmts)
+
+    def parse_args(self, no_type_checks: bool=False) -> Tuple[List[Argument],
+                                                              CallableType,
+                                                              List[AssignmentStmt]]:
+        """Parse a function signature (...) [-> t].
+
+        See parse_arg_list for an explanation of the final tuple item.
+        """
+        lparen = self.expect('(')
+
+        # Parse the argument list (everything within '(' and ')').
+        args, extra_stmts = self.parse_arg_list(no_type_checks=no_type_checks)
+
+        self.expect(')')
+
+        if self.current_str() == '->':
+            self.skip()
+            if no_type_checks:
+                self.parse_expression()
+                ret_type = None
+            else:
+                ret_type = self.parse_type()
+        else:
+            ret_type = None
+
+        arg_kinds = [arg.kind for arg in args]
+        self.verify_argument_kinds(arg_kinds, lparen.line)
+
+        annotation = self.build_func_annotation(
+            ret_type, args, lparen.line)
+
+        return args, annotation, extra_stmts
+
+    def build_func_annotation(self, ret_type: Type, args: List[Argument],
+            line: int, is_default_ret: bool = False) -> CallableType:
+        arg_types = [arg.type_annotation for arg in args]
+        # Are there any type annotations?
+        if ((ret_type and not is_default_ret)
+                or arg_types != [None] * len(arg_types)):
+            # Yes. Construct a type for the function signature.
+            return self.construct_function_type(args, ret_type, line)
+        else:
+            return None
+
+    def parse_arg_list(self, allow_signature: bool = True,
+            no_type_checks: bool=False) -> Tuple[List[Argument],
+                                                 List[AssignmentStmt]]:
+        """Parse function definition argument list.
+
+        This includes everything between '(' and ')' (but not the
+        parentheses).
+
+        Return tuple (arguments,
+                      extra statements for decomposing arguments).
+
+        The second tuple item is only used for Python 2 argument lists that
+        contain tuple arguments; it holds the destructuring assignment
+        statements used to decompose them. For example, consider a header
+        like this:
+
+        . def f((x, y))
+
+        The actual (sole) argument will be __tuple_arg_1 (a generated
+        name), whereas the extra statement list will contain a single
+        assignment statement corresponding to this assignment:
+
+          x, y = __tuple_arg_1
+        """
+        args = []  # type: List[Argument]
+        extra_stmts = []
+        # This is for checking duplicate argument names.
+        arg_names = []  # type: List[str]
+        has_tuple_arg = False
+
+        require_named = False
+        bare_asterisk_before = -1
+
+        if self.current_str() != ')' and self.current_str() != ':':
+            while self.current_str() != ')':
+                if self.current_str() == '*' and self.peek().string == ',':
+                    self.expect('*')
+                    require_named = True
+                    bare_asterisk_before = len(args)
+                elif self.current_str() in ['*', '**']:
+                    if bare_asterisk_before == len(args):
+                        # named arguments must follow bare *
+                        self.parse_error()
+
+                    arg = self.parse_asterisk_arg(
+                        allow_signature,
+                        no_type_checks,
+                    )
+                    args.append(arg)
+                    require_named = True
+                elif self.current_str() == '(':
+                    arg, extra_stmt, names = self.parse_tuple_arg(len(args))
+                    args.append(arg)
+                    extra_stmts.append(extra_stmt)
+                    arg_names.extend(names)
+                    has_tuple_arg = True
+                else:
+                    arg, require_named = self.parse_normal_arg(
+                        require_named,
+                        allow_signature,
+                        no_type_checks,
+                    )
+                    args.append(arg)
+                    arg_names.append(arg.variable.name())
+
+                if self.current().string != ',':
+                    break
+
+                self.expect(',')
+
+        # Non-tuple argument dupes will be checked elsewhere. Avoid
+        # generating duplicate errors.
+        if has_tuple_arg:
+            self.check_duplicate_argument_names(arg_names)
+
+        return args, extra_stmts
+
+    def check_duplicate_argument_names(self, names: List[str]) -> None:
+        found = set()  # type: Set[str]
+        for name in names:
+            if name in found:
+                self.fail('Duplicate argument name "{}"'.format(name),
+                          self.current().line)
+            found.add(name)
+
+    def parse_asterisk_arg(self,
+            allow_signature: bool,
+            no_type_checks: bool) -> Argument:
+        asterisk = self.skip()
+        name = self.expect_type(Name)
+        variable = Var(name.string)
+        if asterisk.string == '*':
+            kind = nodes.ARG_STAR
+        else:
+            kind = nodes.ARG_STAR2
+
+        type = None
+        if no_type_checks:
+            self.parse_parameter_annotation()
+        else:
+            type = self.parse_arg_type(allow_signature)
+
+        return Argument(variable, type, None, kind)
+
+    def parse_tuple_arg(self, index: int) -> Tuple[Argument, AssignmentStmt, List[str]]:
+        """Parse a single Python 2 tuple argument.
+
+        Example: def f(x, (y, z)): ...
+
+        The tuple argument gets transformed into an assignment in the
+        function body (the second return value).
+
+        Return tuple (argument, decomposing assignment, list of names defined).
+        """
+        line = self.current().line
+        # Generate a new argument name that is very unlikely to clash with anything.
+        arg_name = '__tuple_arg_{}'.format(index + 1)
+        if self.pyversion[0] >= 3:
+            self.fail('Tuples in argument lists only supported in Python 2 mode', line)
+        paren_arg = self.parse_parentheses()
+        self.verify_tuple_arg(paren_arg)
+        if isinstance(paren_arg, NameExpr):
+            # This isn't a tuple. Revert to a normal argument. We'll still get a no-op
+            # assignment below but that's benign.
+            arg_name = paren_arg.name
+        rvalue = NameExpr(arg_name)
+        rvalue.set_line(line)
+        decompose = AssignmentStmt([paren_arg], rvalue)
+        decompose.set_line(line)
+        kind = nodes.ARG_POS
+        initializer = None
+        if self.current_str() == '=':
+            self.expect('=')
+            initializer = self.parse_expression(precedence[','])
+            kind = nodes.ARG_OPT
+        var = Var(arg_name)
+        arg_names = self.find_tuple_arg_argument_names(paren_arg)
+        return Argument(var, None, initializer, kind), decompose, arg_names
+
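+    # Illustrative example: in Python 2 mode, "def f(x, (y, z)): ..." yields a
+    # generated argument named __tuple_arg_2 for the second parameter, plus the
+    # extra statement "(y, z) = __tuple_arg_2" prepended to the function body.
+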
+    def verify_tuple_arg(self, paren_arg: Node) -> None:
+        if isinstance(paren_arg, TupleExpr):
+            if not paren_arg.items:
+                self.fail('Empty tuple not valid as an argument', paren_arg.line)
+            for item in paren_arg.items:
+                self.verify_tuple_arg(item)
+        elif not isinstance(paren_arg, NameExpr):
+            self.fail('Invalid item in tuple argument', paren_arg.line)
+
+    def find_tuple_arg_argument_names(self, node: Node) -> List[str]:
+        result = []  # type: List[str]
+        if isinstance(node, TupleExpr):
+            for item in node.items:
+                result.extend(self.find_tuple_arg_argument_names(item))
+        elif isinstance(node, NameExpr):
+            result.append(node.name)
+        return result
+
+    def parse_normal_arg(self, require_named: bool,
+            allow_signature: bool,
+            no_type_checks: bool) -> Tuple[Argument, bool]:
+        name = self.expect_type(Name)
+        variable = Var(name.string)
+
+        type = None
+        if no_type_checks:
+            self.parse_parameter_annotation()
+        else:
+            type = self.parse_arg_type(allow_signature)
+
+        initializer = None  # type: Node
+        if self.current_str() == '=':
+            self.expect('=')
+            initializer = self.parse_expression(precedence[','])
+            if require_named:
+                kind = nodes.ARG_NAMED
+            else:
+                kind = nodes.ARG_OPT
+        else:
+            if require_named:
+                kind = nodes.ARG_NAMED
+            else:
+                kind = nodes.ARG_POS
+
+        return Argument(variable, type, initializer, kind), require_named
+
+    def parse_parameter_annotation(self) -> Node:
+        if self.current_str() == ':':
+            self.skip()
+            return self.parse_expression(precedence[','])
+
+    def parse_arg_type(self, allow_signature: bool) -> Type:
+        if self.current_str() == ':' and allow_signature:
+            self.skip()
+            return self.parse_type()
+        else:
+            return None
+
+    def verify_argument_kinds(self, kinds: List[int], line: int) -> None:
+        found = set()  # type: Set[int]
+        for i, kind in enumerate(kinds):
+            if kind == nodes.ARG_POS and found & set([nodes.ARG_OPT,
+                                                      nodes.ARG_STAR,
+                                                      nodes.ARG_STAR2]):
+                self.fail('Invalid argument list', line)
+            elif kind == nodes.ARG_STAR and nodes.ARG_STAR in found:
+                self.fail('Invalid argument list', line)
+            elif kind == nodes.ARG_STAR2 and i != len(kinds) - 1:
+                self.fail('Invalid argument list', line)
+            found.add(kind)
+
+    def construct_function_type(self, args: List[Argument], ret_type: Type,
+                                line: int) -> CallableType:
+        # Complete the type annotation by replacing omitted types with 'Any'.
+        arg_types = [arg.type_annotation for arg in args]
+        for i in range(len(arg_types)):
+            if arg_types[i] is None:
+                arg_types[i] = AnyType()
+        if ret_type is None:
+            ret_type = AnyType()
+        arg_kinds = [arg.kind for arg in args]
+        arg_names = [arg.variable.name() for arg in args]
+        return CallableType(arg_types, arg_kinds, arg_names, ret_type, None, name=None,
+                        variables=None, bound_vars=[], line=line)
+
+    # Parsing statements
+
+    def parse_block(self, allow_type: bool = False) -> Tuple[Block, Type]:
+        colon = self.expect(':')
+        if not isinstance(self.current(), Break):
+            # Block immediately after ':'.
+            nodes = []
+            while True:
+                ind = self.ind
+                stmt, is_simple = self.parse_statement()
+                if not is_simple:
+                    self.parse_error_at(self.tok[ind])
+                    break
+                nodes.append(stmt)
+                brk = self.expect_break()
+                if brk.string != ';':
+                    break
+            node = Block(nodes)
+            node.set_line(colon)
+            return node, None
+        else:
+            # Indented block.
+            brk = self.expect_break()
+            type = self.parse_type_comment(brk, signature=True)
+            self.expect_indent()
+            stmt_list = []  # type: List[Node]
+            if allow_type:
+                cur = self.current()
+                if type is None and isinstance(cur, StrLit):
+                    ds = docstring.parse_docstring(cast(StrLit, cur).parsed())
+                    if ds and False:  # TODO: Enable when this is working.
+                        try:
+                            type = parse_str_as_signature(ds.as_type_str(), cur.line)
+                        except TypeParseError:
+                            # We don't require docstrings to be actually correct.
+                            # TODO: Report something here.
+                            type = None
+            while (not isinstance(self.current(), Dedent) and
+                   not isinstance(self.current(), Eof)):
+                try:
+                    stmt, is_simple = self.parse_statement()
+                    if is_simple:
+                        self.expect_break()
+                    if stmt is not None:
+                        if not self.try_combine_overloads(stmt, stmt_list):
+                            stmt_list.append(stmt)
+                except ParseError:
+                    pass
+            if isinstance(self.current(), Dedent):
+                self.skip()
+            node = Block(stmt_list)
+            node.set_line(colon)
+            return node, type
+
+    def try_combine_overloads(self, s: Node, stmt: List[Node]) -> bool:
+        if isinstance(s, Decorator) and stmt:
+            fdef = cast(Decorator, s)
+            n = fdef.func.name()
+            if (isinstance(stmt[-1], Decorator) and
+                    (cast(Decorator, stmt[-1])).func.name() == n):
+                stmt[-1] = OverloadedFuncDef([cast(Decorator, stmt[-1]), fdef])
+                return True
+            elif (isinstance(stmt[-1], OverloadedFuncDef) and
+                    (cast(OverloadedFuncDef, stmt[-1])).name() == n):
+                (cast(OverloadedFuncDef, stmt[-1])).items.append(fdef)
+                return True
+        return False
+
+    def parse_statement(self) -> Tuple[Node, bool]:
+        stmt = None  # type: Node
+        t = self.current()
+        ts = self.current_str()
+        is_simple = True  # Is this a non-block statement?
+        if ts == 'if':
+            stmt = self.parse_if_stmt()
+            is_simple = False
+        elif ts == 'def':
+            stmt = self.parse_function()
+            is_simple = False
+        elif ts == 'while':
+            stmt = self.parse_while_stmt()
+            is_simple = False
+        elif ts == 'return':
+            stmt = self.parse_return_stmt()
+        elif ts == 'for':
+            stmt = self.parse_for_stmt()
+            is_simple = False
+        elif ts == 'try':
+            stmt = self.parse_try_stmt()
+            is_simple = False
+        elif ts == 'break':
+            stmt = self.parse_break_stmt()
+        elif ts == 'continue':
+            stmt = self.parse_continue_stmt()
+        elif ts == 'pass':
+            stmt = self.parse_pass_stmt()
+        elif ts == 'raise':
+            stmt = self.parse_raise_stmt()
+        elif ts == 'import':
+            stmt = self.parse_import()
+        elif ts == 'from':
+            stmt = self.parse_import_from()
+        elif ts == 'class':
+            stmt = self.parse_class_def()
+            is_simple = False
+        elif ts == 'global':
+            stmt = self.parse_global_decl()
+        elif ts == 'nonlocal' and self.pyversion[0] >= 3:
+            stmt = self.parse_nonlocal_decl()
+        elif ts == 'assert':
+            stmt = self.parse_assert_stmt()
+        elif ts == 'del':
+            stmt = self.parse_del_stmt()
+        elif ts == 'with':
+            stmt = self.parse_with_stmt()
+            is_simple = False
+        elif ts == '@':
+            stmt = self.parse_decorated_function_or_class()
+            is_simple = False
+        elif ts == 'print' and (self.pyversion[0] == 2 and
+                                'print_function' not in self.future_options):
+            stmt = self.parse_print_stmt()
+        elif ts == 'exec' and self.pyversion[0] == 2:
+            stmt = self.parse_exec_stmt()
+        else:
+            stmt = self.parse_expression_or_assignment()
+        if stmt is not None:
+            stmt.set_line(t)
+        return stmt, is_simple
+
+    def parse_expression_or_assignment(self) -> Node:
+        expr = self.parse_expression(star_expr_allowed=True)
+        if self.current_str() == '=':
+            return self.parse_assignment(expr)
+        elif self.current_str() in op_assign:
+            # Operator assignment statement.
+            op = self.current_str()[:-1]
+            self.skip()
+            rvalue = self.parse_expression()
+            return OperatorAssignmentStmt(op, expr, rvalue)
+        else:
+            # Expression statement.
+            return ExpressionStmt(expr)
+
+    def parse_assignment(self, lvalue: Any) -> Node:
+        """Parse an assignment statement.
+
+        Assume that lvalue has been parsed already, and the current token is '='.
+        Also parse an optional '# type:' comment.
+        """
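+        # Illustrative example (not from the original source): for
+        #     x = y = compute()  # type: int
+        # this returns an AssignmentStmt with lvalues [x, y], the rvalue
+        # compute(), and the type parsed from the trailing '# type:' comment.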
+        self.expect('=')
+        lvalues = [lvalue]
+        expr = self.parse_expression(star_expr_allowed=True)
+        while self.current_str() == '=':
+            self.skip()
+            lvalues.append(expr)
+            expr = self.parse_expression(star_expr_allowed=True)
+        cur = self.current()
+        if isinstance(cur, Break):
+            type = self.parse_type_comment(cur, signature=False)
+        else:
+            type = None
+        return AssignmentStmt(lvalues, expr, type)
+
+    def parse_return_stmt(self) -> ReturnStmt:
+        self.expect('return')
+        expr = None
+        current = self.current()
+        if current.string == 'yield':
+            self.parse_error()
+        if not isinstance(current, Break):
+            expr = self.parse_expression()
+        node = ReturnStmt(expr)
+        return node
+
+    def parse_raise_stmt(self) -> RaiseStmt:
+        self.expect('raise')
+        expr = None
+        from_expr = None
+        if not isinstance(self.current(), Break):
+            expr = self.parse_expression()
+            if self.current_str() == 'from':
+                self.expect('from')
+                from_expr = self.parse_expression()
+        node = RaiseStmt(expr, from_expr)
+        return node
+
+    def parse_assert_stmt(self) -> AssertStmt:
+        self.expect('assert')
+        expr = self.parse_expression()
+        node = AssertStmt(expr)
+        return node
+
+    def parse_yield_or_yield_from_expr(self) -> Union[YieldFromExpr, YieldExpr]:
+        self.expect("yield")
+        expr = None
+        node = YieldExpr(expr)  # type: Union[YieldFromExpr, YieldExpr]
+        if not isinstance(self.current(), Break):
+            if self.current_str() == "from":
+                self.expect("from")
+                expr = self.parse_expression()  # The operand of 'yield from'.
+                node = YieldFromExpr(expr)
+            else:
+                if self.current_str() == ')':
+                    node = YieldExpr(None)
+                else:
+                    expr = self.parse_expression()
+                    node = YieldExpr(expr)
+        return node
+
+    def parse_ellipsis(self) -> EllipsisExpr:
+        self.expect('...')
+        node = EllipsisExpr()
+        return node
+
+    def parse_del_stmt(self) -> DelStmt:
+        self.expect('del')
+        expr = self.parse_expression()
+        node = DelStmt(expr)
+        return node
+
+    def parse_break_stmt(self) -> BreakStmt:
+        self.expect('break')
+        node = BreakStmt()
+        return node
+
+    def parse_continue_stmt(self) -> ContinueStmt:
+        self.expect('continue')
+        node = ContinueStmt()
+        return node
+
+    def parse_pass_stmt(self) -> PassStmt:
+        self.expect('pass')
+        node = PassStmt()
+        return node
+
+    def parse_global_decl(self) -> GlobalDecl:
+        self.expect('global')
+        names = self.parse_identifier_list()
+        node = GlobalDecl(names)
+        return node
+
+    def parse_nonlocal_decl(self) -> NonlocalDecl:
+        self.expect('nonlocal')
+        names = self.parse_identifier_list()
+        node = NonlocalDecl(names)
+        return node
+
+    def parse_identifier_list(self) -> List[str]:
+        names = []
+        while True:
+            n = self.expect_type(Name)
+            names.append(n.string)
+            if self.current_str() != ',':
+                break
+            self.skip()
+        return names
+
+    def parse_while_stmt(self) -> WhileStmt:
+        is_error = False
+        self.expect('while')
+        try:
+            expr = self.parse_expression()
+        except ParseError:
+            is_error = True
+        body, _ = self.parse_block()
+        if self.current_str() == 'else':
+            self.expect('else')
+            else_body, _ = self.parse_block()
+        else:
+            else_body = None
+        if not is_error:
+            node = WhileStmt(expr, body, else_body)
+            return node
+        else:
+            return None
+
+    def parse_for_stmt(self) -> ForStmt:
+        self.expect('for')
+        index = self.parse_for_index_variables()
+        self.expect('in')
+        expr = self.parse_expression()
+
+        body, _ = self.parse_block()
+
+        if self.current_str() == 'else':
+            self.expect('else')
+            else_body, _ = self.parse_block()
+        else:
+            else_body = None
+
+        node = ForStmt(index, expr, body, else_body)
+        return node
+
+    def parse_for_index_variables(self) -> Node:
+        # Parse index variables of a 'for' statement.
+        index_items = []
+        force_tuple = False
+
+        while True:
+            v = self.parse_expression(precedence['in'],
+                                      star_expr_allowed=True)  # Prevent parsing of for stmt 'in'
+            index_items.append(v)
+            if self.current_str() != ',':
+                break
+            self.skip()
+            if self.current_str() == 'in':
+                force_tuple = True
+                break
+
+        if len(index_items) == 1 and not force_tuple:
+            index = index_items[0]
+        else:
+            index = TupleExpr(index_items)
+            index.set_line(index_items[0].get_line())
+
+        return index
+
+    def parse_if_stmt(self) -> IfStmt:
+        is_error = False
+
+        self.expect('if')
+        expr = []
+        try:
+            expr.append(self.parse_expression())
+        except ParseError:
+            is_error = True
+
+        body = [self.parse_block()[0]]
+
+        while self.current_str() == 'elif':
+            self.expect('elif')
+            try:
+                expr.append(self.parse_expression())
+            except ParseError:
+                is_error = True
+            body.append(self.parse_block()[0])
+
+        if self.current_str() == 'else':
+            self.expect('else')
+            else_body, _ = self.parse_block()
+        else:
+            else_body = None
+
+        if not is_error:
+            node = IfStmt(expr, body, else_body)
+            return node
+        else:
+            return None
+
+    def parse_try_stmt(self) -> Node:
+        self.expect('try')
+        body, _ = self.parse_block()
+        is_error = False
+        vars = []  # type: List[NameExpr]
+        types = []  # type: List[Node]
+        handlers = []  # type: List[Block]
+        while self.current_str() == 'except':
+            self.expect('except')
+            if not isinstance(self.current(), Colon):
+                try:
+                    t = self.current()
+                    types.append(self.parse_expression(precedence[',']).set_line(t))
+                    if self.current_str() == 'as':
+                        self.expect('as')
+                        vars.append(self.parse_name_expr())
+                    elif self.pyversion[0] == 2 and self.current_str() == ',':
+                        self.expect(',')
+                        vars.append(self.parse_name_expr())
+                    else:
+                        vars.append(None)
+                except ParseError:
+                    is_error = True
+            else:
+                types.append(None)
+                vars.append(None)
+            handlers.append(self.parse_block()[0])
+        if not is_error:
+            if self.current_str() == 'else':
+                self.skip()
+                else_body, _ = self.parse_block()
+            else:
+                else_body = None
+            if self.current_str() == 'finally':
+                self.expect('finally')
+                finally_body, _ = self.parse_block()
+            else:
+                finally_body = None
+            node = TryStmt(body, vars, types, handlers, else_body,
+                           finally_body)
+            return node
+        else:
+            return None
+
+    def parse_with_stmt(self) -> WithStmt:
+        self.expect('with')
+        exprs = []
+        targets = []
+        while True:
+            expr = self.parse_expression(precedence[','])
+            if self.current_str() == 'as':
+                self.expect('as')
+                target = self.parse_expression(precedence[','])
+            else:
+                target = None
+            exprs.append(expr)
+            targets.append(target)
+            if self.current_str() != ',':
+                break
+            self.expect(',')
+        body, _ = self.parse_block()
+        return WithStmt(exprs, targets, body)
+
+    def parse_print_stmt(self) -> PrintStmt:
+        self.expect('print')
+        args = []
+        target = None
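+        # Handle the Python 2 redirect form 'print >> target, ...'.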
+        if self.current_str() == '>>':
+            self.skip()
+            target = self.parse_expression(precedence[','])
+            if self.current_str() == ',':
+                self.skip()
+                if isinstance(self.current(), Break):
+                    self.parse_error()
+            else:
+                if not isinstance(self.current(), Break):
+                    self.parse_error()
+        comma = False
+        while not isinstance(self.current(), Break):
+            args.append(self.parse_expression(precedence[',']))
+            if self.current_str() == ',':
+                comma = True
+                self.skip()
+            else:
+                comma = False
+                break
+        return PrintStmt(args, newline=not comma, target=target)
+
+    def parse_exec_stmt(self) -> ExecStmt:
+        self.expect('exec')
+        expr = self.parse_expression(precedence['in'])
+        variables1 = None
+        variables2 = None
+        if self.current_str() == 'in':
+            self.skip()
+            variables1 = self.parse_expression(precedence[','])
+            if self.current_str() == ',':
+                self.skip()
+                variables2 = self.parse_expression(precedence[','])
+        return ExecStmt(expr, variables1, variables2)
+
+    # Parsing expressions
+
+    def parse_expression(self, prec: int = 0, star_expr_allowed: bool = False) -> Node:
+        """Parse a subexpression within a specific precedence context."""
+        expr = None  # type: Node
+        current = self.current()  # Remember token for setting the line number.
+
+        # Parse a "value" expression or unary operator expression and store
+        # that in expr.
+        s = self.current_str()
+        if s == '(':
+            # Parenthesised expression or cast.
+            expr = self.parse_parentheses()
+        elif s == '[':
+            expr = self.parse_list_expr()
+        elif s in ['-', '+', 'not', '~']:
+            # Unary operation.
+            expr = self.parse_unary_expr()
+        elif s == 'lambda':
+            expr = self.parse_lambda_expr()
+        elif s == '{':
+            expr = self.parse_dict_or_set_expr()
+        elif s == '*' and star_expr_allowed:
+            expr = self.parse_star_expr()
+        elif s == '`' and self.pyversion[0] == 2:
+            expr = self.parse_backquote_expr()
+        else:
+            if isinstance(current, Name):
+                # Name expression.
+                expr = self.parse_name_expr()
+            elif isinstance(current, IntLit):
+                expr = self.parse_int_expr()
+            elif isinstance(current, StrLit):
+                expr = self.parse_str_expr()
+            elif isinstance(current, BytesLit):
+                expr = self.parse_bytes_literal()
+            elif isinstance(current, UnicodeLit):
+                expr = self.parse_unicode_literal()
+            elif isinstance(current, FloatLit):
+                expr = self.parse_float_expr()
+            elif isinstance(current, ComplexLit):
+                expr = self.parse_complex_expr()
+            elif isinstance(current, Keyword) and s == "yield":
+                # A 'yield' or 'yield from' expression.
+                expr = self.parse_yield_or_yield_from_expr()
+            elif isinstance(current, EllipsisToken) and (self.pyversion[0] >= 3
+                                                         or self.is_stub_file):
+                expr = self.parse_ellipsis()
+            else:
+                # Invalid expression.
+                self.parse_error()
+
+        # Set the line of the expression node, if not specified. This
+        # simplifies recording the line number as not every node type needs to
+        # deal with it separately.
+        if expr.line < 0:
+            expr.set_line(current)
+
+        # Parse operations that require a left argument (stored in expr).
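+        # Illustrative example (not from the original source): for 'a + b * c'
+        # with prec == 0, '+' is accepted here and the right operand is parsed
+        # at the precedence of '+', so '*' binds tighter and the result is
+        # OpExpr('+', a, OpExpr('*', b, c)).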
+        while True:
+            current = self.current()
+            s = self.current_str()
+            if s == '(':
+                # Call expression.
+                expr = self.parse_call_expr(expr)
+            elif s == '.':
+                # Member access expression.
+                expr = self.parse_member_expr(expr)
+            elif s == '[':
+                # Indexing expression.
+                expr = self.parse_index_expr(expr)
+            elif s == ',':
+                # The comma operator is used to build tuples. Comma also
+                # separates list items and function arguments, but in this
+                # case the precedence is too low to build a tuple.
+                if precedence[','] > prec:
+                    expr = self.parse_tuple_expr(expr)
+                else:
+                    break
+            elif s == 'for':
+                if precedence['<for>'] > prec:
+                    # List comprehension or generator expression. Parse as
+                    # generator expression; it will be converted to list
+                    # comprehension if needed elsewhere.
+                    expr = self.parse_generator_expr(expr)
+                else:
+                    break
+            elif s == 'if':
+                # Conditional expression.
+                if precedence['<if>'] > prec:
+                    expr = self.parse_conditional_expr(expr)
+                else:
+                    break
+            else:
+                # Binary operation or a special case.
+                if isinstance(current, Op):
+                    op = self.current_str()
+                    op_prec = precedence[op]
+                    if op == 'not':
+                        # Either "not in" or an error.
+                        op_prec = precedence['in']
+                    if op_prec > prec:
+                        if op in op_comp:
+                            expr = self.parse_comparison_expr(expr, op_prec)
+                        else:
+                            expr = self.parse_bin_op_expr(expr, op_prec)
+                    else:
+                        # The operation cannot be associated with the
+                        # current left operand due to the precedence
+                        # context; let the caller handle it.
+                        break
+                else:
+                    # Not an operation that accepts a left argument; let the
+                    # caller handle the rest.
+                    break
+
+            # Set the line of the expression node, if not specified. This
+            # simplifies recording the line number as not every node type
+            # needs to deal with it separately.
+            if expr.line < 0:
+                expr.set_line(current)
+
+        return expr
+
+    def parse_parentheses(self) -> Node:
+        self.skip()
+        if self.current_str() == ')':
+            # Empty tuple ().
+            expr = self.parse_empty_tuple_expr()  # type: Node
+        else:
+            # Parenthesised expression.
+            expr = self.parse_expression(0, star_expr_allowed=True)
+            self.expect(')')
+        return expr
+
+    def parse_star_expr(self) -> Node:
+        star = self.expect('*')
+        expr = self.parse_expression(precedence['*u'])
+        expr = StarExpr(expr)
+        if expr.line < 0:
+            expr.set_line(star)
+        return expr
+
+    def parse_empty_tuple_expr(self) -> TupleExpr:
+        self.expect(')')
+        node = TupleExpr([])
+        return node
+
+    def parse_list_expr(self) -> Node:
+        """Parse list literal or list comprehension."""
+        items = []
+        self.expect('[')
+        while self.current_str() != ']' and not self.eol():
+            items.append(self.parse_expression(precedence['<for>'], star_expr_allowed=True))
+            if self.current_str() != ',':
+                break
+            self.expect(',')
+        if self.current_str() == 'for' and len(items) == 1:
+            items[0] = self.parse_generator_expr(items[0])
+        self.expect(']')
+        if len(items) == 1 and isinstance(items[0], GeneratorExpr):
+            return ListComprehension(cast(GeneratorExpr, items[0]))
+        else:
+            expr = ListExpr(items)
+            return expr
+
+    def parse_generator_expr(self, left_expr: Node) -> GeneratorExpr:
+        tok = self.current()
+        indices, sequences, condlists = self.parse_comp_for()
+
+        gen = GeneratorExpr(left_expr, indices, sequences, condlists)
+        gen.set_line(tok)
+        return gen
+
+    def parse_comp_for(self) -> Tuple[List[Node], List[Node], List[List[Node]]]:
+        indices = []
+        sequences = []
+        condlists = []  # type: List[List[Node]]
+        while self.current_str() == 'for':
+            conds = []
+            self.expect('for')
+            index = self.parse_for_index_variables()
+            indices.append(index)
+            self.expect('in')
+            if self.pyversion[0] >= 3:
+                sequence = self.parse_expression(precedence['<if>'])
+            else:
+                sequence = self.parse_expression_list()
+            sequences.append(sequence)
+            while self.current_str() == 'if':
+                self.skip()
+                conds.append(self.parse_expression(precedence['<if>']))
+            condlists.append(conds)
+
+        return indices, sequences, condlists
+
+    def parse_expression_list(self) -> Node:
+        prec = precedence['<if>']
+        expr = self.parse_expression(prec)
+        if self.current_str() != ',':
+            return expr
+        else:
+            t = self.current()
+            return self.parse_tuple_expr(expr, prec).set_line(t)
+
+    def parse_conditional_expr(self, left_expr: Node) -> ConditionalExpr:
+        self.expect('if')
+        cond = self.parse_expression(precedence['<if>'])
+        self.expect('else')
+        else_expr = self.parse_expression(precedence['<if>'])
+        return ConditionalExpr(cond, left_expr, else_expr)
+
+    def parse_dict_or_set_expr(self) -> Node:
+        items = []  # type: List[Tuple[Node, Node]]
+        self.expect('{')
+        while self.current_str() != '}' and not self.eol():
+            key = self.parse_expression(precedence['<for>'])
+            if self.current_str() in [',', '}'] and items == []:
+                return self.parse_set_expr(key)
+            elif self.current_str() == 'for' and items == []:
+                return self.parse_set_comprehension(key)
+            elif self.current_str() != ':':
+                self.parse_error()
+            colon = self.expect(':')
+            value = self.parse_expression(precedence['<for>'])
+            if self.current_str() == 'for' and items == []:
+                return self.parse_dict_comprehension(key, value, colon)
+            items.append((key, value))
+            if self.current_str() != ',':
+                break
+            self.expect(',')
+        self.expect('}')
+        node = DictExpr(items)
+        return node
+
+    def parse_set_expr(self, first: Node) -> SetExpr:
+        items = [first]
+        while self.current_str() != '}' and not self.eol():
+            self.expect(',')
+            if self.current_str() == '}':
+                break
+            items.append(self.parse_expression(precedence[',']))
+        self.expect('}')
+        expr = SetExpr(items)
+        return expr
+
+    def parse_set_comprehension(self, expr: Node):
+        gen = self.parse_generator_expr(expr)
+        self.expect('}')
+        set_comp = SetComprehension(gen)
+        return set_comp
+
+    def parse_dict_comprehension(self, key: Node, value: Node,
+                                 colon: Token) -> DictionaryComprehension:
+        indices, sequences, condlists = self.parse_comp_for()
+        dic = DictionaryComprehension(key, value, indices, sequences, condlists)
+        dic.set_line(colon)
+        self.expect('}')
+        return dic
+
+    def parse_tuple_expr(self, expr: Node,
+                         prec: int = precedence[',']) -> TupleExpr:
+        items = [expr]
+        while True:
+            self.expect(',')
+            if (self.current_str() in [')', ']', '=', ':'] or
+                    isinstance(self.current(), Break)):
+                break
+            items.append(self.parse_expression(prec, star_expr_allowed=True))
+            if self.current_str() != ',': break
+        node = TupleExpr(items)
+        return node
+
+    def parse_backquote_expr(self) -> BackquoteExpr:
+        self.expect('`')
+        expr = self.parse_expression()
+        self.expect('`')
+        return BackquoteExpr(expr)
+
+    def parse_name_expr(self) -> NameExpr:
+        tok = self.expect_type(Name)
+        node = NameExpr(tok.string)
+        node.set_line(tok)
+        return node
+
+    octal_int = re.compile('0+[1-9]')
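+    # Matches Python 2 style octal literals such as '0777' (a leading zero
+    # followed by a non-zero digit), which int(s, 0) would reject under
+    # Python 3 rules; they are parsed with an explicit base of 8 below.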
+
+    def parse_int_expr(self) -> IntExpr:
+        tok = self.expect_type(IntLit)
+        string = tok.string.rstrip('lL')  # Strip 'L' suffix (Python 2 long literals)
+        if self.octal_int.match(string):
+            value = int(string, 8)
+        else:
+            value = int(string, 0)
+        node = IntExpr(value)
+        return node
+
+    def parse_str_expr(self) -> Node:
+        # XXX \uxxxx literals
+        token = self.expect_type(StrLit)
+        value = cast(StrLit, token).parsed()
+        is_unicode = False
+        while isinstance(self.current(), (StrLit, UnicodeLit)):
+            token = self.skip()
+            if isinstance(token, StrLit):
+                value += token.parsed()
+            elif isinstance(token, UnicodeLit):
+                value += token.parsed()
+                is_unicode = True
+        if is_unicode or (self.pyversion[0] == 2 and 'unicode_literals' in self.future_options):
+            node = UnicodeExpr(value)  # type: Node
+        else:
+            node = StrExpr(value)
+        return node
+
+    def parse_bytes_literal(self) -> Node:
+        # XXX \uxxxx literals
+        tok = [self.expect_type(BytesLit)]
+        value = (cast(BytesLit, tok[0])).parsed()
+        while isinstance(self.current(), BytesLit):
+            t = cast(BytesLit, self.skip())
+            value += t.parsed()
+        if self.pyversion[0] >= 3:
+            node = BytesExpr(value)  # type: Node
+        else:
+            node = StrExpr(value)
+        return node
+
+    def parse_unicode_literal(self) -> Node:
+        # XXX \uxxxx literals
+        token = self.expect_type(UnicodeLit)
+        value = cast(UnicodeLit, token).parsed()
+        while isinstance(self.current(), (UnicodeLit, StrLit)):
+            token = cast(Union[UnicodeLit, StrLit], self.skip())
+            value += token.parsed()
+        if self.pyversion[0] >= 3:
+            # Python 3.3 supports u'...' as an alias of '...'.
+            node = StrExpr(value)  # type: Node
+        else:
+            node = UnicodeExpr(value)
+        return node
+
+    def parse_float_expr(self) -> FloatExpr:
+        tok = self.expect_type(FloatLit)
+        node = FloatExpr(float(tok.string))
+        return node
+
+    def parse_complex_expr(self) -> ComplexExpr:
+        tok = self.expect_type(ComplexLit)
+        node = ComplexExpr(complex(tok.string))
+        return node
+
+    def parse_call_expr(self, callee: Any) -> CallExpr:
+        self.expect('(')
+        args, kinds, names = self.parse_arg_expr()
+        self.expect(')')
+        node = CallExpr(callee, args, kinds, names)
+        return node
+
+    def parse_arg_expr(self) -> Tuple[List[Node], List[int], List[str]]:
+        """Parse arguments in a call expression (within '(' and ')').
+
+        Return a tuple with these items:
+          argument expressions
+          argument kinds
+          argument names (for named arguments; None for ordinary args)
+        """
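+        # Illustrative example (not from the original source): for the call
+        # f(1, *rest, key=2) this returns
+        #     args  = [1, rest, 2]
+        #     kinds = [ARG_POS, ARG_STAR, ARG_NAMED]
+        #     names = [None, None, 'key']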
+        args = []   # type: List[Node]
+        kinds = []  # type: List[int]
+        names = []  # type: List[str]
+        var_arg = False
+        dict_arg = False
+        named_args = False
+        while self.current_str() != ')' and not self.eol() and not dict_arg:
+            if isinstance(self.current(), Name) and self.peek().string == '=':
+                # Named argument
+                name = self.expect_type(Name)
+                self.expect('=')
+                kinds.append(nodes.ARG_NAMED)
+                names.append(name.string)
+                named_args = True
+            elif (self.current_str() == '*' and not var_arg and not dict_arg):
+                # *args
+                var_arg = True
+                self.expect('*')
+                kinds.append(nodes.ARG_STAR)
+                names.append(None)
+            elif self.current_str() == '**':
+                # **kwargs
+                self.expect('**')
+                dict_arg = True
+                kinds.append(nodes.ARG_STAR2)
+                names.append(None)
+            elif not var_arg and not named_args:
+                # Ordinary argument
+                kinds.append(nodes.ARG_POS)
+                names.append(None)
+            else:
+                self.parse_error()
+            args.append(self.parse_expression(precedence[',']))
+            if self.current_str() != ',':
+                break
+            self.expect(',')
+        return args, kinds, names
+
+    def parse_member_expr(self, expr: Any) -> Node:
+        self.expect('.')
+        name = self.expect_type(Name)
+        if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr)
+                and cast(NameExpr, expr.callee).name == 'super'):
+            # super() expression
+            node = SuperExpr(name.string)  # type: Node
+        else:
+            node = MemberExpr(expr, name.string)
+        return node
+
+    def parse_index_expr(self, base: Any) -> IndexExpr:
+        self.expect('[')
+        index = self.parse_slice_item()
+        if self.current_str() == ',':
+            # Extended slicing such as x[1:, :2].
+            items = [index]
+            while self.current_str() == ',':
+                self.skip()
+                if self.current_str() == ']' or isinstance(self.current(), Break):
+                    break
+                items.append(self.parse_slice_item())
+            index = TupleExpr(items)
+            index.set_line(items[0].line)
+        self.expect(']')
+        node = IndexExpr(base, index)
+        return node
+
+    def parse_slice_item(self) -> Node:
+        if self.current_str() != ':':
+            if self.current_str() == '...':
+                # Ellipsis is valid here even in Python 2 (but not elsewhere).
+                ellipsis = EllipsisExpr()
+                token = self.skip()
+                ellipsis.set_line(token)
+                return ellipsis
+            else:
+                item = self.parse_expression(precedence[','])
+        else:
+            item = None
+        if self.current_str() == ':':
+            # Slice.
+            index = item
+            colon = self.expect(':')
+            if self.current_str() not in (']', ':', ','):
+                end_index = self.parse_expression(precedence[','])
+            else:
+                end_index = None
+            stride = None
+            if self.current_str() == ':':
+                self.expect(':')
+                if self.current_str() not in (']', ','):
+                    stride = self.parse_expression(precedence[','])
+            item = SliceExpr(index, end_index, stride).set_line(colon.line)
+        return item
+
+    def parse_bin_op_expr(self, left: Node, prec: int) -> OpExpr:
+        op = self.expect_type(Op)
+        op_str = op.string
+        if op_str == '~':
+            self.ind -= 1
+            self.parse_error()
+        right = self.parse_expression(prec)
+        node = OpExpr(op_str, left, right)
+        return node
+
+    def parse_comparison_expr(self, left: Node, prec: int) -> ComparisonExpr:
+        operators_str = []
+        operands = [left]
+
+        while True:
+            op = self.expect_type(Op)
+            op_str = op.string
+            if op_str == 'not':
+                if self.current_str() == 'in':
+                    op_str = 'not in'
+                    self.skip()
+                else:
+                    self.parse_error()
+            elif op_str == 'is' and self.current_str() == 'not':
+                op_str = 'is not'
+                self.skip()
+
+            operators_str.append(op_str)
+            operand = self.parse_expression(prec)
+            operands.append(operand)
+
+            # Continue if the next token is a comparison operator.
+            s = self.current_str()
+            if s not in op_comp:
+                break
+
+        node = ComparisonExpr(operators_str, operands)
+        return node
+
+    def parse_unary_expr(self) -> UnaryExpr:
+        op_tok = self.skip()
+        op = op_tok.string
+        if op == '-' or op == '+':
+            prec = precedence['-u']
+        else:
+            prec = precedence[op]
+        expr = self.parse_expression(prec)
+        node = UnaryExpr(op, expr)
+        return node
+
+    def parse_lambda_expr(self) -> FuncExpr:
+        lambda_tok = self.expect('lambda')
+
+        args, extra_stmts = self.parse_arg_list(allow_signature=False)
+
+        # Use 'object' as the placeholder return type; it will be inferred
+        # later. We can't use 'Any' since it could make type inference results
+        # less precise.
+        ret_type = UnboundType('__builtins__.object')
+        typ = self.build_func_annotation(ret_type, args,
+                                         lambda_tok.line, is_default_ret=True)
+
+        colon = self.expect(':')
+
+        expr = self.parse_expression(precedence[','])
+
+        nodes = [ReturnStmt(expr).set_line(lambda_tok)]
+        # Potentially insert extra assignment statements to the beginning of the
+        # body, used to decompose Python 2 tuple arguments.
+        nodes[:0] = extra_stmts
+        body = Block(nodes)
+        body.set_line(colon)
+
+        return FuncExpr(args, body, typ)
+
+    # Helper methods
+
+    def skip(self) -> Token:
+        self.ind += 1
+        return self.tok[self.ind - 1]
+
+    def expect(self, string: str) -> Token:
+        if self.current_str() == string:
+            self.ind += 1
+            return self.tok[self.ind - 1]
+        else:
+            self.parse_error()
+
+    def expect_indent(self) -> Token:
+        if isinstance(self.current(), Indent):
+            return self.expect_type(Indent)
+        else:
+            self.fail('Expected an indented block', self.current().line)
+            return none
+
+    def fail(self, msg: str, line: int) -> None:
+        self.errors.report(line, msg)
+
+    def expect_type(self, typ: type) -> Token:
+        current = self.current()
+        if isinstance(current, typ):
+            self.ind += 1
+            return current
+        else:
+            self.parse_error()
+
+    def expect_colon_and_break(self) -> Tuple[Token, Token]:
+        return self.expect_type(Colon), self.expect_type(Break)
+
+    def expect_break(self) -> Token:
+        return self.expect_type(Break)
+
+    def current(self) -> Token:
+        return self.tok[self.ind]
+
+    def current_str(self) -> str:
+        return self.current().string
+
+    def peek(self) -> Token:
+        return self.tok[self.ind + 1]
+
+    def parse_error(self) -> None:
+        self.parse_error_at(self.current())
+        raise ParseError()
+
+    def parse_error_at(self, tok: Token, skip: bool = True) -> None:
+        msg = ''
+        if isinstance(tok, LexError):
+            msg = token_repr(tok)
+            msg = msg[0].upper() + msg[1:]
+        elif isinstance(tok, Indent) or isinstance(tok, Dedent):
+            msg = 'Inconsistent indentation'
+        else:
+            msg = 'Parse error before {}'.format(token_repr(tok))
+
+        self.errors.report(tok.line, msg)
+
+        if skip:
+            self.skip_until_next_line()
+
+    def skip_until_break(self) -> None:
+        n = 0
+        while (not isinstance(self.current(), Break)
+               and not isinstance(self.current(), Eof)):
+            self.skip()
+            n += 1
+        if isinstance(self.tok[self.ind - 1], Colon) and n > 1:
+            self.ind -= 1
+
+    def skip_until_next_line(self) -> None:
+        self.skip_until_break()
+        if isinstance(self.current(), Break):
+            self.skip()
+
+    def eol(self) -> bool:
+        return isinstance(self.current(), Break) or self.eof()
+
+    def eof(self) -> bool:
+        return isinstance(self.current(), Eof)
+
+    # Type annotation related functionality
+
+    def parse_type(self) -> Type:
+        try:
+            typ, self.ind = parse_type(self.tok, self.ind)
+        except TypeParseError as e:
+            self.parse_error_at(e.token)
+            raise ParseError()
+        return typ
+
+    annotation_prefix_re = re.compile(r'#\s*type:')
+    ignore_prefix_re = re.compile(r'ignore\b')
+
+    def parse_type_comment(self, token: Token, signature: bool) -> Type:
+        """Parse a '# type: ...' annotation.
+
+        Return None if no annotation found. If signature is True, expect
+        a type signature of form (...) -> t.
+        """
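+        # Illustrative examples (not from the original source):
+        #     '# type: List[int]'          -> a type (signature=False)
+        #     '# type: (int, str) -> bool' -> a signature (signature=True)
+        #     '# type: ignore'             -> None (not a type annotation)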
+        whitespace_or_comments = token.rep().strip()
+        if self.annotation_prefix_re.match(whitespace_or_comments):
+            type_as_str = whitespace_or_comments.split(':', 1)[1].strip()
+            if self.ignore_prefix_re.match(type_as_str):
+                # Actually a "# type: ignore" annotation -> not a type.
+                return None
+            tokens = lex.lex(type_as_str, token.line)[0]
+            if len(tokens) < 2:
+                # Empty annotation (only Eof token)
+                self.errors.report(token.line, 'Empty type annotation')
+                return None
+            try:
+                if not signature:
+                    type, index = parse_types(tokens, 0)
+                else:
+                    type, index = parse_signature(tokens)
+            except TypeParseError as e:
+                self.parse_error_at(e.token, skip=False)
+                return None
+            if index < len(tokens) - 2:
+                self.parse_error_at(tokens[index], skip=False)
+                return None
+            return type
+        else:
+            return None
+
+
+class ParseError(Exception): pass
+
+
+def token_repr(tok: Token) -> str:
+    """Return a representation of a token for use in parse error messages."""
+    if isinstance(tok, Break):
+        return 'end of line'
+    elif isinstance(tok, Eof):
+        return 'end of file'
+    elif isinstance(tok, Keyword) or isinstance(tok, Name):
+        return '"{}"'.format(tok.string)
+    elif isinstance(tok, IntLit) or isinstance(tok, FloatLit) or isinstance(tok, ComplexLit):
+        return 'numeric literal'
+    elif isinstance(tok, StrLit) or isinstance(tok, UnicodeLit):
+        return 'string literal'
+    elif (isinstance(tok, Punct) or isinstance(tok, Op)
+          or isinstance(tok, Colon)):
+        return tok.string
+    elif isinstance(tok, Bom):
+        return 'byte order mark'
+    elif isinstance(tok, Indent):
+        return 'indent'
+    elif isinstance(tok, Dedent):
+        return 'dedent'
+    elif isinstance(tok, EllipsisToken):
+        return '...'
+    else:
+        if isinstance(tok, LexError):
+            t = tok.type
+            if t == lex.NUMERIC_LITERAL_ERROR:
+                return 'invalid numeric literal'
+            elif t == lex.UNTERMINATED_STRING_LITERAL:
+                return 'unterminated string literal'
+            elif t == lex.INVALID_CHARACTER:
+                msg = 'unrecognized character'
+                if ord(tok.string) in range(33, 127):
+                    msg += ' ' + tok.string
+                return msg
+            elif t == lex.INVALID_DEDENT:
+                return 'inconsistent indentation'
+            elif t == lex.DECODE_ERROR:
+                return tok.message
+        raise ValueError('Unknown token {}'.format(repr(tok)))
+
+
+if __name__ == '__main__':
+    # Parse a file and dump the AST (or display errors).
+    import sys
+
+    def usage():
+        print('Usage: parse.py [--py2] [--quiet] FILE [...]')
+        sys.exit(2)
+
+    args = sys.argv[1:]
+    pyversion = defaults.PYTHON3_VERSION
+    quiet = False
+    while args and args[0].startswith('--'):
+        if args[0] == '--py2':
+            pyversion = defaults.PYTHON2_VERSION
+        elif args[0] == '--quiet':
+            quiet = True
+        else:
+            usage()
+        args = args[1:]
+    if len(args) < 1:
+        usage()
+    status = 0
+    for fnam in args:
+        s = open(fnam, 'rb').read()
+        errors = Errors()
+        try:
+            tree = parse(s, fnam, pyversion=pyversion)
+            if not quiet:
+                print(tree)
+        except CompileError as e:
+            for msg in e.messages:
+                sys.stderr.write('%s\n' % msg)
+            status = 1
+    sys.exit(status)
diff --git a/mypy/parsetype.py b/mypy/parsetype.py
new file mode 100644
index 0000000..555daa6
--- /dev/null
+++ b/mypy/parsetype.py
@@ -0,0 +1,250 @@
+"""Type parser"""
+
+from typing import List, Tuple, Union, cast
+
+from mypy.types import (
+    Type, UnboundType, TupleType, UnionType, TypeList, AnyType, CallableType, StarType,
+    EllipsisType
+)
+from mypy.lex import Token, Name, StrLit, Break, lex
+from mypy import nodes
+
+
+none = Token('')  # Empty token
+
+
+class TypeParseError(Exception):
+    def __init__(self, token: Token, index: int) -> None:
+        super().__init__()
+        self.token = token
+        self.index = index
+
+
+def parse_type(tok: List[Token], index: int) -> Tuple[Type, int]:
+    """Parse a type.
+
+    Return (type, index after type).
+    """
+
+    p = TypeParser(tok, index)
+    return p.parse_type(), p.index()
+
+
+def parse_types(tok: List[Token], index: int) -> Tuple[Type, int]:
+    """Parse one or more types separated by commas (optional parentheses).
+
+    Return (type, index after type).
+    """
+
+    p = TypeParser(tok, index)
+    return p.parse_types(), p.index()
+
+
+class TypeParser:
+    def __init__(self, tok: List[Token], ind: int) -> None:
+        self.tok = tok
+        self.ind = ind
+
+    def index(self) -> int:
+        return self.ind
+
+    def parse_type(self) -> Type:
+        """Parse a type."""
+        t = self.current_token()
+        if t.string == '(':
+            return self.parse_parens()
+        if isinstance(t, Name):
+            return self.parse_named_type()
+        elif t.string == '[':
+            return self.parse_type_list()
+        elif t.string == '*':
+            return self.parse_star_type()
+        elif t.string == '...':
+            return self.parse_ellipsis_type()
+        elif isinstance(t, StrLit):
+            # Type escaped as string literal.
+            typestr = t.parsed()
+            line = t.line
+            self.skip()
+            try:
+                result = parse_str_as_type(typestr, line)
+            except TypeParseError as e:
+                raise TypeParseError(e.token, self.ind)
+            return result
+        else:
+            self.parse_error()
+
+    def parse_parens(self) -> Type:
+        self.expect('(')
+        types = self.parse_types()
+        self.expect(')')
+        return types
+
+    def parse_types(self) -> Type:
+        """Parse either a single type or a comma-separated list of types as
+        a tuple type. In the latter case, a trailing comma is required when
+        the list contains only a single type (and is optional otherwise).
+
+        int   ->   int
+        int,  ->   TupleType[int]
+        int, int, int  ->  TupleType[int, int, int]
+        """
+        type = self.parse_type()
+        if self.current_token_str() == ',':
+            items = [type]
+            while self.current_token_str() == ',':
+                self.skip()
+                if self.current_token_str() == ')':
+                    break
+                items.append(self.parse_type())
+            type = TupleType(items, None, type.line, implicit=True)
+        return type
+
+    def parse_type_list(self) -> TypeList:
+        """Parse type list [t, ...]."""
+        lbracket = self.expect('[')
+        commas = []  # type: List[Token]
+        items = []  # type: List[Type]
+        while self.current_token_str() != ']':
+            t = self.parse_type()
+            items.append(t)
+            if self.current_token_str() != ',':
+                break
+            commas.append(self.skip())
+        self.expect(']')
+        return TypeList(items, line=lbracket.line)
+
+    def parse_named_type(self) -> Type:
+        line = self.current_token().line
+        name = ''
+        components = []  # type: List[Token]
+
+        components.append(self.expect_type(Name))
+        name += components[-1].string
+
+        while self.current_token_str() == '.':
+            components.append(self.skip())
+            t = self.expect_type(Name)
+            components.append(t)
+            name += '.' + t.string
+
+        commas = []  # type: List[Token]
+        args = []  # type: List[Type]
+        if self.current_token_str() == '[':
+            self.skip()
+            while True:
+                typ = self.parse_type()
+                args.append(typ)
+                if self.current_token_str() != ',':
+                    break
+                commas.append(self.skip())
+
+            self.expect(']')
+
+        typ = UnboundType(name, args, line)
+        return typ
+
+    def parse_star_type(self) -> Type:
+        star = self.expect('*')
+        type = self.parse_type()
+        return StarType(type, star.line)
+
+    def parse_ellipsis_type(self) -> Type:
+        ellipsis = self.expect('...')
+        return EllipsisType(ellipsis.line)
+
+    # Helpers
+
+    def skip(self) -> Token:
+        self.ind += 1
+        return self.tok[self.ind - 1]
+
+    def expect(self, string: str) -> Token:
+        if self.tok[self.ind].string == string:
+            self.ind += 1
+            return self.tok[self.ind - 1]
+        else:
+            self.parse_error()
+
+    def expect_type(self, typ: type) -> Token:
+        if isinstance(self.current_token(), typ):
+            self.ind += 1
+            return self.tok[self.ind - 1]
+        else:
+            self.parse_error()
+
+    def current_token(self) -> Token:
+        return self.tok[self.ind]
+
+    def current_token_str(self) -> str:
+        return self.current_token().string
+
+    def parse_error(self) -> None:
+        raise TypeParseError(self.tok[self.ind], self.ind)
+
+
+def parse_str_as_type(typestr: str, line: int) -> Type:
+    """Parse a type represented as a string.
+
+    Raise TypeParseError on parse error.
+    """
+
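+    # Illustrative example (not from the original source):
+    # parse_str_as_type('List[int]', 1) returns UnboundType('List') with a
+    # single UnboundType('int') argument; the names are left unresolved here.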
+    typestr = typestr.strip()
+    tokens = lex(typestr, line)[0]
+    result, i = parse_type(tokens, 0)
+    if i < len(tokens) - 2:
+        raise TypeParseError(tokens[i], i)
+    return result
+
+
+def parse_str_as_signature(typestr: str, line: int) -> CallableType:
+    """Parse a signature represented as a string.
+
+    Raise TypeParseError on parse error.
+    """
+
+    typestr = typestr.strip()
+    tokens = lex(typestr, line)[0]
+    result, i = parse_signature(tokens)
+    if i < len(tokens) - 2:
+        raise TypeParseError(tokens[i], i)
+    return result
+
+
+def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
+    """Parse signature of form (argtype, ...) -> ...
+
+    Return tuple (signature type, token index).
+    """
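+    # Illustrative example (not from the original source):
+    # parse_signature(lex('(int, str) -> bool', 1)[0]) returns a CallableType
+    # with two ARG_POS arguments of types int and str, a bool return type,
+    # and the index of the token following the signature.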
+    i = 0
+    if tokens[i].string != '(':
+        raise TypeParseError(tokens[i], i)
+    i += 1
+    arg_types = []  # type: List[Type]
+    arg_kinds = []  # type: List[int]
+    while tokens[i].string != ')':
+        if tokens[i].string == '*':
+            arg_kinds.append(nodes.ARG_STAR)
+            i += 1
+        elif tokens[i].string == '**':
+            arg_kinds.append(nodes.ARG_STAR2)
+            i += 1
+        else:
+            arg_kinds.append(nodes.ARG_POS)
+        arg, i = parse_type(tokens, i)
+        arg_types.append(arg)
+        next = tokens[i].string
+        if next not in ',)':
+            raise TypeParseError(tokens[i], i)
+        if next == ',':
+            i += 1
+    i += 1
+    if tokens[i].string != '->':
+        raise TypeParseError(tokens[i], i)
+    i += 1
+    ret_type, i = parse_type(tokens, i)
+    return CallableType(arg_types,
+                        arg_kinds,
+                        [None] * len(arg_types),
+                        ret_type, None), i
diff --git a/mypy/replacetvars.py b/mypy/replacetvars.py
new file mode 100644
index 0000000..a7658e8
--- /dev/null
+++ b/mypy/replacetvars.py
@@ -0,0 +1,48 @@
+"""Type operations"""
+
+import typing
+
+from mypy.lex import Token
+from mypy.types import Type, AnyType, NoneTyp, TypeTranslator, TypeVarType
+
+
+def replace_type_vars(typ: Type, func_tvars: bool = True) -> Type:
+    """Replace type variable references in a type with the Any type. If
+    func_tvars is false, only replace instance type variables.
+    """
+    return typ.accept(ReplaceTypeVarsVisitor(func_tvars))
+
+
+class ReplaceTypeVarsVisitor(TypeTranslator):
+    # Only override type variable handling; otherwise perform an identity
+    # transformation.
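+    #
+    # Note (added for clarity; inferred from the id checks below): instance
+    # (class) type variables have positive ids and function type variables
+    # have negative ids, so instance variables are always replaced and
+    # function variables only when func_tvars is true.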
+
+    func_tvars = False
+
+    def __init__(self, func_tvars: bool) -> None:
+        self.func_tvars = func_tvars
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if t.id > 0 or self.func_tvars:
+            if t.line is not None:
+                return AnyType(t.line)
+            else:
+                return AnyType()
+        else:
+            return t
+
+
+def replace_func_type_vars(typ: Type, target_type: Type) -> Type:
+    """Replace function type variables in a type with the target type."""
+    return typ.accept(ReplaceFuncTypeVarsVisitor(target_type))
+
+
+class ReplaceFuncTypeVarsVisitor(TypeTranslator):
+    def __init__(self, target_type: Type) -> None:
+        self.target_type = target_type
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if t.id < 0:
+            return self.target_type
+        else:
+            return t
diff --git a/mypy/report.py b/mypy/report.py
new file mode 100644
index 0000000..bf52a73
--- /dev/null
+++ b/mypy/report.py
@@ -0,0 +1,287 @@
+"""Classes for producing HTML reports about imprecision."""
+
+from abc import ABCMeta, abstractmethod
+import cgi
+import os
+import shutil
+
+from typing import Callable, Dict, List, cast
+
+from mypy.types import Type
+from mypy.nodes import MypyFile, Node
+from mypy import stats
+
+
+reporter_classes = {}  # type: Dict[str, Callable[[Reports, str], AbstractReporter]]
+
+
+class Reports:
+    def __init__(self, main_file: str, data_dir: str, report_dirs: Dict[str, str]) -> None:
+        self.main_file = main_file
+        self.data_dir = data_dir
+        self.reporters = []  # type: List[AbstractReporter]
+        self.named_reporters = {}  # type: Dict[str, AbstractReporter]
+
+        for report_type, report_dir in sorted(report_dirs.items()):
+            self.add_report(report_type, report_dir)
+
+    def add_report(self, report_type: str, report_dir: str) -> 'AbstractReporter':
+        try:
+            return self.named_reporters[report_type]
+        except KeyError:
+            pass
+        reporter_cls = reporter_classes[report_type]
+        reporter = reporter_cls(self, report_dir)
+        self.reporters.append(reporter)
+        self.named_reporters[report_type] = reporter
+        return reporter
+
+    def file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        for reporter in self.reporters:
+            reporter.on_file(tree, type_map)
+
+    def finish(self) -> None:
+        for reporter in self.reporters:
+            reporter.on_finish()
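+    # Illustrative usage (not from the original source; names are hypothetical):
+    #
+    #     reports = Reports('prog.py', data_dir, {'xml': 'report-dir'})
+    #     reports.file(tree, type_map)   # called once per analyzed module
+    #     reports.finish()               # let reporters write their output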
+
+
+class AbstractReporter(metaclass=ABCMeta):
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        self.output_dir = output_dir
+
+    @abstractmethod
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        pass
+
+    @abstractmethod
+    def on_finish(self) -> None:
+        pass
+
+
+class OldHtmlReporter(AbstractReporter):
+    """Old HTML reporter.
+
+    This just calls the old functions in `stats`, which use global
+    variables to preserve state for the index.
+    """
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        stats.generate_html_report(tree, tree.path, type_map, self.output_dir)
+
+    def on_finish(self) -> None:
+        stats.generate_html_index(self.output_dir)
+reporter_classes['old-html'] = OldHtmlReporter
+
+
+class FileInfo:
+    def __init__(self, name: str, module: str) -> None:
+        self.name = name
+        self.module = module
+        self.counts = [0] * len(stats.precision_names)
+
+    def total(self) -> int:
+        return sum(self.counts)
+
+    def attrib(self) -> Dict[str, str]:
+        return {name: str(val) for name, val in zip(stats.precision_names, self.counts)}
+
+
+class MemoryXmlReporter(AbstractReporter):
+    """Internal reporter that generates XML in memory.
+
+    This is used by all other XML-based reporters to avoid duplication.
+    """
+
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        import lxml.etree as etree
+
+        super().__init__(reports, output_dir)
+
+        self.main_file = reports.main_file
+        self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt')
+        self.xslt_txt_path = os.path.join(reports.data_dir, 'xml', 'mypy-txt.xslt')
+        self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
+        xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd')
+        self.schema = etree.XMLSchema(etree.parse(xsd_path))
+        self.last_xml = None  # type: etree._ElementTree
+        self.files = []  # type: List[FileInfo]
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        import lxml.etree as etree
+
+        self.last_xml = None
+        path = os.path.relpath(tree.path)
+        if stats.is_special_module(path):
+            return
+        if path.startswith('..'):
+            return
+        if 'stubs' in path.split('/'):
+            return
+
+        visitor = stats.StatisticsVisitor(inferred=True, typemap=type_map, all_nodes=True)
+        tree.accept(visitor)
+
+        root = etree.Element('mypy-report-file', name=path, module=tree._fullname)
+        doc = etree.ElementTree(root)
+        file_info = FileInfo(path, tree._fullname)
+
+        with open(path) as input_file:
+            for lineno, line_text in enumerate(input_file, 1):
+                status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
+                file_info.counts[status] += 1
+                etree.SubElement(root, 'line',
+                                 number=str(lineno),
+                                 precision=stats.precision_names[status],
+                                 content=line_text[:-1])
+        # Assumes a layout similar to what XmlReporter uses.
+        xslt_path = os.path.relpath('mypy-html.xslt', path)
+        transform_pi = etree.ProcessingInstruction('xml-stylesheet',
+                'type="text/xsl" href="%s"' % cgi.escape(xslt_path, True))
+        root.addprevious(transform_pi)
+        self.schema.assertValid(doc)
+
+        self.last_xml = doc
+        self.files.append(file_info)
+
+    def on_finish(self) -> None:
+        import lxml.etree as etree
+
+        self.last_xml = None
+        # index_path = os.path.join(self.output_dir, 'index.xml')
+        output_files = sorted(self.files, key=lambda x: x.module)
+
+        root = etree.Element('mypy-report-index', name=self.main_file)
+        doc = etree.ElementTree(root)
+
+        for file_info in output_files:
+            etree.SubElement(root, 'file',
+                             file_info.attrib(),
+                             total=str(file_info.total()),
+                             name=file_info.name,
+                             module=file_info.module)
+        xslt_path = os.path.relpath('mypy-html.xslt', '.')
+        transform_pi = etree.ProcessingInstruction('xml-stylesheet',
+                'type="text/xsl" href="%s"' % cgi.escape(xslt_path, True))
+        root.addprevious(transform_pi)
+        self.schema.assertValid(doc)
+
+        self.last_xml = doc
+
+reporter_classes['memory-xml'] = MemoryXmlReporter
+
+
+class AbstractXmlReporter(AbstractReporter):
+    """Internal abstract class for reporters that work via XML."""
+
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        super().__init__(reports, output_dir)
+
+        memory_reporter = reports.add_report('memory-xml', '<memory>')
+        # The dependency will be called first.
+        self.memory_xml = cast(MemoryXmlReporter, memory_reporter)
+
+
+class XmlReporter(AbstractXmlReporter):
+    """Public reporter that exports XML.
+
+    The produced XML files contain a reference to the absolute path
+    of the html transform, so they will be locally viewable in a browser.
+
+    However, there is a bug in Chrome and all other WebKit-based browsers
+    that makes this fail for file:// URLs but work over http:// URLs.
+    """
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        last_xml = self.memory_xml.last_xml
+        if last_xml is None:
+            return
+        path = os.path.relpath(tree.path)
+        if path.startswith('..'):
+            return
+        out_path = os.path.join(self.output_dir, 'xml', path + '.xml')
+        stats.ensure_dir_exists(os.path.dirname(out_path))
+        last_xml.write(out_path, encoding='utf-8')
+
+    def on_finish(self) -> None:
+        last_xml = self.memory_xml.last_xml
+        out_path = os.path.join(self.output_dir, 'index.xml')
+        out_xslt = os.path.join(self.output_dir, 'mypy-html.xslt')
+        out_css = os.path.join(self.output_dir, 'mypy-html.css')
+        last_xml.write(out_path, encoding='utf-8')
+        shutil.copyfile(self.memory_xml.xslt_html_path, out_xslt)
+        shutil.copyfile(self.memory_xml.css_html_path, out_css)
+        print('Generated XML report:', os.path.abspath(out_path))
+
+reporter_classes['xml'] = XmlReporter
+
+
+class XsltHtmlReporter(AbstractXmlReporter):
+    """Public reporter that exports HTML via XSLT.
+
+    This is slightly different from running `xsltproc` on the .xml files,
+    because it passes a parameter to rewrite the links.
+    """
+
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        import lxml.etree as etree
+
+        super().__init__(reports, output_dir)
+
+        self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path))
+        self.param_html = etree.XSLT.strparam('html')
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        last_xml = self.memory_xml.last_xml
+        if last_xml is None:
+            return
+        path = os.path.relpath(tree.path)
+        if path.startswith('..'):
+            return
+        out_path = os.path.join(self.output_dir, 'html', path + '.html')
+        stats.ensure_dir_exists(os.path.dirname(out_path))
+        transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html))
+        with open(out_path, 'wb') as out_file:
+            out_file.write(transformed_html)
+
+    def on_finish(self) -> None:
+        last_xml = self.memory_xml.last_xml
+        out_path = os.path.join(self.output_dir, 'index.html')
+        out_css = os.path.join(self.output_dir, 'mypy-html.css')
+        transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html))
+        with open(out_path, 'wb') as out_file:
+            out_file.write(transformed_html)
+        shutil.copyfile(self.memory_xml.css_html_path, out_css)
+        print('Generated HTML report (via XSLT):', os.path.abspath(out_path))
+
+reporter_classes['xslt-html'] = XsltHtmlReporter
+
+
+class XsltTxtReporter(AbstractXmlReporter):
+    """Public reporter that exports TXT via XSLT.
+
+    Currently this only does the summary, not the individual reports.
+    """
+
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        import lxml.etree as etree
+
+        super().__init__(reports, output_dir)
+
+        self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path))
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        pass
+
+    def on_finish(self) -> None:
+        last_xml = self.memory_xml.last_xml
+        out_path = os.path.join(self.output_dir, 'index.txt')
+        stats.ensure_dir_exists(os.path.dirname(out_path))
+        transformed_txt = bytes(self.xslt_txt(last_xml))
+        with open(out_path, 'wb') as out_file:
+            out_file.write(transformed_txt)
+        print('Generated TXT report (via XSLT):', os.path.abspath(out_path))
+
+reporter_classes['xslt-txt'] = XsltTxtReporter
+
+reporter_classes['html'] = reporter_classes['xslt-html']
+reporter_classes['txt'] = reporter_classes['xslt-txt']
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
new file mode 100644
index 0000000..e42d842
--- /dev/null
+++ b/mypy/sametypes.py
@@ -0,0 +1,123 @@
+from typing import List, cast, Sequence
+
+from mypy.types import (
+    Type, UnboundType, ErrorType, AnyType, NoneTyp, Void, TupleType, UnionType, CallableType,
+    TypeVarType, Instance, TypeVisitor, ErasedType, TypeList, Overloaded, PartialType, DeletedType
+)
+
+
+def is_same_type(left: Type, right: Type) -> bool:
+    """Is 'left' the same type as 'right'?"""
+
+    if isinstance(right, UnboundType):
+        # Consider unbound types to be the same as anything else to reduce
+        # the number of spurious error messages generated.
+        return True
+    else:
+        # Simplify types to canonical forms.
+        #
+        # There are multiple possible union types that represent the same type,
+        # such as Union[int, bool, str] and Union[int, str]. Also, some union
+        # types can be simplified to non-union types such as Union[int, bool]
+        # -> int. It would be nice if we always had simplified union types but
+        # this is currently not the case, though it often is.
+        left = simplify_union(left)
+        right = simplify_union(right)
+
+        return left.accept(SameTypeVisitor(right))
+
+
+def simplify_union(t: Type) -> Type:
+    if isinstance(t, UnionType):
+        return UnionType.make_simplified_union(t.items)
+    return t
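+# Illustrative consequence of the simplification above (sketch, not part of
+# the original module): given Instance types for builtins.int and
+# builtins.bool,
+#
+#   is_same_type(UnionType([int_type, bool_type]), int_type)
+#
+# can return True, because Union[int, bool] simplifies to int before the
+# structural comparison runs.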
+
+
+def is_same_types(a1: Sequence[Type], a2: Sequence[Type]) -> bool:
+    if len(a1) != len(a2):
+        return False
+    for i in range(len(a1)):
+        if not is_same_type(a1[i], a2[i]):
+            return False
+    return True
+
+
+class SameTypeVisitor(TypeVisitor[bool]):
+    """Visitor for checking whether two types are the 'same' type."""
+
+    def __init__(self, right: Type) -> None:
+        self.right = right
+
+    # visit_x(left) means: is left (which is an instance of X) the same type as
+    # right?
+
+    def visit_unbound_type(self, left: UnboundType) -> bool:
+        return True
+
+    def visit_error_type(self, left: ErrorType) -> bool:
+        return False
+
+    def visit_type_list(self, t: TypeList) -> bool:
+        assert False, 'Not supported'
+
+    def visit_any(self, left: AnyType) -> bool:
+        return isinstance(self.right, AnyType)
+
+    def visit_void(self, left: Void) -> bool:
+        return isinstance(self.right, Void)
+
+    def visit_none_type(self, left: NoneTyp) -> bool:
+        return isinstance(self.right, NoneTyp)
+
+    def visit_erased_type(self, left: ErasedType) -> bool:
+        # Should not get here.
+        raise RuntimeError()
+
+    def visit_deleted_type(self, left: DeletedType) -> bool:
+        return isinstance(self.right, DeletedType)
+
+    def visit_instance(self, left: Instance) -> bool:
+        return (isinstance(self.right, Instance) and
+                left.type == (cast(Instance, self.right)).type and
+                is_same_types(left.args, (cast(Instance, self.right)).args))
+
+    def visit_type_var(self, left: TypeVarType) -> bool:
+        return (isinstance(self.right, TypeVarType) and
+                left.id == (cast(TypeVarType, self.right)).id)
+
+    def visit_callable_type(self, left: CallableType) -> bool:
+        # FIX generics
+        if isinstance(self.right, CallableType):
+            cright = cast(CallableType, self.right)
+            return (is_same_type(left.ret_type, cright.ret_type) and
+                    is_same_types(left.arg_types, cright.arg_types) and
+                    left.arg_names == cright.arg_names and
+                    left.arg_kinds == cright.arg_kinds and
+                    left.is_type_obj() == cright.is_type_obj() and
+                    left.is_ellipsis_args == cright.is_ellipsis_args)
+        else:
+            return False
+
+    def visit_tuple_type(self, left: TupleType) -> bool:
+        if isinstance(self.right, TupleType):
+            return is_same_types(left.items, cast(TupleType, self.right).items)
+        else:
+            return False
+
+    def visit_union_type(self, left: UnionType) -> bool:
+        # XXX This is a test for syntactic equality, not equivalence
+        if isinstance(self.right, UnionType):
+            return is_same_types(left.items, cast(UnionType, self.right).items)
+        else:
+            return False
+
+    def visit_overloaded(self, left: Overloaded) -> bool:
+        if isinstance(self.right, Overloaded):
+            return is_same_types(left.items(), self.right.items())
+        else:
+            return False
+
+    def visit_partial_type(self, left: PartialType) -> bool:
+        # A partial type is not fully defined, so the result is indeterminate. We shouldn't
+        # get here.
+        raise RuntimeError
diff --git a/mypy/semanal.py b/mypy/semanal.py
new file mode 100644
index 0000000..2d20c0a
--- /dev/null
+++ b/mypy/semanal.py
@@ -0,0 +1,2603 @@
+"""The semantic analyzer.
+
+Bind names to definitions and do various other simple consistency
+checks. For example, consider this program:
+
+  x = 1
+  y = x
+
+Here semantic analysis would detect that the assignment 'x = 1'
+defines a new variable, the type of which is to be inferred (in a
+later pass; type inference or type checking is not part of semantic
+analysis).  Also, it would bind both references to 'x' to the same
+module-level variable node.  The second assignment would also be
+analyzed, and the type of 'y' marked as being inferred.
+
+Semantic analysis is the first analysis pass after parsing, and it is
+subdivided into three passes:
+
+ * FirstPass looks up externally visible names defined in a module but
+   ignores imports and local definitions.  It helps enable (some)
+   cyclic references between modules, such as module 'a' that imports
+   module 'b' and uses names defined in b *and* vice versa.  The first
+   pass can be performed before dependent modules have been processed.
+
+ * SemanticAnalyzer is the second pass.  It does the bulk of the work.
+   It assumes that dependent modules have been semantically analyzed,
+   up to the second pass, unless there is an import cycle.
+
+ * ThirdPass checks that type argument counts are valid; for example,
+   it will reject Dict[int].  We don't do this in the second pass,
+   since we infer the type argument counts of classes during this
+   pass, and it is possible to refer to classes defined later in a
+   file, which would not have the type argument count set yet.
+
+Semantic analysis of types is implemented in module mypy.typeanal.
+
+TODO: Check if the third pass slows down type checking significantly.
+  We could probably get rid of it -- for example, we could collect all
+  analyzed types in a collection and check them without having to
+  traverse the entire AST.
+"""
+
+from typing import (
+    List, Dict, Set, Tuple, cast, Any, overload, TypeVar, Union, Optional
+)
+
+from mypy.nodes import (
+    MypyFile, TypeInfo, Node, AssignmentStmt, FuncDef, OverloadedFuncDef,
+    ClassDef, Var, GDEF, MODULE_REF, FuncItem, Import,
+    ImportFrom, ImportAll, Block, LDEF, NameExpr, MemberExpr,
+    IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt,
+    RaiseStmt, AssertStmt, OperatorAssignmentStmt, WhileStmt,
+    ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt,
+    GlobalDecl, SuperExpr, DictExpr, CallExpr, RefExpr, OpExpr, UnaryExpr,
+    SliceExpr, CastExpr, TypeApplication, Context, SymbolTable,
+    SymbolTableNode, BOUND_TVAR, UNBOUND_TVAR, ListComprehension, GeneratorExpr,
+    FuncExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr,
+    StrExpr, PrintStmt, ConditionalExpr, PromoteExpr,
+    ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, MroError, type_aliases,
+    YieldFromExpr, NamedTupleExpr, NonlocalDecl,
+    SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
+    YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, COVARIANT, CONTRAVARIANT,
+    INVARIANT, UNBOUND_IMPORTED
+)
+from mypy.visitor import NodeVisitor
+from mypy.traverser import TraverserVisitor
+from mypy.errors import Errors, report_internal_error
+from mypy.types import (
+    NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
+    FunctionLike, UnboundType, TypeList, ErrorType, TypeVarDef,
+    replace_leading_arg_type, TupleType, UnionType, StarType, EllipsisType
+)
+from mypy.nodes import function_type, implicit_module_attrs
+from mypy.typeanal import TypeAnalyser, TypeAnalyserPass3, analyze_type_alias
+from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
+from mypy.lex import lex
+from mypy.parsetype import parse_type
+from mypy.sametypes import is_same_type
+from mypy import defaults
+
+
+T = TypeVar('T')
+
+
+# Inferred value of an expression.
+ALWAYS_TRUE = 0
+ALWAYS_FALSE = 1
+TRUTH_VALUE_UNKNOWN = 2
+
+# Map from obsolete name to the current spelling.
+obsolete_name_mapping = {
+    'typing.Function': 'typing.Callable',
+    'typing.typevar': 'typing.TypeVar',
+}
+
+# Hard coded type promotions (shared between all Python versions).
+# These add extra ad-hoc edges to the subtyping relation. For example,
+# int is considered a subtype of float, even though there is no
+# subclass relationship.
+TYPE_PROMOTIONS = {
+    'builtins.int': 'builtins.float',
+    'builtins.float': 'builtins.complex',
+}
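+# Illustrative effect of these promotions (sketch, not part of this module):
+#
+#   def f(x: float) -> None: ...
+#   f(3)   # accepted: int is promoted to float even though int is not a
+#          # nominal subclass of float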
+
+# Hard coded type promotions for Python 3.
+#
+# Note that the bytearray -> bytes promotion is a little unsafe
+# as some functions only accept bytes objects. Here convenience
+# trumps safety.
+TYPE_PROMOTIONS_PYTHON3 = TYPE_PROMOTIONS.copy()
+TYPE_PROMOTIONS_PYTHON3.update({
+    'builtins.bytearray': 'builtins.bytes',
+})
+
+# Hard coded type promotions for Python 2.
+#
+# These promotions are unsafe, but we are doing them anyway
+# for convenience and also for Python 3 compatibility
+# (bytearray -> str).
+TYPE_PROMOTIONS_PYTHON2 = TYPE_PROMOTIONS.copy()
+TYPE_PROMOTIONS_PYTHON2.update({
+    'builtins.str': 'builtins.unicode',
+    'builtins.bytearray': 'builtins.str',
+})
+
+
+# When analyzing a function, should we analyze the whole function in one go, or
+# should we only perform one phase of the analysis? The latter is used for
+# nested functions. In the first phase we add the function to the symbol table
+# but don't process body. In the second phase we process function body. This
+# way we can have mutually recursive nested functions.
+FUNCTION_BOTH_PHASES = 0  # Everything in one go
+FUNCTION_FIRST_PHASE_POSTPONE_SECOND = 1  # Add to symbol table but postpone body
+FUNCTION_SECOND_PHASE = 2  # Only analyze body
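+# For example (illustrative sketch), postponing bodies lets nested functions
+# refer to each other regardless of definition order:
+#
+#   def outer() -> None:
+#       def even(n: int) -> bool:
+#           return n == 0 or odd(n - 1)
+#       def odd(n: int) -> bool:
+#           return n != 0 and even(n - 1)
+#
+# In the first phase both 'even' and 'odd' are added to outer's symbol table;
+# their bodies are analyzed only in the second phase, when both names exist.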
+
+
+class SemanticAnalyzer(NodeVisitor):
+    """Semantically analyze parsed mypy files.
+
+    The analyzer binds names and does various consistency checks for a
+    parse tree. Note that type checking is performed as a separate
+    pass.
+
+    This is the second phase of semantic analysis.
+    """
+
+    # Library search paths
+    lib_path = None  # type: List[str]
+    # Module name space
+    modules = None  # type: Dict[str, MypyFile]
+    # Global name space for current module
+    globals = None  # type: SymbolTable
+    # Names declared using "global" (separate set for each scope)
+    global_decls = None  # type: List[Set[str]]
+    # Names declared using "nonlocal" (separate set for each scope)
+    nonlocal_decls = None  # type: List[Set[str]]
+    # Local names of function scopes; None for non-function scopes.
+    locals = None  # type: List[SymbolTable]
+    # Nested block depths of scopes
+    block_depth = None  # type: List[int]
+    # TypeInfo of directly enclosing class (or None)
+    type = None  # type: TypeInfo
+    # Stack of outer classes.
+    type_stack = None  # type: List[TypeInfo]
+    # Type variables that are bound by the directly enclosing class
+    bound_tvars = None  # type: List[SymbolTableNode]
+    # Stack of type variables that were bound by outer classes
+    tvar_stack = None  # type: List[List[SymbolTableNode]]
+
+    # Stack of functions being analyzed
+    function_stack = None  # type: List[FuncItem]
+
+    # Status of postponing analysis of nested function bodies. By using this we
+    # can have mutually recursive nested functions. Values are FUNCTION_x
+    # constants. Note that separate phases are not used for methods.
+    postpone_nested_functions_stack = None  # type: List[int]
+    # Postponed functions collected if
+    # postpone_nested_functions_stack[-1] == FUNCTION_FIRST_PHASE_POSTPONE_SECOND.
+    postponed_functions_stack = None  # type: List[List[Node]]
+
+    loop_depth = 0         # Depth of breakable loops
+    cur_mod_id = ''        # Current module id (or None) (phase 2)
+    is_stub_file = False   # Are we analyzing a stub file?
+    imports = None  # type: Set[str]  # Imported modules (during phase 2 analysis)
+    errors = None  # type: Errors     # Keeps track of generated errors
+
+    def __init__(self, lib_path: List[str], errors: Errors,
+                 pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+        """Construct semantic analyzer.
+
+        Use lib_path to search for modules, and report analysis errors
+        using the Errors instance.
+        """
+        self.locals = [None]
+        self.imports = set()
+        self.type = None
+        self.type_stack = []
+        self.bound_tvars = None
+        self.tvar_stack = []
+        self.function_stack = []
+        self.block_depth = [0]
+        self.loop_depth = 0
+        self.lib_path = lib_path
+        self.errors = errors
+        self.modules = {}
+        self.pyversion = pyversion
+        self.postpone_nested_functions_stack = [FUNCTION_BOTH_PHASES]
+        self.postponed_functions_stack = []
+
+    def visit_file(self, file_node: MypyFile, fnam: str) -> None:
+        self.errors.set_file(fnam)
+        self.errors.set_ignored_lines(file_node.ignored_lines)
+        self.cur_mod_node = file_node
+        self.cur_mod_id = file_node.fullname()
+        self.is_stub_file = fnam.lower().endswith('.pyi')
+        self.globals = file_node.names
+
+        if 'builtins' in self.modules:
+            self.globals['__builtins__'] = SymbolTableNode(
+                MODULE_REF, self.modules['builtins'], self.cur_mod_id)
+
+        for name in implicit_module_attrs:
+            v = self.globals[name].node
+            if isinstance(v, Var):
+                v.type = self.anal_type(v.type)
+                v.is_ready = True
+
+        defs = file_node.defs
+        for d in defs:
+            self.accept(d)
+
+        if self.cur_mod_id == 'builtins':
+            remove_imported_names_from_symtable(self.globals, 'builtins')
+
+        self.errors.set_ignored_lines(set())
+
+    def visit_func_def(self, defn: FuncDef) -> None:
+        phase_info = self.postpone_nested_functions_stack[-1]
+        if phase_info != FUNCTION_SECOND_PHASE:
+            # First phase of analysis for function.
+            self.errors.push_function(defn.name())
+            self.update_function_type_variables(defn)
+            self.errors.pop_function()
+
+            defn.is_conditional = self.block_depth[-1] > 0
+
+            # TODO(jukka): Figure out how to share the various cases. It doesn't
+            #   make sense to have (almost) duplicate code (here and elsewhere) for
+            #   3 cases: module-level, class-level and local names. Maybe implement
+            #   a common stack of namespaces. As the 3 kinds of namespaces have
+            #   different semantics, this wouldn't always work, but it might still
+            #   be a win.
+            if self.is_class_scope():
+                # Method definition
+                defn.info = self.type
+                if not defn.is_decorated and not defn.is_overload:
+                    if defn.name() in self.type.names:
+                        # Redefinition. Conditional redefinition is okay.
+                        n = self.type.names[defn.name()].node
+                        if self.is_conditional_func(n, defn):
+                            defn.original_def = cast(FuncDef, n)
+                        else:
+                            self.name_already_defined(defn.name(), defn)
+                    self.type.names[defn.name()] = SymbolTableNode(MDEF, defn)
+                self.prepare_method_signature(defn)
+            elif self.is_func_scope():
+                # Nested function
+                if not defn.is_decorated and not defn.is_overload:
+                    if defn.name() in self.locals[-1]:
+                        # Redefinition. Conditional redefinition is okay.
+                        n = self.locals[-1][defn.name()].node
+                        if self.is_conditional_func(n, defn):
+                            defn.original_def = cast(FuncDef, n)
+                        else:
+                            self.name_already_defined(defn.name(), defn)
+                    else:
+                        self.add_local(defn, defn)
+            else:
+                # Top-level function
+                if not defn.is_decorated and not defn.is_overload:
+                    symbol = self.globals.get(defn.name())
+                    if isinstance(symbol.node, FuncDef) and symbol.node != defn:
+                        # This is redefinition. Conditional redefinition is okay.
+                        original_def = symbol.node
+                        if self.is_conditional_func(original_def, defn):
+                            # Conditional function definition -- multiple defs are ok.
+                            defn.original_def = cast(FuncDef, original_def)
+                        else:
+                            # Report error.
+                            self.check_no_global(defn.name(), defn, True)
+            if phase_info == FUNCTION_FIRST_PHASE_POSTPONE_SECOND:
+                # Postpone this function (for the second phase).
+                self.postponed_functions_stack[-1].append(defn)
+                return
+        if phase_info != FUNCTION_FIRST_PHASE_POSTPONE_SECOND:
+            # Second phase of analysis for function.
+            self.errors.push_function(defn.name())
+            self.analyze_function(defn)
+            self.errors.pop_function()
+
+    def prepare_method_signature(self, func: FuncDef) -> None:
+        """Check basic signature validity and tweak annotation of self/cls argument."""
+        # Only non-static methods are special.
+        if not func.is_static:
+            if not func.arguments:
+                self.fail('Method must have at least one argument', func)
+            elif func.type:
+                sig = cast(FunctionLike, func.type)
+                # TODO: A classmethod's first argument should be more
+                #       precisely typed than Any.
+                leading_type = AnyType() if func.is_class else self_type(self.type)
+                func.type = replace_implicit_first_type(sig, leading_type)
+
+    def is_conditional_func(self, previous: Node, new: FuncDef) -> bool:
+        """Does 'new' conditionally redefine 'previous'?
+
+        We reject straight redefinitions of functions, as they are usually
+        a programming error. For example:
+
+        . def f(): ...
+        . def f(): ...  # Error: 'f' redefined
+        """
+        return isinstance(previous, (FuncDef, Var)) and new.is_conditional
+
+    def update_function_type_variables(self, defn: FuncDef) -> None:
+        """Make any type variables in the signature of defn explicit.
+
+        Update the signature of defn to contain type variable definitions
+        if defn is generic.
+        """
+        if defn.type:
+            functype = cast(CallableType, defn.type)
+            typevars = self.infer_type_variables(functype)
+            # Do not define a new type variable if already defined in scope.
+            typevars = [(name, tvar) for name, tvar in typevars
+                        if not self.is_defined_type_var(name, defn)]
+            if typevars:
+                defs = [TypeVarDef(tvar[0], -i - 1, tvar[1].values, self.object_type(),
+                                   tvar[1].variance)
+                        for i, tvar in enumerate(typevars)]
+                functype.variables = defs
+
+    def infer_type_variables(self,
+                             type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
+        """Return list of unique type variables referred to in a callable."""
+        names = []  # type: List[str]
+        tvars = []  # type: List[TypeVarExpr]
+        for arg in type.arg_types + [type.ret_type]:
+            for name, tvar_expr in self.find_type_variables_in_type(arg):
+                if name not in names:
+                    names.append(name)
+                    tvars.append(tvar_expr)
+        return list(zip(names, tvars))
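+    # Illustrative sketch (hypothetical function, not from this module): for
+    #
+    #   def first(xs: List[T]) -> T: ...
+    #
+    # both an argument type and the return type mention the unbound type
+    # variable T, so this returns a single ('T', <TypeVarExpr for T>) pair,
+    # with the duplicate reference removed.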
+
+    def find_type_variables_in_type(
+            self, type: Type) -> List[Tuple[str, TypeVarExpr]]:
+        """Return a list of all unique type variable references in type.
+
+        This effectively does partial name binding, the results of which are mostly thrown away.
+        """
+        result = []  # type: List[Tuple[str, TypeVarExpr]]
+        if isinstance(type, UnboundType):
+            name = type.name
+            node = self.lookup_qualified(name, type)
+            if node and node.kind == UNBOUND_TVAR:
+                result.append((name, cast(TypeVarExpr, node.node)))
+            for arg in type.args:
+                result.extend(self.find_type_variables_in_type(arg))
+        elif isinstance(type, TypeList):
+            for item in type.items:
+                result.extend(self.find_type_variables_in_type(item))
+        elif isinstance(type, UnionType):
+            for item in type.items:
+                result.extend(self.find_type_variables_in_type(item))
+        elif isinstance(type, AnyType):
+            pass
+        elif isinstance(type, EllipsisType) or isinstance(type, TupleType):
+            pass
+        else:
+            assert False, 'Unsupported type %s' % type
+        return result
+
+    def is_defined_type_var(self, tvar: str, context: Node) -> bool:
+        return self.lookup_qualified(tvar, context).kind == BOUND_TVAR
+
+    def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
+        t = []  # type: List[CallableType]
+        for i, item in enumerate(defn.items):
+            # TODO support decorated overloaded functions properly
+            item.is_overload = True
+            item.func.is_overload = True
+            item.accept(self)
+            t.append(cast(CallableType, function_type(item.func,
+                                                  self.builtin_type('builtins.function'))))
+            if item.func.is_property and i == 0:
+                # This defines a property, probably with a setter and/or deleter.
+                self.analyze_property_with_multi_part_definition(defn)
+                break
+            if not [dec for dec in item.decorators
+                    if refers_to_fullname(dec, 'typing.overload')]:
+                self.fail("'overload' decorator expected", item)
+
+        defn.type = Overloaded(t)
+        defn.type.line = defn.line
+
+        if self.is_class_scope():
+            self.type.names[defn.name()] = SymbolTableNode(MDEF, defn,
+                                                           typ=defn.type)
+            defn.info = self.type
+        elif self.is_func_scope():
+            self.add_local(defn, defn)
+
+    def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) -> None:
+        """Analyze a propery defined using multiple methods (e.g., using @x.setter).
+
+        Assume that the first method (@property) has already been analyzed.
+        """
+        defn.is_property = True
+        items = defn.items
+        for item in items[1:]:
+            if len(item.decorators) == 1:
+                node = item.decorators[0]
+                if isinstance(node, MemberExpr):
+                    if node.name == 'setter':
+                        # The first item represents the entire property.
+                        defn.items[0].var.is_settable_property = True
+                        # Get abstractness from the original definition.
+                        item.func.is_abstract = items[0].func.is_abstract
+            else:
+                self.fail("Decorated property not supported", item)
+            item.func.accept(self)
+
+    def analyze_function(self, defn: FuncItem) -> None:
+        is_method = self.is_class_scope()
+        tvarnodes = self.add_func_type_variables_to_symbol_table(defn)
+        if defn.type:
+            # Signature must be analyzed in the surrounding scope so that
+            # class-level imported names and type variables are in scope.
+            defn.type = self.anal_type(defn.type)
+            self.check_function_signature(defn)
+            if isinstance(defn, FuncDef):
+                defn.info = self.type
+                defn.type = set_callable_name(defn.type, defn)
+        for arg in defn.arguments:
+            if arg.initializer:
+                arg.initializer.accept(self)
+        self.function_stack.append(defn)
+        self.enter()
+        for arg in defn.arguments:
+            self.add_local(arg.variable, defn)
+        for arg in defn.arguments:
+            if arg.initialization_statement:
+                lvalue = arg.initialization_statement.lvalues[0]
+                lvalue.accept(self)
+
+        # The first argument of a non-static, non-class method is like 'self'
+        # (though the name could be different), having the enclosing class's
+        # instance type.
+        if is_method and not defn.is_static and not defn.is_class and defn.arguments:
+            defn.arguments[0].variable.is_self = True
+
+        # First analyze body of the function but ignore nested functions.
+        self.postpone_nested_functions_stack.append(FUNCTION_FIRST_PHASE_POSTPONE_SECOND)
+        self.postponed_functions_stack.append([])
+        defn.body.accept(self)
+
+        # Analyze nested functions (if any) as a second phase.
+        self.postpone_nested_functions_stack[-1] = FUNCTION_SECOND_PHASE
+        for postponed in self.postponed_functions_stack[-1]:
+            postponed.accept(self)
+        self.postpone_nested_functions_stack.pop()
+        self.postponed_functions_stack.pop()
+
+        disable_typevars(tvarnodes)
+        self.leave()
+        self.function_stack.pop()
+
+    def add_func_type_variables_to_symbol_table(
+            self, defn: FuncItem) -> List[SymbolTableNode]:
+        nodes = []  # type: List[SymbolTableNode]
+        if defn.type:
+            tt = defn.type
+            names = self.type_var_names()
+            items = cast(CallableType, tt).variables
+            for i, item in enumerate(items):
+                name = item.name
+                if name in names:
+                    self.name_already_defined(name, defn)
+                node = self.bind_type_var(name, -i - 1, defn)
+                nodes.append(node)
+                names.add(name)
+        return nodes
+
+    def type_var_names(self) -> Set[str]:
+        if not self.type:
+            return set()
+        else:
+            return set(self.type.type_vars)
+
+    def bind_type_var(self, fullname: str, id: int,
+                     context: Context) -> SymbolTableNode:
+        node = self.lookup_qualified(fullname, context)
+        node.kind = BOUND_TVAR
+        node.tvar_id = id
+        return node
+
+    def check_function_signature(self, fdef: FuncItem) -> None:
+        sig = cast(CallableType, fdef.type)
+        if len(sig.arg_types) < len(fdef.arguments):
+            self.fail('Type signature has too few arguments', fdef)
+        elif len(sig.arg_types) > len(fdef.arguments):
+            self.fail('Type signature has too many arguments', fdef)
+
+    def visit_class_def(self, defn: ClassDef) -> None:
+        self.clean_up_bases_and_infer_type_variables(defn)
+        self.setup_class_def_analysis(defn)
+
+        self.bind_class_type_vars(defn)
+
+        self.analyze_base_classes(defn)
+        self.analyze_metaclass(defn)
+
+        for decorator in defn.decorators:
+            self.analyze_class_decorator(defn, decorator)
+
+        self.enter_class(defn)
+
+        self.setup_is_builtinclass(defn)
+
+        # Analyze class body.
+        defn.defs.accept(self)
+
+        self.calculate_abstract_status(defn.info)
+        self.setup_type_promotion(defn)
+
+        self.leave_class()
+        self.unbind_class_type_vars()
+
+    def enter_class(self, defn: ClassDef) -> None:
+        # Remember previous active class
+        self.type_stack.append(self.type)
+        self.locals.append(None)  # Add class scope
+        self.block_depth.append(-1)  # The class body increments this to 0
+        self.postpone_nested_functions_stack.append(FUNCTION_BOTH_PHASES)
+        self.type = defn.info
+
+    def leave_class(self) -> None:
+        """ Restore analyzer state. """
+        self.postpone_nested_functions_stack.pop()
+        self.block_depth.pop()
+        self.locals.pop()
+        self.type = self.type_stack.pop()
+
+    def bind_class_type_vars(self, defn: ClassDef) -> None:
+        """ Unbind type variables of previously active class and bind
+        the type variables for the active class.
+        """
+        if self.bound_tvars:
+            disable_typevars(self.bound_tvars)
+        self.tvar_stack.append(self.bound_tvars)
+        self.bound_tvars = self.bind_class_type_variables_in_symbol_table(defn.info)
+
+    def unbind_class_type_vars(self) -> None:
+        """ Unbind the active class' type vars and rebind the
+        type vars of the previously active class.
+        """
+        disable_typevars(self.bound_tvars)
+        self.bound_tvars = self.tvar_stack.pop()
+        if self.bound_tvars:
+            enable_typevars(self.bound_tvars)
+
+    def analyze_class_decorator(self, defn: ClassDef, decorator: Node) -> None:
+        decorator.accept(self)
+
+    def setup_is_builtinclass(self, defn: ClassDef):
+        for decorator in defn.decorators:
+            if refers_to_fullname(decorator, 'typing.builtinclass'):
+                defn.is_builtinclass = True
+        if defn.fullname == 'builtins.object':
+            # Only 'object' is marked as a built-in class, as otherwise things elsewhere
+            # would break. We need a better way of dealing with built-in classes.
+            defn.is_builtinclass = True
+
+    def calculate_abstract_status(self, typ: TypeInfo) -> None:
+        """Calculate abstract status of a class.
+
+        Set is_abstract of the type to True if the type has an unimplemented
+        abstract attribute.  Also compute a list of abstract attributes.
+        """
+        concrete = set()  # type: Set[str]
+        abstract = []  # type: List[str]
+        for base in typ.mro:
+            for name, symnode in base.names.items():
+                node = symnode.node
+                if isinstance(node, OverloadedFuncDef):
+                    # Unwrap an overloaded function definition. We can just
+                    # arbitrarily check the first overload item. If the
+                    # different items have a different abstract status, there
+                    # should be an error reported elsewhere.
+                    func = node.items[0]  # type: Node
+                else:
+                    func = node
+                if isinstance(func, Decorator):
+                    fdef = func.func
+                    if fdef.is_abstract and name not in concrete:
+                        typ.is_abstract = True
+                        abstract.append(name)
+                concrete.add(name)
+        typ.abstract_attributes = sorted(abstract)
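+    # Illustrative sketch (hypothetical classes, not from this module):
+    #
+    #   class A(metaclass=ABCMeta):
+    #       @abstractmethod
+    #       def f(self) -> None: ...
+    #   class B(A): ...
+    #
+    # 'f' is abstract and never given a concrete definition, so both A and B
+    # end up with is_abstract == True and abstract_attributes == ['f'].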
+
+    def setup_type_promotion(self, defn: ClassDef) -> None:
+        """Setup extra, ad-hoc subtyping relationships between classes (promotion).
+
+        This includes things like 'int' being compatible with 'float'.
+        """
+        promote_target = None  # type: Type
+        for decorator in defn.decorators:
+            if isinstance(decorator, CallExpr):
+                analyzed = decorator.analyzed
+                if isinstance(analyzed, PromoteExpr):
+                    # _promote class decorator (undocumented feature).
+                    promote_target = analyzed.type
+        if not promote_target:
+            promotions = (TYPE_PROMOTIONS_PYTHON3 if self.pyversion[0] >= 3
+                          else TYPE_PROMOTIONS_PYTHON2)
+            if defn.fullname in promotions:
+                promote_target = self.named_type_or_none(promotions[defn.fullname])
+        defn.info._promote = promote_target
+
+    def clean_up_bases_and_infer_type_variables(self, defn: ClassDef) -> None:
+        """Remove extra base classes such as Generic and infer type vars.
+
+        For example, consider this class:
+
+        . class Foo(Bar, Generic[T]): ...
+
+        Now we will remove Generic[T] from bases of Foo and infer that the
+        type variable 'T' is a type argument of Foo.
+
+        Note that this is performed *before* semantic analysis.
+        """
+        removed = []  # type: List[int]
+        type_vars = []  # type: List[TypeVarDef]
+        for i, base_expr in enumerate(defn.base_type_exprs):
+            try:
+                base = expr_to_unanalyzed_type(base_expr)
+            except TypeTranslationError:
+                # This error will be caught later.
+                continue
+            tvars = self.analyze_typevar_declaration(base)
+            if tvars is not None:
+                if type_vars:
+                    self.fail('Duplicate Generic in bases', defn)
+                removed.append(i)
+                for j, (name, tvar_expr) in enumerate(tvars):
+                    type_vars.append(TypeVarDef(name, j + 1, tvar_expr.values,
+                                                self.object_type(), tvar_expr.variance))
+        if type_vars:
+            defn.type_vars = type_vars
+            if defn.info:
+                defn.info.type_vars = [tv.name for tv in type_vars]
+        for i in reversed(removed):
+            del defn.base_type_exprs[i]
+
+    def analyze_typevar_declaration(self, t: Type) -> Optional[List[Tuple[str, TypeVarExpr]]]:
+        if not isinstance(t, UnboundType):
+            return None
+        unbound = cast(UnboundType, t)
+        sym = self.lookup_qualified(unbound.name, unbound)
+        if sym is None or sym.node is None:
+            return None
+        if sym.node.fullname() == 'typing.Generic':
+            tvars = []  # type: List[Tuple[str, TypeVarExpr]]
+            for arg in unbound.args:
+                tvar = self.analyze_unbound_tvar(arg)
+                if tvar:
+                    tvars.append(tvar)
+                else:
+                    self.fail('Free type variable expected in %s[...]' %
+                              sym.node.name(), t)
+            return tvars
+        return None
+
+    def analyze_unbound_tvar(self, t: Type) -> Tuple[str, TypeVarExpr]:
+        if not isinstance(t, UnboundType):
+            return None
+        unbound = cast(UnboundType, t)
+        sym = self.lookup_qualified(unbound.name, unbound)
+        if sym is not None and sym.kind == UNBOUND_TVAR:
+            return unbound.name, cast(TypeVarExpr, sym.node)
+        return None
+
+    def setup_class_def_analysis(self, defn: ClassDef) -> None:
+        """Prepare for the analysis of a class definition."""
+        if not defn.info:
+            defn.info = TypeInfo(SymbolTable(), defn)
+            defn.info._fullname = defn.info.name()
+        if self.is_func_scope() or self.type:
+            kind = MDEF
+            if self.is_func_scope():
+                kind = LDEF
+            self.add_symbol(defn.name, SymbolTableNode(kind, defn.info), defn)
+
+    def analyze_base_classes(self, defn: ClassDef) -> None:
+        """Analyze and set up base classes."""
+        for base_expr in defn.base_type_exprs:
+            # The base class is originally an expression; convert it to a type.
+            try:
+                base = self.expr_to_analyzed_type(base_expr)
+            except TypeTranslationError:
+                self.fail('Invalid base class', base_expr)
+                defn.info.mro = []
+                return
+            if isinstance(base, TupleType):
+                if defn.info.tuple_type:
+                    self.fail("Class has two incompatible bases derived from tuple", defn)
+                defn.info.tuple_type = base
+                base = base.fallback
+                if (not self.is_stub_file and not defn.info.is_named_tuple and
+                        base.type.fullname() == 'builtins.tuple'):
+                    self.fail("Tuple[...] not supported as a base class outside a stub file", defn)
+            if isinstance(base, Instance):
+                defn.base_types.append(base)
+            elif isinstance(base, TupleType):
+                assert False, "Internal error: Unexpected TupleType base class"
+            elif isinstance(base, AnyType):
+                # We don't know anything about the base class. Make any unknown attributes
+                # have type 'Any'.
+                defn.info.fallback_to_any = True
+            elif not isinstance(base, UnboundType):
+                self.fail('Invalid base class', base_expr)
+            if isinstance(base, Instance):
+                defn.info.is_enum = base.type.fullname() == 'enum.Enum'
+        # Add 'object' as implicit base if there is no other base class.
+        if (not defn.base_types and defn.fullname != 'builtins.object'):
+            obj = self.object_type()
+            defn.base_types.insert(0, obj)
+        defn.info.bases = defn.base_types
+        if not self.verify_base_classes(defn):
+            defn.info.mro = []
+            return
+        try:
+            defn.info.calculate_mro()
+        except MroError:
+            self.fail("Cannot determine consistent method resolution order "
+                      '(MRO) for "%s"' % defn.name, defn)
+            defn.info.mro = []
+        else:
+            # If there are cyclic imports, we may be missing 'object' in
+            # the MRO. Fix MRO if needed.
+            if defn.info.mro[-1].fullname() != 'builtins.object':
+                defn.info.mro.append(self.object_type().type)
+        # The property of falling back to Any is inherited.
+        defn.info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in defn.info.mro)
+
+    def expr_to_analyzed_type(self, expr: Node) -> Type:
+        if isinstance(expr, CallExpr):
+            expr.accept(self)
+            info = self.check_namedtuple(expr)
+            if info is None:
+                # Some form of namedtuple is the only valid type that looks like a call
+                # expression. This isn't a valid type.
+                raise TypeTranslationError()
+            fallback = Instance(info, [])
+            return TupleType(info.tuple_type.items, fallback=fallback)
+        typ = expr_to_unanalyzed_type(expr)
+        return self.anal_type(typ)
+
+    def verify_base_classes(self, defn: ClassDef) -> bool:
+        info = defn.info
+        for base in info.bases:
+            baseinfo = base.type
+            if self.is_base_class(info, baseinfo):
+                self.fail('Cycle in inheritance hierarchy', defn)
+                # Clear bases to forcefully get rid of the cycle.
+                info.bases = []
+            if baseinfo.fullname() == 'builtins.bool':
+                self.fail("'%s' is not a valid base class" %
+                          baseinfo.name(), defn)
+                return False
+        dup = find_duplicate(info.direct_base_classes())
+        if dup:
+            self.fail('Duplicate base class "%s"' % dup.name(), defn)
+            return False
+        return True
+
+    def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool:
+        """Determine if t is a base class of s (but do not use mro)."""
+        # Search the base class graph for t, starting from s.
+        worklist = [s]
+        visited = {s}
+        while worklist:
+            nxt = worklist.pop()
+            if nxt == t:
+                return True
+            for base in nxt.bases:
+                if base.type not in visited:
+                    worklist.append(base.type)
+                    visited.add(base.type)
+        return False
+
+    def analyze_metaclass(self, defn: ClassDef) -> None:
+        if defn.metaclass:
+            sym = self.lookup_qualified(defn.metaclass, defn)
+            if sym is not None and not isinstance(sym.node, TypeInfo):
+                self.fail("Invalid metaclass '%s'" % defn.metaclass, defn)
+
+    def object_type(self) -> Instance:
+        return self.named_type('__builtins__.object')
+
+    def named_type(self, qualified_name: str, args: List[Type] = None) -> Instance:
+        sym = self.lookup_qualified(qualified_name, None)
+        return Instance(cast(TypeInfo, sym.node), args or [])
+
+    def named_type_or_none(self, qualified_name: str) -> Instance:
+        sym = self.lookup_fully_qualified_or_none(qualified_name)
+        if not sym:
+            return None
+        return Instance(cast(TypeInfo, sym.node), [])
+
+    def is_instance_type(self, t: Type) -> bool:
+        return isinstance(t, Instance)
+
+    def bind_class_type_variables_in_symbol_table(
+            self, info: TypeInfo) -> List[SymbolTableNode]:
+        vars = info.type_vars
+        nodes = []  # type: List[SymbolTableNode]
+        for index, var in enumerate(vars, 1):
+            node = self.bind_type_var(var, index, info)
+            nodes.append(node)
+        return nodes
+
+    def visit_import(self, i: Import) -> None:
+        for id, as_id in i.ids:
+            if as_id is not None:
+                self.add_module_symbol(id, as_id, module_public=True, context=i)
+            else:
+                # Modules imported in a stub file without using 'as x' won't get exported when
+                # doing 'from m import *'.
+                module_public = not self.is_stub_file
+                base = id.split('.')[0]
+                self.add_module_symbol(base, base, module_public=module_public,
+                                       context=i)
+
+    def add_module_symbol(self, id: str, as_id: str, module_public: bool,
+                          context: Context) -> None:
+        if id in self.modules:
+            m = self.modules[id]
+            self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m, self.cur_mod_id,
+                                                   module_public=module_public), context)
+        else:
+            self.add_unknown_symbol(as_id, context)
+
+    def visit_import_from(self, imp: ImportFrom) -> None:
+        import_id = self.correct_relative_import(imp)
+        if import_id in self.modules:
+            module = self.modules[import_id]
+            for id, as_id in imp.names:
+                node = module.names.get(id)
+                if node and node.kind != UNBOUND_IMPORTED:
+                    node = self.normalize_type_alias(node, imp)
+                    if not node:
+                        return
+                    imported_id = as_id or id
+                    existing_symbol = self.globals.get(imported_id)
+                    if existing_symbol:
+                        # Imports can redefine a variable. They get special treatment.
+                        if self.process_import_over_existing_name(
+                                imported_id, existing_symbol, node, imp):
+                            continue
+                    # 'from m import x as x' exports x in a stub file.
+                    module_public = not self.is_stub_file or as_id is not None
+                    symbol = SymbolTableNode(node.kind, node.node,
+                                             self.cur_mod_id,
+                                             node.type_override,
+                                             module_public=module_public)
+                    self.add_symbol(imported_id, symbol, imp)
+                else:
+                    message = "Module has no attribute '{}'".format(id)
+                    extra = self.undefined_name_extra_info('{}.{}'.format(import_id, id))
+                    if extra:
+                        message += " {}".format(extra)
+                    self.fail(message, imp)
+        else:
+            # Missing module.
+            for id, as_id in imp.names:
+                self.add_unknown_symbol(as_id or id, imp)
+
+    def process_import_over_existing_name(self,
+                                          imported_id: str, existing_symbol: SymbolTableNode,
+                                          module_symbol: SymbolTableNode,
+                                          import_node: ImportBase) -> bool:
+        if (existing_symbol.kind in (LDEF, GDEF, MDEF) and
+                isinstance(existing_symbol.node, (Var, FuncDef))):
+            # This is a valid import over an existing definition in the file. Construct a dummy
+            # assignment that we'll use to type check the import.
+            lvalue = NameExpr(imported_id)
+            lvalue.kind = existing_symbol.kind
+            lvalue.node = existing_symbol.node
+            rvalue = NameExpr(imported_id)
+            rvalue.kind = module_symbol.kind
+            rvalue.node = module_symbol.node
+            assignment = AssignmentStmt([lvalue], rvalue)
+            for node in assignment, lvalue, rvalue:
+                node.set_line(import_node)
+            import_node.assignments.append(assignment)
+            return True
+        return False
+
+    def normalize_type_alias(self, node: SymbolTableNode,
+                             ctx: Context) -> SymbolTableNode:
+        if node.fullname in type_aliases:
+            # Node refers to an aliased type such as typing.List; normalize.
+            node = self.lookup_qualified(type_aliases[node.fullname], ctx)
+        return node
+
+    def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str:
+        if node.relative == 0:
+            return node.id
+
+        parts = self.cur_mod_id.split(".")
+        cur_mod_id = self.cur_mod_id
+
+        rel = node.relative
+        if self.cur_mod_node.is_package_init_file():
+            rel -= 1
+        if len(parts) < rel:
+            self.fail("Relative import climbs too many namespaces", node)
+        if rel != 0:
+            cur_mod_id = ".".join(parts[:-rel])
+
+        return cur_mod_id + (("." + node.id) if node.id else "")
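+    # Worked example (illustrative): inside module 'pkg.sub.mod', the import
+    # 'from ..util import x' has relative == 2 and id == 'util'; two trailing
+    # parts of 'pkg.sub.mod' are dropped, giving 'pkg', and the resolved
+    # module id is 'pkg.util'.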
+
+    def visit_import_all(self, i: ImportAll) -> None:
+        i_id = self.correct_relative_import(i)
+        if i_id in self.modules:
+            m = self.modules[i_id]
+            for name, node in m.names.items():
+                node = self.normalize_type_alias(node, i)
+                if not name.startswith('_') and node.module_public:
+                    existing_symbol = self.globals.get(name)
+                    if existing_symbol:
+                        # Imports can redefine a variable. They get special treatment.
+                        if self.process_import_over_existing_name(
+                                name, existing_symbol, node, i):
+                            continue
+                    self.add_symbol(name, SymbolTableNode(node.kind, node.node,
+                                                          self.cur_mod_id), i)
+        else:
+            # Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
+            pass
+
+    def add_unknown_symbol(self, name: str, context: Context) -> None:
+        var = Var(name)
+        var._fullname = self.qualified_name(name)
+        var.is_ready = True
+        var.type = AnyType()
+        self.add_symbol(name, SymbolTableNode(GDEF, var, self.cur_mod_id), context)
+
+    #
+    # Statements
+    #
+
+    def visit_block(self, b: Block) -> None:
+        if b.is_unreachable:
+            return
+        self.block_depth[-1] += 1
+        for s in b.body:
+            self.accept(s)
+        self.block_depth[-1] -= 1
+
+    def visit_block_maybe(self, b: Block) -> None:
+        if b:
+            self.visit_block(b)
+
+    def anal_type(self, t: Type, allow_tuple_literal: bool = False) -> Type:
+        if t:
+            if allow_tuple_literal:
+                # Types such as (t1, t2, ...) are only allowed in assignment statements. They'll
+                # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
+                if isinstance(t, TupleType):
+                    # Unlike TypeAnalyser, also allow implicit tuple types (without Tuple[...]).
+                    star_count = sum(1 for item in t.items if isinstance(item, StarType))
+                    if star_count > 1:
+                        self.fail('At most one star type allowed in a tuple', t)
+                        return TupleType([AnyType() for _ in t.items],
+                                         self.builtin_type('builtins.tuple'), t.line)
+                    items = [self.anal_type(item, True)
+                             for item in t.items]
+                    return TupleType(items, self.builtin_type('builtins.tuple'), t.line)
+            a = TypeAnalyser(self.lookup_qualified,
+                             self.lookup_fully_qualified,
+                             self.fail)
+            return t.accept(a)
+        else:
+            return None
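+    # Illustrative use of allow_tuple_literal (sketch): for an assignment
+    # such as
+    #
+    #   x, y = 1, 'a'  # type: (int, str)
+    #
+    # the declared type is a bare tuple literal, which is accepted only here;
+    # elsewhere Tuple[int, str] must be written instead.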
+
+    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+        for lval in s.lvalues:
+            self.analyze_lvalue(lval, explicit_type=s.type is not None)
+        s.rvalue.accept(self)
+        if s.type:
+            allow_tuple_literal = isinstance(s.lvalues[-1], (TupleExpr, ListExpr))
+            s.type = self.anal_type(s.type, allow_tuple_literal)
+        else:
+            # For simple assignments, allow binding type aliases.
+            if (s.type is None and len(s.lvalues) == 1 and
+                    isinstance(s.lvalues[0], NameExpr)):
+                res = analyze_type_alias(s.rvalue,
+                                         self.lookup_qualified,
+                                         self.lookup_fully_qualified,
+                                         self.fail)
+                if res and (not isinstance(res, Instance) or cast(Instance, res).args):
+                    # TODO: What if this gets reassigned?
+                    name = cast(NameExpr, s.lvalues[0])
+                    node = self.lookup(name.name, name)
+                    node.kind = TYPE_ALIAS
+                    node.type_override = res
+                    if isinstance(s.rvalue, IndexExpr):
+                        s.rvalue.analyzed = TypeAliasExpr(res)
+        if s.type:
+            # Store type into nodes.
+            for lvalue in s.lvalues:
+                self.store_declared_types(lvalue, s.type)
+        self.check_and_set_up_type_alias(s)
+        self.process_typevar_declaration(s)
+        self.process_namedtuple_definition(s)
+
+    def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None:
+        """Check if assignment creates a type alias and set it up as needed."""
+        # For now, type aliases only work at the top level of a module.
+        if (len(s.lvalues) == 1 and not self.is_func_scope() and not self.type
+                and not s.type):
+            lvalue = s.lvalues[0]
+            if isinstance(lvalue, NameExpr):
+                if not lvalue.is_def:
+                    # Only a definition can create a type alias, not regular assignment.
+                    return
+                rvalue = s.rvalue
+                if isinstance(rvalue, RefExpr):
+                    node = rvalue.node
+                    if isinstance(node, TypeInfo):
+                        # TODO: We should record the fact that this is a variable
+                        #       that refers to a type, rather than making this
+                        #       just an alias for the type.
+                        self.globals[lvalue.name].node = node
+
+    def analyze_lvalue(self, lval: Node, nested: bool = False,
+                       add_global: bool = False,
+                       explicit_type: bool = False) -> None:
+        """Analyze an lvalue or assignment target.
+
+        Add the name to the globals table only if add_global is True. If
+        nested is true, the lvalue is within a tuple or list lvalue expression.
+        """
+
+        if isinstance(lval, NameExpr):
+            # Top-level definitions within some statements (at least while) are
+            # not handled in the first pass, so they have to be added now.
+            nested_global = (not self.is_func_scope() and
+                             self.block_depth[-1] > 0 and
+                             not self.type)
+            if (add_global or nested_global) and lval.name not in self.globals:
+                # Define new global name.
+                v = Var(lval.name)
+                v._fullname = self.qualified_name(lval.name)
+                v.is_ready = False  # Type not inferred yet
+                lval.node = v
+                lval.is_def = True
+                lval.kind = GDEF
+                lval.fullname = v._fullname
+                self.globals[lval.name] = SymbolTableNode(GDEF, v,
+                                                          self.cur_mod_id)
+            elif isinstance(lval.node, Var) and lval.is_def:
+                # Since the is_def flag is set, this must have been analyzed
+                # already in the first pass and added to the symbol table.
+                v = cast(Var, lval.node)
+                assert v.name() in self.globals
+            elif (self.is_func_scope() and lval.name not in self.locals[-1] and
+                  lval.name not in self.global_decls[-1] and
+                  lval.name not in self.nonlocal_decls[-1]):
+                # Define new local name.
+                v = Var(lval.name)
+                lval.node = v
+                lval.is_def = True
+                lval.kind = LDEF
+                lval.fullname = lval.name
+                self.add_local(v, lval)
+            elif not self.is_func_scope() and (self.type and
+                                               lval.name not in self.type.names):
+                # Define a new attribute within class body.
+                v = Var(lval.name)
+                v.info = self.type
+                v.is_initialized_in_class = True
+                v.set_line(lval)
+                lval.node = v
+                lval.is_def = True
+                lval.kind = MDEF
+                lval.fullname = lval.name
+                self.type.names[lval.name] = SymbolTableNode(MDEF, v)
+            else:
+                # Bind to an existing name.
+                if explicit_type:
+                    self.name_already_defined(lval.name, lval)
+                lval.accept(self)
+                self.check_lvalue_validity(lval.node, lval)
+        elif isinstance(lval, MemberExpr):
+            if not add_global:
+                self.analyze_member_lvalue(lval)
+            if explicit_type and not self.is_self_member_ref(lval):
+                self.fail('Type cannot be declared in assignment to non-self '
+                          'attribute', lval)
+        elif isinstance(lval, IndexExpr):
+            if explicit_type:
+                self.fail('Unexpected type declaration', lval)
+            if not add_global:
+                lval.accept(self)
+        elif isinstance(lval, (TupleExpr, ListExpr)):
+            items = cast(Any, lval).items
+            if len(items) == 0 and isinstance(lval, TupleExpr):
+                self.fail("Can't assign to ()", lval)
+            self.analyze_tuple_or_list_lvalue(cast(Union[ListExpr, TupleExpr], lval),
+                                              add_global, explicit_type)
+        elif isinstance(lval, StarExpr):
+            if nested:
+                self.analyze_lvalue(lval.expr, nested, add_global, explicit_type)
+            else:
+                self.fail('Starred assignment target must be in a list or tuple', lval)
+        else:
+            self.fail('Invalid assignment target', lval)
+
+    def analyze_tuple_or_list_lvalue(self, lval: Union[ListExpr, TupleExpr],
+                                     add_global: bool = False,
+                                     explicit_type: bool = False) -> None:
+        """Analyze an lvalue or assignment target that is a list or tuple."""
+        items = lval.items
+        star_exprs = [cast(StarExpr, item)
+                      for item in items
+                      if isinstance(item, StarExpr)]
+
+        if len(star_exprs) > 1:
+            self.fail('Two starred expressions in assignment', lval)
+        else:
+            if len(star_exprs) == 1:
+                star_exprs[0].valid = True
+            for i in items:
+                self.analyze_lvalue(i, nested=True, add_global=add_global,
+                                    explicit_type=explicit_type)
+
+    def analyze_member_lvalue(self, lval: MemberExpr) -> None:
+        lval.accept(self)
+        if (self.is_self_member_ref(lval) and
+                self.type.get(lval.name) is None):
+            # Implicit attribute definition in __init__.
+            lval.is_def = True
+            v = Var(lval.name)
+            v.info = self.type
+            v.is_ready = False
+            lval.def_var = v
+            lval.node = v
+            self.type.names[lval.name] = SymbolTableNode(MDEF, v)
+        self.check_lvalue_validity(lval.node, lval)
+
+    def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
+        """Does memberexpr to refer to an attribute of self?"""
+        if not isinstance(memberexpr.expr, NameExpr):
+            return False
+        node = (cast(NameExpr, memberexpr.expr)).node
+        return isinstance(node, Var) and (cast(Var, node)).is_self
+
+    def check_lvalue_validity(self, node: Node, ctx: Context) -> None:
+        if isinstance(node, (TypeInfo, TypeVarExpr)):
+            self.fail('Invalid assignment target', ctx)
+
+    def store_declared_types(self, lvalue: Node, typ: Type) -> None:
+        if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr):
+            self.fail('Star type only allowed for starred expressions', lvalue)
+        if isinstance(lvalue, RefExpr):
+            lvalue.is_def = False
+            if isinstance(lvalue.node, Var):
+                var = cast(Var, lvalue.node)
+                var.type = typ
+                var.is_ready = True
+            # If node is not a variable, we'll catch it elsewhere.
+        elif isinstance(lvalue, TupleExpr):
+            if isinstance(typ, TupleType):
+                if len(lvalue.items) != len(typ.items):
+                    self.fail('Incompatible number of tuple items', lvalue)
+                    return
+                for item, itemtype in zip(lvalue.items, typ.items):
+                    self.store_declared_types(item, itemtype)
+            else:
+                self.fail('Tuple type expected for multiple variables',
+                          lvalue)
+        elif isinstance(lvalue, StarExpr):
+            if isinstance(typ, StarType):
+                self.store_declared_types(lvalue.expr, typ.type)
+            else:
+                self.fail('Star type expected for starred expression', lvalue)
+        else:
+            # This has been flagged elsewhere as an error, so just ignore here.
+            pass
+
+    def process_typevar_declaration(self, s: AssignmentStmt) -> None:
+        """Check if s declares a TypeVar; it yes, store it in symbol table."""
+        call = self.get_typevar_declaration(s)
+        if not call:
+            return
+
+        lvalue = cast(NameExpr, s.lvalues[0])
+        name = lvalue.name
+        if not lvalue.is_def:
+            if s.type:
+                self.fail("Cannot declare the type of a type variable", s)
+            else:
+                self.fail("Cannot redefine '%s' as a type variable" % name, s)
+            return
+
+        if not self.check_typevar_name(call, name, s):
+            return
+
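+        # At this point the call looks like a TypeVar declaration such as
+        #   T = TypeVar('T', int, str, covariant=True)
+        # where positional arguments after the name are value constraints and
+        # keyword arguments (variance etc.) are handled below.
+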
+        # Constraining types
+        n_values = call.arg_kinds[1:].count(ARG_POS)
+        values = self.analyze_types(call.args[1:1 + n_values])
+
+        variance = self.process_typevar_parameters(call.args[1 + n_values:],
+                                                   call.arg_names[1 + n_values:],
+                                                   call.arg_kinds[1 + n_values:],
+                                                   s)
+        if variance is None:
+            return
+
+        # Yes, it's a valid type variable definition! Add it to the symbol table.
+        node = self.lookup(name, s)
+        node.kind = UNBOUND_TVAR
+        type_var = TypeVarExpr(name, node.fullname, values, variance)
+        type_var.line = call.line
+        call.analyzed = type_var
+        node.node = type_var
+
+    def check_typevar_name(self, call: CallExpr, name: str, context: Context) -> bool:
+        if len(call.args) < 1:
+            self.fail("Too few arguments for TypeVar()", context)
+            return False
+        if not isinstance(call.args[0], StrExpr) or call.arg_kinds[0] != ARG_POS:
+            self.fail("TypeVar() expects a string literal as first argument", context)
+            return False
+        if cast(StrExpr, call.args[0]).value != name:
+            self.fail("Unexpected TypeVar() argument value", context)
+            return False
+        return True
+
+    def get_typevar_declaration(self, s: AssignmentStmt) -> Optional[CallExpr]:
+        """Returns the TypeVar() call expression if `s` is a type var declaration
+        or None otherwise.
+        """
+        if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
+            return None
+        if not isinstance(s.rvalue, CallExpr):
+            return None
+        call = cast(CallExpr, s.rvalue)
+        if not isinstance(call.callee, RefExpr):
+            return None
+        callee = cast(RefExpr, call.callee)
+        if callee.fullname != 'typing.TypeVar':
+            return None
+        return call
+
+    def process_typevar_parameters(self, args: List[Node],
+                                   names: List[Optional[str]],
+                                   kinds: List[int],
+                                   context: Context) -> Optional[int]:
+        covariant = False
+        contravariant = False
+        for param_value, param_name, param_kind in zip(args, names, kinds):
+            if param_kind != ARG_NAMED:
+                self.fail("Unexpected argument to TypeVar()", context)
+                return None
+            if param_name == 'covariant':
+                if isinstance(param_value, NameExpr):
+                    if param_value.name == 'True':
+                        covariant = True
+                    else:
+                        self.fail("TypeVar 'covariant' may only be 'True'", context)
+                        return None
+                else:
+                    self.fail("TypeVar 'covariant' may only be 'True'", context)
+                    return None
+            elif param_name == 'contravariant':
+                if isinstance(param_value, NameExpr):
+                    if param_value.name == 'True':
+                        contravariant = True
+                    else:
+                        self.fail("TypeVar 'contravariant' may only be 'True'", context)
+                        return None
+                else:
+                    self.fail("TypeVar 'contravariant' may only be 'True'", context)
+                    return None
+            elif param_name == 'bound':
+                self.fail("TypeVar 'bound' argument not supported yet", context)
+                return None
+            elif param_name == 'values':
+                # Probably using obsolete syntax with values=(...). Explain the current syntax.
+                self.fail("TypeVar 'values' argument not supported", context)
+                self.fail("Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...))",
+                          context)
+                return None
+            else:
+                self.fail("Unexpected argument to TypeVar(): {}".format(param_name), context)
+                return None
+        if covariant and contravariant:
+            self.fail("TypeVar cannot be both covariant and contravariant", context)
+            return None
+        elif covariant:
+            return COVARIANT
+        elif contravariant:
+            return CONTRAVARIANT
+        else:
+            return INVARIANT
+
+    def process_namedtuple_definition(self, s: AssignmentStmt) -> None:
+        """Check if s defines a namedtuple; if yes, store the definition in symbol table."""
+        if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
+            return
+        named_tuple = self.check_namedtuple(s.rvalue)
+        if named_tuple is None:
+            return
+        # Yes, it's a valid namedtuple definition. Add it to the symbol table.
+        lvalue = cast(NameExpr, s.lvalues[0])
+        name = lvalue.name
+        node = self.lookup(name, s)
+        node.kind = GDEF   # TODO locally defined namedtuple
+        # TODO call.analyzed
+        node.node = named_tuple
+
+    def check_namedtuple(self, node: Node) -> Optional[TypeInfo]:
+        """Check if a call defines a namedtuple.
+
+        If it does, return the corresponding TypeInfo. Return None otherwise.
+
+        If the definition is invalid but looks like a namedtuple,
+        report errors but return (some) TypeInfo.
+        """
+        if not isinstance(node, CallExpr):
+            return None
+        call = cast(CallExpr, node)
+        if not isinstance(call.callee, RefExpr):
+            return None
+        callee = cast(RefExpr, call.callee)
+        fullname = callee.fullname
+        if fullname not in ('collections.namedtuple', 'typing.NamedTuple'):
+            return None
+        items, types = self.parse_namedtuple_args(call, fullname)
+        if not items:
+            # Error. Construct dummy return value.
+            return self.build_namedtuple_typeinfo('namedtuple', [], [])
+        else:
+            name = cast(StrExpr, call.args[0]).value
+            info = self.build_namedtuple_typeinfo(name, items, types)
+        call.analyzed = NamedTupleExpr(info).set_line(call.line)
+        return info
+
+    def parse_namedtuple_args(self, call: CallExpr,
+                              fullname: str) -> Tuple[List[str], List[Type]]:
+        # TODO Share code with check_argument_count in checkexpr.py?
+        args = call.args
+        if len(args) < 2:
+            return self.fail_namedtuple_arg("Too few arguments for namedtuple()", call)
+        if len(args) > 2:
+            return self.fail_namedtuple_arg("Too many arguments for namedtuple()", call)
+        if call.arg_kinds != [ARG_POS, ARG_POS]:
+            return self.fail_namedtuple_arg("Unexpected arguments to namedtuple()", call)
+        if not isinstance(args[0], StrExpr):
+            return self.fail_namedtuple_arg(
+                "namedtuple() expects a string literal as the first argument", call)
+        types = []  # type: List[Type]
+        if not isinstance(args[1], ListExpr):
+            if fullname == 'collections.namedtuple' and isinstance(args[1], StrExpr):
+                str_expr = cast(StrExpr, args[1])
+                items = str_expr.value.split()
+            else:
+                return self.fail_namedtuple_arg(
+                    "List literal expected as the second argument to namedtuple()", call)
+        else:
+            listexpr = cast(ListExpr, args[1])
+            if fullname == 'collections.namedtuple':
+                # The fields argument contains just names, with implicit Any types.
+                if any(not isinstance(item, StrExpr) for item in listexpr.items):
+                    return self.fail_namedtuple_arg("String literal expected as namedtuple() item",
+                                                    call)
+                items = [cast(StrExpr, item).value for item in listexpr.items]
+            else:
+                # The fields argument contains (name, type) tuples.
+                items, types = self.parse_namedtuple_fields_with_types(listexpr.items, call)
+        if not types:
+            types = [AnyType() for _ in items]
+        return items, types
+
+    def parse_namedtuple_fields_with_types(self, nodes: List[Node],
+                                           context: Context) -> Tuple[List[str], List[Type]]:
+        items = []  # type: List[str]
+        types = []  # type: List[Type]
+        for item in nodes:
+            if isinstance(item, TupleExpr):
+                if len(item.items) != 2:
+                    return self.fail_namedtuple_arg("Invalid NamedTuple field definition",
+                                                    item)
+                name, type_node = item.items
+                if isinstance(name, StrExpr):
+                    items.append(name.value)
+                else:
+                    return self.fail_namedtuple_arg("Invalid NamedTuple() field name", item)
+                try:
+                    type = expr_to_unanalyzed_type(type_node)
+                except TypeTranslationError:
+                    return self.fail_namedtuple_arg('Invalid field type', type_node)
+                types.append(self.anal_type(type))
+            else:
+                return self.fail_namedtuple_arg("Tuple expected as NamedTuple() field", item)
+        return items, types
+
+    def fail_namedtuple_arg(self, message: str, context: Context) -> Tuple[List[str], List[Type]]:
+        self.fail(message, context)
+        return [], []
+
+    def build_namedtuple_typeinfo(self, name: str, items: List[str],
+                                  types: List[Type]) -> TypeInfo:
+        symbols = SymbolTable()
+        class_def = ClassDef(name, Block([]))
+        class_def.fullname = self.qualified_name(name)
+        info = TypeInfo(symbols, class_def)
+        # Add named tuple items as attributes.
+        # TODO: Make them read-only.
+        for item, typ in zip(items, types):
+            var = Var(item)
+            var.info = info
+            var.type = typ
+            symbols[item] = SymbolTableNode(MDEF, var)
+        # Add a __init__ method.
+        init = self.make_namedtuple_init(info, items, types)
+        symbols['__init__'] = SymbolTableNode(MDEF, init)
+        info.tuple_type = TupleType(types, self.named_type('__builtins__.tuple', [AnyType()]))
+        info.is_named_tuple = True
+        info.mro = [info] + info.tuple_type.fallback.type.mro
+        info.bases = [info.tuple_type.fallback]
+        return info
+
+    def make_argument(self, name: str, type: Type) -> Argument:
+        return Argument(Var(name), type, None, ARG_POS)
+
+    def make_namedtuple_init(self, info: TypeInfo, items: List[str],
+                             types: List[Type]) -> FuncDef:
+        args = [self.make_argument(item, type) for item, type in zip(items, types)]
+        # TODO: Make sure that the self argument name is not visible?
+        args = [Argument(Var('__self'), NoneTyp(), None, ARG_POS)] + args
+        arg_kinds = [arg.kind for arg in args]
+        signature = CallableType([cast(Type, None)] + types,
+                                 arg_kinds,
+                                 ['__self'] + items,
+                                 NoneTyp(),
+                                 self.named_type('__builtins__.function'),
+                                 name=info.name())
+        func = FuncDef('__init__',
+                       args,
+                       Block([]),
+                       typ=signature)
+        func.info = info
+        return func
+
+    def analyze_types(self, items: List[Node]) -> List[Type]:
+        result = []  # type: List[Type]
+        for node in items:
+            try:
+                result.append(self.anal_type(expr_to_unanalyzed_type(node)))
+            except TypeTranslationError:
+                self.fail('Type expected', node)
+                result.append(AnyType())
+        return result
+
+    def visit_decorator(self, dec: Decorator) -> None:
+        for d in dec.decorators:
+            d.accept(self)
+        removed = []  # type: List[int]
+        no_type_check = False
+        for i, d in enumerate(dec.decorators):
+            # A bunch of decorators are special cased here.
+            if refers_to_fullname(d, 'abc.abstractmethod'):
+                removed.append(i)
+                dec.func.is_abstract = True
+                self.check_decorated_function_is_method('abstractmethod', dec)
+            elif refers_to_fullname(d, 'asyncio.tasks.coroutine'):
+                removed.append(i)
+            elif refers_to_fullname(d, 'builtins.staticmethod'):
+                removed.append(i)
+                dec.func.is_static = True
+                dec.var.is_staticmethod = True
+                self.check_decorated_function_is_method('staticmethod', dec)
+            elif refers_to_fullname(d, 'builtins.classmethod'):
+                removed.append(i)
+                dec.func.is_class = True
+                dec.var.is_classmethod = True
+                self.check_decorated_function_is_method('classmethod', dec)
+            elif (refers_to_fullname(d, 'builtins.property') or
+                  refers_to_fullname(d, 'abc.abstractproperty')):
+                removed.append(i)
+                dec.func.is_property = True
+                dec.var.is_property = True
+                if refers_to_fullname(d, 'abc.abstractproperty'):
+                    dec.func.is_abstract = True
+                self.check_decorated_function_is_method('property', dec)
+                if len(dec.func.arguments) > 1:
+                    self.fail('Too many arguments', dec.func)
+            elif refers_to_fullname(d, 'typing.no_type_check'):
+                dec.var.type = AnyType()
+                no_type_check = True
+        for i in reversed(removed):
+            del dec.decorators[i]
+        if not dec.is_overload or dec.var.is_property:
+            if self.is_func_scope():
+                self.add_symbol(dec.var.name(), SymbolTableNode(LDEF, dec),
+                                dec)
+            elif self.type:
+                dec.var.info = self.type
+                dec.var.is_initialized_in_class = True
+                self.add_symbol(dec.var.name(), SymbolTableNode(MDEF, dec),
+                                dec)
+        if not no_type_check:
+            dec.func.accept(self)
+        if dec.decorators and dec.var.is_property:
+            self.fail('Decorated property not supported', dec)
+
+    def check_decorated_function_is_method(self, decorator: str,
+                                           context: Context) -> None:
+        if not self.type or self.is_func_scope():
+            self.fail("'%s' used with a non-method" % decorator, context)
+
+    def visit_expression_stmt(self, s: ExpressionStmt) -> None:
+        s.expr.accept(self)
+
+    def visit_return_stmt(self, s: ReturnStmt) -> None:
+        if not self.is_func_scope():
+            self.fail("'return' outside function", s)
+        if s.expr:
+            s.expr.accept(self)
+
+    def visit_raise_stmt(self, s: RaiseStmt) -> None:
+        if s.expr:
+            s.expr.accept(self)
+        if s.from_expr:
+            s.from_expr.accept(self)
+
+    def visit_assert_stmt(self, s: AssertStmt) -> None:
+        if s.expr:
+            s.expr.accept(self)
+
+    def visit_operator_assignment_stmt(self,
+                                       s: OperatorAssignmentStmt) -> None:
+        s.lvalue.accept(self)
+        s.rvalue.accept(self)
+
+    def visit_while_stmt(self, s: WhileStmt) -> None:
+        s.expr.accept(self)
+        self.loop_depth += 1
+        s.body.accept(self)
+        self.loop_depth -= 1
+        self.visit_block_maybe(s.else_body)
+
+    def visit_for_stmt(self, s: ForStmt) -> None:
+        s.expr.accept(self)
+
+        # Bind index variables and check if they define new names.
+        self.analyze_lvalue(s.index)
+
+        self.loop_depth += 1
+        self.visit_block(s.body)
+        self.loop_depth -= 1
+
+        self.visit_block_maybe(s.else_body)
+
+    def visit_break_stmt(self, s: BreakStmt) -> None:
+        if self.loop_depth == 0:
+            self.fail("'break' outside loop", s)
+
+    def visit_continue_stmt(self, s: ContinueStmt) -> None:
+        if self.loop_depth == 0:
+            self.fail("'continue' outside loop", s)
+
+    def visit_if_stmt(self, s: IfStmt) -> None:
+        infer_reachability_of_if_statement(s, pyversion=self.pyversion)
+        for i in range(len(s.expr)):
+            s.expr[i].accept(self)
+            self.visit_block(s.body[i])
+        self.visit_block_maybe(s.else_body)
+
+    def visit_try_stmt(self, s: TryStmt) -> None:
+        self.analyze_try_stmt(s, self)
+
+    def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor,
+                         add_global: bool = False) -> None:
+        s.body.accept(visitor)
+        for type, var, handler in zip(s.types, s.vars, s.handlers):
+            if type:
+                type.accept(visitor)
+            if var:
+                self.analyze_lvalue(var, add_global=add_global)
+            handler.accept(visitor)
+        if s.else_body:
+            s.else_body.accept(visitor)
+        if s.finally_body:
+            s.finally_body.accept(visitor)
+
+    def visit_with_stmt(self, s: WithStmt) -> None:
+        for e, n in zip(s.expr, s.target):
+            e.accept(self)
+            if n:
+                self.analyze_lvalue(n)
+        self.visit_block(s.body)
+
+    def visit_del_stmt(self, s: DelStmt) -> None:
+        s.expr.accept(self)
+        if not self.is_valid_del_target(s.expr):
+            self.fail('Invalid delete target', s)
+
+    def is_valid_del_target(self, s: Node) -> bool:
+        if isinstance(s, (IndexExpr, NameExpr, MemberExpr)):
+            return True
+        elif isinstance(s, TupleExpr):
+            return all(self.is_valid_del_target(item) for item in s.items)
+        else:
+            return False
+
+    def visit_global_decl(self, g: GlobalDecl) -> None:
+        for name in g.names:
+            if name in self.nonlocal_decls[-1]:
+                self.fail("Name '{}' is nonlocal and global".format(name), g)
+            self.global_decls[-1].add(name)
+
+    def visit_nonlocal_decl(self, d: NonlocalDecl) -> None:
+        if not self.is_func_scope():
+            self.fail("nonlocal declaration not allowed at module level", d)
+        else:
+            for name in d.names:
+                for table in reversed(self.locals[:-1]):
+                    if table is not None and name in table:
+                        break
+                else:
+                    self.fail("No binding for nonlocal '{}' found".format(name), d)
+
+                if self.locals[-1] is not None and name in self.locals[-1]:
+                    self.fail("Name '{}' is already defined in local "
+                              "scope before nonlocal declaration".format(name), d)
+
+                if name in self.global_decls[-1]:
+                    self.fail("Name '{}' is nonlocal and global".format(name), d)
+                self.nonlocal_decls[-1].add(name)
+
+    def visit_print_stmt(self, s: PrintStmt) -> None:
+        for arg in s.args:
+            arg.accept(self)
+        if s.target:
+            s.target.accept(self)
+
+    def visit_exec_stmt(self, s: ExecStmt) -> None:
+        s.expr.accept(self)
+        if s.variables1:
+            s.variables1.accept(self)
+        if s.variables2:
+            s.variables2.accept(self)
+
+    #
+    # Expressions
+    #
+
+    def visit_name_expr(self, expr: NameExpr) -> None:
+        n = self.lookup(expr.name, expr)
+        if n:
+            if n.kind == BOUND_TVAR:
+                self.fail("'{}' is a type variable and only valid in type "
+                          "context".format(expr.name), expr)
+            else:
+                expr.kind = n.kind
+                expr.node = (cast(Node, n.node))
+                expr.fullname = n.fullname
+
+    def visit_super_expr(self, expr: SuperExpr) -> None:
+        if not self.type:
+            self.fail('"super" used outside class', expr)
+            return
+        expr.info = self.type
+
+    def visit_tuple_expr(self, expr: TupleExpr) -> None:
+        for item in expr.items:
+            item.accept(self)
+
+    def visit_list_expr(self, expr: ListExpr) -> None:
+        for item in expr.items:
+            item.accept(self)
+
+    def visit_set_expr(self, expr: SetExpr) -> None:
+        for item in expr.items:
+            item.accept(self)
+
+    def visit_dict_expr(self, expr: DictExpr) -> None:
+        for key, value in expr.items:
+            key.accept(self)
+            value.accept(self)
+
+    def visit_star_expr(self, expr: StarExpr) -> None:
+        if not expr.valid:
+            self.fail('Can use starred expression only as assignment target', expr)
+        else:
+            expr.expr.accept(self)
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+        if not self.is_func_scope():  # not sure
+            self.fail("'yield from' outside function", e)
+        else:
+            self.function_stack[-1].is_generator = True
+        if e.expr:
+            e.expr.accept(self)
+
+    def visit_call_expr(self, expr: CallExpr) -> None:
+        """Analyze a call expression.
+
+        Some call expressions are recognized as special forms, including
+        cast(...) and Any(...).
+        """
+        expr.callee.accept(self)
+        if refers_to_fullname(expr.callee, 'typing.cast'):
+            # Special form cast(...).
+            if not self.check_fixed_args(expr, 2, 'cast'):
+                return
+            # Translate first argument to an unanalyzed type.
+            try:
+                target = expr_to_unanalyzed_type(expr.args[0])
+            except TypeTranslationError:
+                self.fail('Cast target is not a type', expr)
+                return
+            # Piggyback CastExpr object to the CallExpr object; it takes
+            # precedence over the CallExpr semantics.
+            expr.analyzed = CastExpr(expr.args[1], target)
+            expr.analyzed.line = expr.line
+            expr.analyzed.accept(self)
+        elif refers_to_fullname(expr.callee, 'typing.Any'):
+            # Special form Any(...).
+            if not self.check_fixed_args(expr, 1, 'Any'):
+                return
+            expr.analyzed = CastExpr(expr.args[0], AnyType())
+            expr.analyzed.line = expr.line
+            expr.analyzed.accept(self)
+        elif refers_to_fullname(expr.callee, 'typing._promote'):
+            # Special form _promote(...).
+            if not self.check_fixed_args(expr, 1, '_promote'):
+                return
+            # Translate first argument to an unanalyzed type.
+            try:
+                target = expr_to_unanalyzed_type(expr.args[0])
+            except TypeTranslationError:
+                self.fail('Argument 1 to _promote is not a type', expr)
+                return
+            expr.analyzed = PromoteExpr(target)
+            expr.analyzed.line = expr.line
+            expr.analyzed.accept(self)
+        elif refers_to_fullname(expr.callee, 'builtins.dict'):
+            expr.analyzed = self.translate_dict_call(expr)
+        else:
+            # Normal call expression.
+            for a in expr.args:
+                a.accept(self)
+
+    def translate_dict_call(self, call: CallExpr) -> Optional[DictExpr]:
+        """Translate 'dict(x=y, ...)' to {'x': y, ...}.
+
+        For other variants of dict(...), return None.
+        """
+        if not call.args:
+            return None
+        if not all(kind == ARG_NAMED for kind in call.arg_kinds):
+            return None
+        expr = DictExpr([(StrExpr(key), value)
+                         for key, value in zip(call.arg_names, call.args)])
+        expr.set_line(call)
+        expr.accept(self)
+        return expr
+
+    def check_fixed_args(self, expr: CallExpr, numargs: int,
+                         name: str) -> bool:
+        """Verify that expr has specified number of positional args.
+
+        Return True if the arguments are valid.
+        """
+        s = 's'
+        if numargs == 1:
+            s = ''
+        if len(expr.args) != numargs:
+            self.fail("'%s' expects %d argument%s" % (name, numargs, s),
+                      expr)
+            return False
+        if expr.arg_kinds != [ARG_POS] * numargs:
+            self.fail("'%s' must be called with %s positional argument%s" %
+                      (name, numargs, s), expr)
+            return False
+        return True
+
+    def visit_member_expr(self, expr: MemberExpr) -> None:
+        base = expr.expr
+        base.accept(self)
+        # Bind references to module attributes.
+        if isinstance(base, RefExpr) and cast(RefExpr,
+                                              base).kind == MODULE_REF:
+            file = cast(MypyFile, cast(RefExpr, base).node)
+            names = file.names
+            n = names.get(expr.name, None)
+            if n:
+                n = self.normalize_type_alias(n, expr)
+                if not n:
+                    return
+                expr.kind = n.kind
+                expr.fullname = n.fullname
+                expr.node = n.node
+            else:
+                # We only catch some errors here; the rest will be
+                # caught during type checking.
+                #
+                # This way we can report a larger number of errors in
+                # one type checker run. If we reported errors here,
+                # the build would terminate after semantic analysis
+                # and we wouldn't be able to report any type errors.
+                full_name = '%s.%s' % (file.fullname(), expr.name)
+                if full_name in obsolete_name_mapping:
+                    self.fail("Module has no attribute %r (it's now called %r)" % (
+                        expr.name, obsolete_name_mapping[full_name]), expr)
+
+    def visit_op_expr(self, expr: OpExpr) -> None:
+        expr.left.accept(self)
+        expr.right.accept(self)
+
+    def visit_comparison_expr(self, expr: ComparisonExpr) -> None:
+        for operand in expr.operands:
+            operand.accept(self)
+
+    def visit_unary_expr(self, expr: UnaryExpr) -> None:
+        expr.expr.accept(self)
+
+    def visit_index_expr(self, expr: IndexExpr) -> None:
+        expr.base.accept(self)
+        if refers_to_class_or_function(expr.base):
+            # Special form -- type application.
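+            # For example 'List[int]', where the base refers to a generic class.
+            # The index becomes a list of type arguments.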
+            # Translate index to an unanalyzed type.
+            types = []  # type: List[Type]
+            if isinstance(expr.index, TupleExpr):
+                items = (cast(TupleExpr, expr.index)).items
+            else:
+                items = [expr.index]
+            for item in items:
+                try:
+                    typearg = expr_to_unanalyzed_type(item)
+                except TypeTranslationError:
+                    self.fail('Type expected within [...]', expr)
+                    return
+                typearg = self.anal_type(typearg)
+                types.append(typearg)
+            expr.analyzed = TypeApplication(expr.base, types)
+            expr.analyzed.line = expr.line
+        else:
+            expr.index.accept(self)
+
+    def visit_slice_expr(self, expr: SliceExpr) -> None:
+        if expr.begin_index:
+            expr.begin_index.accept(self)
+        if expr.end_index:
+            expr.end_index.accept(self)
+        if expr.stride:
+            expr.stride.accept(self)
+
+    def visit_cast_expr(self, expr: CastExpr) -> None:
+        expr.expr.accept(self)
+        expr.type = self.anal_type(expr.type)
+
+    def visit_type_application(self, expr: TypeApplication) -> None:
+        expr.expr.accept(self)
+        for i in range(len(expr.types)):
+            expr.types[i] = self.anal_type(expr.types[i])
+
+    def visit_list_comprehension(self, expr: ListComprehension) -> None:
+        expr.generator.accept(self)
+
+    def visit_set_comprehension(self, expr: SetComprehension) -> None:
+        expr.generator.accept(self)
+
+    def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> None:
+        self.enter()
+        self.analyze_comp_for(expr)
+        expr.key.accept(self)
+        expr.value.accept(self)
+        self.leave()
+        self.analyze_comp_for_2(expr)
+
+    def visit_generator_expr(self, expr: GeneratorExpr) -> None:
+        self.enter()
+        self.analyze_comp_for(expr)
+        expr.left_expr.accept(self)
+        self.leave()
+        self.analyze_comp_for_2(expr)
+
+    def analyze_comp_for(self, expr: Union[GeneratorExpr,
+                                           DictionaryComprehension]) -> None:
+        """Analyses the 'comp_for' part of comprehensions (part 1).
+
+        That is the part after 'for' in (x for x in l if p). This analyzes
+        variables and conditions which are analyzed in a local scope.
+        """
+        for i, (index, sequence, conditions) in enumerate(zip(expr.indices,
+                                                              expr.sequences,
+                                                              expr.condlists)):
+            if i > 0:
+                sequence.accept(self)
+            # Bind index variables.
+            self.analyze_lvalue(index)
+            for cond in conditions:
+                cond.accept(self)
+
+    def analyze_comp_for_2(self, expr: Union[GeneratorExpr,
+                                             DictionaryComprehension]) -> None:
+        """Analyses the 'comp_for' part of comprehensions (part 2).
+
+        That is the part after 'for' in (x for x in l if p). This analyzes
+        the 'l' part which is analyzed in the surrounding scope.
+        """
+        expr.sequences[0].accept(self)
+
+    def visit_func_expr(self, expr: FuncExpr) -> None:
+        self.analyze_function(expr)
+
+    def visit_conditional_expr(self, expr: ConditionalExpr) -> None:
+        expr.if_expr.accept(self)
+        expr.cond.accept(self)
+        expr.else_expr.accept(self)
+
+    def visit_backquote_expr(self, expr: BackquoteExpr) -> None:
+        expr.expr.accept(self)
+
+    def visit__promote_expr(self, expr: PromoteExpr) -> None:
+        expr.type = self.anal_type(expr.type)
+
+    def visit_yield_expr(self, expr: YieldExpr) -> None:
+        if not self.is_func_scope():
+            self.fail("'yield' outside function", expr)
+        else:
+            self.function_stack[-1].is_generator = True
+        if expr.expr:
+            expr.expr.accept(self)
+
+    #
+    # Helpers
+    #
+
+    def lookup(self, name: str, ctx: Context) -> SymbolTableNode:
+        """Look up an unqualified name in all active namespaces."""
+        # 1a. Name declared using 'global x' takes precedence
+        if name in self.global_decls[-1]:
+            if name in self.globals:
+                return self.globals[name]
+            else:
+                self.name_not_defined(name, ctx)
+                return None
+        # 1b. Name declared using 'nonlocal x' takes precedence
+        if name in self.nonlocal_decls[-1]:
+            for table in reversed(self.locals[:-1]):
+                if table is not None and name in table:
+                    return table[name]
+            else:
+                self.name_not_defined(name, ctx)
+                return None
+        # 2. Class attributes (if within class definition)
+        if self.is_class_scope() and name in self.type.names:
+            return self.type[name]
+        # 3. Local (function) scopes
+        for table in reversed(self.locals):
+            if table is not None and name in table:
+                return table[name]
+        # 4. Current file global scope
+        if name in self.globals:
+            return self.globals[name]
+        # 5. Builtins
+        b = self.globals.get('__builtins__', None)
+        if b:
+            table = cast(MypyFile, b.node).names
+            if name in table:
+                if name[0] == "_" and name[1] != "_":
+                    self.name_not_defined(name, ctx)
+                    return None
+                node = table[name]
+                return node
+        # Give up.
+        self.name_not_defined(name, ctx)
+        self.check_for_obsolete_short_name(name, ctx)
+        return None
+
+    def check_for_obsolete_short_name(self, name: str, ctx: Context) -> None:
+        matches = [obsolete_name
+                   for obsolete_name in obsolete_name_mapping
+                   if obsolete_name.rsplit('.', 1)[-1] == name]
+        if len(matches) == 1:
+            self.note("(Did you mean '{}'?)".format(obsolete_name_mapping[matches[0]]), ctx)
+
+    def lookup_qualified(self, name: str, ctx: Context) -> SymbolTableNode:
+        if '.' not in name:
+            return self.lookup(name, ctx)
+        else:
+            parts = name.split('.')
+            n = self.lookup(parts[0], ctx)  # type: SymbolTableNode
+            if n:
+                for i in range(1, len(parts)):
+                    if isinstance(n.node, TypeInfo):
+                        if n.node.mro is None:
+                            # We haven't yet analyzed the class `n.node`.  Fall back to direct
+                            # lookup in the names declared directly under it, without its base
+                            # classes.  This can happen when we have a forward reference to a
+                            # nested class, and the reference is bound before the outer class
+                            # has been fully semantically analyzed.
+                            #
+                            # A better approach would be to introduce a new analysis pass or
+                            # to move things around between passes, but this unblocks a common
+                            # use case even though this is a little limited in case there is
+                            # inheritance involved.
+                            result = n.node.names.get(parts[i])
+                        else:
+                            result = n.node.get(parts[i])
+                        n = result
+                    elif isinstance(n.node, MypyFile):
+                        n = n.node.names.get(parts[i], None)
+                    # TODO: What if node is Var or FuncDef?
+                    if not n:
+                        self.name_not_defined(name, ctx)
+                        break
+                if n:
+                    n = self.normalize_type_alias(n, ctx)
+            return n
+
+    def builtin_type(self, fully_qualified_name: str) -> Instance:
+        node = self.lookup_fully_qualified(fully_qualified_name)
+        info = cast(TypeInfo, node.node)
+        return Instance(info, [])
+
+    def lookup_fully_qualified(self, name: str) -> SymbolTableNode:
+        """Lookup a fully qualified name.
+
+        Assume that the name is defined. This happens in the global namespace -- the local
+        module namespace is ignored.
+        """
+        assert '.' in name
+        parts = name.split('.')
+        n = self.modules[parts[0]]
+        for i in range(1, len(parts) - 1):
+            n = cast(MypyFile, n.names[parts[i]].node)
+        return n.names[parts[-1]]
+
+    def lookup_fully_qualified_or_none(self, name: str) -> SymbolTableNode:
+        """Lookup a fully qualified name.
+
+        Assume that the name is defined. This happens in the global namespace -- the local
+        module namespace is ignored.
+        """
+        assert '.' in name
+        parts = name.split('.')
+        n = self.modules[parts[0]]
+        for i in range(1, len(parts) - 1):
+            next_sym = n.names.get(parts[i])
+            if not next_sym:
+                return None
+            n = cast(MypyFile, next_sym.node)
+        return n.names.get(parts[-1])
+
+    def qualified_name(self, n: str) -> str:
+        return self.cur_mod_id + '.' + n
+
+    def enter(self) -> None:
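+        # Push a new local scope; leave() pops the same four stacks.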
+        self.locals.append(SymbolTable())
+        self.global_decls.append(set())
+        self.nonlocal_decls.append(set())
+        # -1 since entering block will increment this to 0.
+        self.block_depth.append(-1)
+
+    def leave(self) -> None:
+        self.locals.pop()
+        self.global_decls.pop()
+        self.nonlocal_decls.pop()
+        self.block_depth.pop()
+
+    def is_func_scope(self) -> bool:
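+        # Non-function scopes are represented by a None entry on the locals
+        # stack (see also the 'table is not None' checks in lookup()).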
+        return self.locals[-1] is not None
+
+    def is_class_scope(self) -> bool:
+        return self.type is not None and not self.is_func_scope()
+
+    def add_symbol(self, name: str, node: SymbolTableNode,
+                   context: Context) -> None:
+        if self.is_func_scope():
+            if name in self.locals[-1]:
+                # Flag redefinition unless this is a reimport of a module.
+                if not (node.kind == MODULE_REF and
+                        self.locals[-1][name].node == node.node):
+                    self.name_already_defined(name, context)
+            self.locals[-1][name] = node
+        elif self.type:
+            self.type.names[name] = node
+        else:
+            existing = self.globals.get(name)
+            if existing and (not isinstance(node.node, MypyFile) or
+                             existing.node != node.node) and existing.kind != UNBOUND_IMPORTED:
+                # Modules can be imported multiple times to support import
+                # of multiple submodules of a package (e.g. a.x and a.y).
+                ok = False
+                # Only report an error if the symbol collision provides a different type.
+                if existing.type and node.type and is_same_type(existing.type, node.type):
+                    ok = True
+                if not ok:
+                    self.name_already_defined(name, context)
+            self.globals[name] = node
+
+    def add_var(self, v: Var, ctx: Context) -> None:
+        if self.is_func_scope():
+            self.add_local(v, ctx)
+        else:
+            self.globals[v.name()] = SymbolTableNode(GDEF, v, self.cur_mod_id)
+            v._fullname = self.qualified_name(v.name())
+
+    def add_local(self, node: Union[Var, FuncBase], ctx: Context) -> None:
+        name = node.name()
+        if name in self.locals[-1]:
+            self.name_already_defined(name, ctx)
+        node._fullname = name
+        self.locals[-1][name] = SymbolTableNode(LDEF, node)
+
+    def check_no_global(self, n: str, ctx: Context,
+                        is_func: bool = False) -> None:
+        if n in self.globals:
+            if is_func and isinstance(self.globals[n].node, FuncDef):
+                self.fail(("Name '{}' already defined (overload variants "
+                           "must be next to each other)").format(n), ctx)
+            else:
+                self.name_already_defined(n, ctx)
+
+    def name_not_defined(self, name: str, ctx: Context) -> None:
+        message = "Name '{}' is not defined".format(name)
+        extra = self.undefined_name_extra_info(name)
+        if extra:
+            message += ' {}'.format(extra)
+        self.fail(message, ctx)
+
+    def name_already_defined(self, name: str, ctx: Context) -> None:
+        self.fail("Name '{}' already defined".format(name), ctx)
+
+    def fail(self, msg: str, ctx: Context) -> None:
+        self.errors.report(ctx.get_line(), msg)
+
+    def note(self, msg: str, ctx: Context) -> None:
+        self.errors.report(ctx.get_line(), msg, severity='note')
+
+    def undefined_name_extra_info(self, fullname: str) -> Optional[str]:
+        if fullname in obsolete_name_mapping:
+            return "(it's now called '{}')".format(obsolete_name_mapping[fullname])
+        else:
+            return None
+
+    def accept(self, node: Node) -> None:
+        try:
+            node.accept(self)
+        except Exception as err:
+            report_internal_error(err, self.errors.file, node.line)
+
+
+class FirstPass(NodeVisitor):
+    """First phase of semantic analysis.
+
+    See docstring of 'analyze()' below for a description of what this does.
+    """
+
+    def __init__(self, sem: SemanticAnalyzer) -> None:
+        self.sem = sem
+        self.pyversion = sem.pyversion
+
+    def analyze(self, file: MypyFile, fnam: str, mod_id: str) -> None:
+        """Perform the first analysis pass.
+
+        Populate module global table.  Resolve the full names of
+        definitions not nested within functions and construct type
+        info structures, but do not resolve inter-definition
+        references such as base classes.
+
+        Also add implicit definitions such as __name__.
+
+        In this phase we don't resolve imports. For 'from ... import',
+        we generate dummy symbol table nodes for the imported names,
+        and these will get resolved in later phases of semantic
+        analysis.
+        """
+        sem = self.sem
+        sem.cur_mod_id = mod_id
+        sem.errors.set_file(fnam)
+        sem.globals = SymbolTable()
+        sem.global_decls = [set()]
+        sem.nonlocal_decls = [set()]
+        sem.block_depth = [0]
+
+        defs = file.defs
+
+        # Add implicit definitions of module '__name__' etc.
+        for name, t in implicit_module_attrs.items():
+            v = Var(name, UnboundType(t))
+            v._fullname = self.sem.qualified_name(name)
+            self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
+
+        for d in defs:
+            d.accept(self)
+
+        # Add implicit definition of 'None' to builtins, as we cannot define a
+        # variable with a None type explicitly.
+        if mod_id == 'builtins':
+            v = Var('None', NoneTyp())
+            v._fullname = self.sem.qualified_name('None')
+            self.sem.globals['None'] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
+
+    def visit_block(self, b: Block) -> None:
+        if b.is_unreachable:
+            return
+        self.sem.block_depth[-1] += 1
+        for node in b.body:
+            node.accept(self)
+        self.sem.block_depth[-1] -= 1
+
+    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+        for lval in s.lvalues:
+            self.analyze_lvalue(lval, explicit_type=s.type is not None)
+
+    def visit_func_def(self, func: FuncDef) -> None:
+        sem = self.sem
+        func.is_conditional = sem.block_depth[-1] > 0
+        func._fullname = sem.qualified_name(func.name())
+        if func.name() in sem.globals:
+            # Already defined in this module.
+            original_sym = sem.globals[func.name()]
+            if original_sym.kind == UNBOUND_IMPORTED:
+                # This is an imported name. We can't resolve it now, so we'll
+                # postpone this until the main phase of semantic analysis.
+                return
+            original_def = original_sym.node
+            if sem.is_conditional_func(original_def, func):
+                # Conditional function definition -- multiple defs are ok.
+                func.original_def = cast(FuncDef, original_def)
+            else:
+                # Report error.
+                sem.check_no_global(func.name(), func, True)
+        else:
+            sem.globals[func.name()] = SymbolTableNode(GDEF, func, sem.cur_mod_id)
+
+    def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None:
+        self.sem.check_no_global(func.name(), func)
+        func._fullname = self.sem.qualified_name(func.name())
+        self.sem.globals[func.name()] = SymbolTableNode(GDEF, func,
+                                                        self.sem.cur_mod_id)
+
+    def visit_class_def(self, cdef: ClassDef) -> None:
+        self.sem.check_no_global(cdef.name, cdef)
+        cdef.fullname = self.sem.qualified_name(cdef.name)
+        info = TypeInfo(SymbolTable(), cdef)
+        info.set_line(cdef.line)
+        cdef.info = info
+        self.sem.globals[cdef.name] = SymbolTableNode(GDEF, info,
+                                                      self.sem.cur_mod_id)
+        self.process_nested_classes(cdef)
+
+    def process_nested_classes(self, outer_def: ClassDef) -> None:
+        for node in outer_def.defs.body:
+            if isinstance(node, ClassDef):
+                node.info = TypeInfo(SymbolTable(), node)
+                node.info._fullname = node.info.name()
+                symbol = SymbolTableNode(MDEF, node.info)
+                outer_def.info.names[node.name] = symbol
+                self.process_nested_classes(node)
+
+    def visit_import_from(self, node: ImportFrom) -> None:
+        # We can't bind module names during the first pass, as the target module might be
+        # unprocessed. However, we add dummy unbound imported names to the symbol table so
+        # that we at least know that the name refers to a module.
+        for name, as_name in node.names:
+            imported_name = as_name or name
+            if imported_name not in self.sem.globals:
+                self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
+
+    def visit_import(self, node: Import) -> None:
+        # This is similar to visit_import_from -- see the comment there.
+        for id, as_id in node.ids:
+            imported_id = as_id or id
+            if imported_id not in self.sem.globals:
+                self.sem.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node)
+            else:
+                # If the previous symbol is a variable, this should take precedence.
+                self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
+
+    def visit_for_stmt(self, s: ForStmt) -> None:
+        self.analyze_lvalue(s.index)
+        s.body.accept(self)
+
+    def visit_with_stmt(self, s: WithStmt) -> None:
+        for n in s.target:
+            if n:
+                self.analyze_lvalue(n)
+
+    def visit_decorator(self, d: Decorator) -> None:
+        d.var._fullname = self.sem.qualified_name(d.var.name())
+        self.sem.add_symbol(d.var.name(), SymbolTableNode(GDEF, d.var), d)
+
+    def visit_if_stmt(self, s: IfStmt) -> None:
+        infer_reachability_of_if_statement(s, pyversion=self.pyversion)
+        for node in s.body:
+            node.accept(self)
+        if s.else_body:
+            s.else_body.accept(self)
+
+    def visit_try_stmt(self, s: TryStmt) -> None:
+        self.sem.analyze_try_stmt(s, self, add_global=True)
+
+    def analyze_lvalue(self, lvalue: Node, explicit_type: bool = False) -> None:
+        self.sem.analyze_lvalue(lvalue, add_global=True, explicit_type=explicit_type)
+
+
+class ThirdPass(TraverserVisitor[None]):
+    """The third and final pass of semantic analysis.
+
+    Check type argument counts and values of generic types, and perform some
+    straightforward type inference.
+    """
+
+    def __init__(self, modules: Dict[str, MypyFile], errors: Errors) -> None:
+        self.modules = modules
+        self.errors = errors
+
+    def visit_file(self, file_node: MypyFile, fnam: str) -> None:
+        self.errors.set_file(fnam)
+        file_node.accept(self)
+
+    def visit_func_def(self, fdef: FuncDef) -> None:
+        self.errors.push_function(fdef.name())
+        self.analyze(fdef.type)
+        super().visit_func_def(fdef)
+        self.errors.pop_function()
+
+    def visit_class_def(self, tdef: ClassDef) -> None:
+        for type in tdef.info.bases:
+            self.analyze(type)
+        super().visit_class_def(tdef)
+
+    def visit_decorator(self, dec: Decorator) -> None:
+        """Try to infer the type of the decorated function.
+
+        This helps us resolve forward references to decorated
+        functions during type checking.
+        """
+        super().visit_decorator(dec)
+        if dec.var.is_property:
+            # Decorators are expected to have a callable type (it's a little odd).
+            if dec.func.type is None:
+                dec.var.type = CallableType(
+                    [AnyType()],
+                    [ARG_POS],
+                    [None],
+                    AnyType(),
+                    self.builtin_type('function'),
+                    name=dec.var.name())
+            elif isinstance(dec.func.type, CallableType):
+                dec.var.type = dec.func.type
+            return
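+        # A decorator preserves the function type only if its own signature is
+        # an identity-like (T) -> T callable.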
+        decorator_preserves_type = True
+        for expr in dec.decorators:
+            preserve_type = False
+            if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
+                if is_identity_signature(expr.node.type):
+                    preserve_type = True
+            if not preserve_type:
+                decorator_preserves_type = False
+                break
+        if decorator_preserves_type:
+            # No non-special decorators left. We can trivially infer the type
+            # of the function here.
+            dec.var.type = function_type(dec.func, self.builtin_type('function'))
+        if dec.decorators and returns_any_if_called(dec.decorators[0]):
+            # The outermost decorator will return Any, so we know the type of the
+            # decorated function.
+            dec.var.type = AnyType()
+
+    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+        self.analyze(s.type)
+        super().visit_assignment_stmt(s)
+
+    def visit_cast_expr(self, e: CastExpr) -> None:
+        self.analyze(e.type)
+        super().visit_cast_expr(e)
+
+    def visit_type_application(self, e: TypeApplication) -> None:
+        for type in e.types:
+            self.analyze(type)
+        super().visit_type_application(e)
+
+    # Helpers
+
+    def analyze(self, type: Type) -> None:
+        if type:
+            analyzer = TypeAnalyserPass3(self.fail)
+            type.accept(analyzer)
+
+    def fail(self, msg: str, ctx: Context) -> None:
+        self.errors.report(ctx.get_line(), msg)
+
+    def builtin_type(self, name: str, args: List[Type] = None) -> Instance:
+        names = self.modules['builtins']
+        sym = names.names[name]
+        assert isinstance(sym.node, TypeInfo)
+        return Instance(cast(TypeInfo, sym.node), args or [])
+
+
+def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
+    """For a non-generic type, return instance type representing the type.
+    For a generic type G with type parameters T1, ..., Tn, return G[T1, ..., Tn].
+    """
+    tv = []  # type: List[Type]
+    for i in range(len(typ.type_vars)):
+        tv.append(TypeVarType(typ.type_vars[i], i + 1,
+                          typ.defn.type_vars[i].values,
+                          typ.defn.type_vars[i].upper_bound,
+                          typ.defn.type_vars[i].variance))
+    inst = Instance(typ, tv)
+    if typ.tuple_type is None:
+        return inst
+    else:
+        return TupleType(typ.tuple_type.items, inst)
+
+
+def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
+    if isinstance(sig, CallableType):
+        return replace_leading_arg_type(sig, new)
+    else:
+        sig = cast(Overloaded, sig)
+        return Overloaded([cast(CallableType, replace_implicit_first_type(i, new))
+                           for i in sig.items()])
+
+
+def set_callable_name(sig: Type, fdef: FuncDef) -> Type:
+    if isinstance(sig, FunctionLike):
+        if fdef.info:
+            return sig.with_name(
+                '"{}" of "{}"'.format(fdef.name(), fdef.info.name()))
+        else:
+            return sig.with_name('"{}"'.format(fdef.name()))
+    else:
+        return sig
+
+
+def refers_to_fullname(node: Node, fullname: str) -> bool:
+    """Is node a name or member expression with the given full name?"""
+    return isinstance(node,
+                      RefExpr) and cast(RefExpr, node).fullname == fullname
+
+
+def refers_to_class_or_function(node: Node) -> bool:
+    """Does semantically analyzed node refer to a class?"""
+    return (isinstance(node, RefExpr) and
+            isinstance(cast(RefExpr, node).node, (TypeInfo, FuncDef,
+                                                  OverloadedFuncDef)))
+
+
+def find_duplicate(list: List[T]) -> T:
+    """If the list has duplicates, return one of the duplicates.
+
+    Otherwise, return None.
+    """
+    for i in range(1, len(list)):
+        if list[i] in list[:i]:
+            return list[i]
+    return None
+
+
+def disable_typevars(nodes: List[SymbolTableNode]) -> None:
+    for node in nodes:
+        assert node.kind in (BOUND_TVAR, UNBOUND_TVAR)
+        node.kind = UNBOUND_TVAR
+
+
+def enable_typevars(nodes: List[SymbolTableNode]) -> None:
+    for node in nodes:
+        assert node.kind in (BOUND_TVAR, UNBOUND_TVAR)
+        node.kind = BOUND_TVAR
+
+
+def remove_imported_names_from_symtable(names: SymbolTable,
+                                        module: str) -> None:
+    """Remove all imported names from the symbol table of a module."""
+    removed = []  # type: List[str]
+    for name, node in names.items():
+        fullname = node.node.fullname()
+        prefix = fullname[:fullname.rfind('.')]
+        if prefix != module:
+            removed.append(name)
+    for name in removed:
+        del names[name]
+
+
+def infer_reachability_of_if_statement(s: IfStmt,
+                                       pyversion: Tuple[int, int]) -> None:
+    for i in range(len(s.expr)):
+        result = infer_if_condition_value(s.expr[i], pyversion)
+        if result == ALWAYS_FALSE:
+            # The condition is always false, so we skip the if/elif body.
+            mark_block_unreachable(s.body[i])
+        elif result == ALWAYS_TRUE:
+            # This condition is always true, so all of the remaining
+            # elif/else bodies will never be executed.
+            for body in s.body[i + 1:]:
+                mark_block_unreachable(body)
+            if s.else_body:
+                mark_block_unreachable(s.else_body)
+            break
+
+
+def infer_if_condition_value(expr: Node, pyversion: Tuple[int, int]) -> int:
+    """Infer whether if condition is always true/false.
+
+    Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false,
+    and TRUTH_VALUE_UNKNOWN otherwise.
+    """
+    name = ''
+    negated = False
+    alias = expr
+    if isinstance(alias, UnaryExpr):
+        if alias.op == 'not':
+            expr = alias.expr
+            negated = True
+    if isinstance(expr, NameExpr):
+        name = expr.name
+    elif isinstance(expr, MemberExpr):
+        name = expr.name
+    result = TRUTH_VALUE_UNKNOWN
+    if name == 'PY2':
+        result = ALWAYS_TRUE if pyversion[0] == 2 else ALWAYS_FALSE
+    elif name == 'PY3':
+        result = ALWAYS_TRUE if pyversion[0] == 3 else ALWAYS_FALSE
+    elif name == 'MYPY':
+        result = ALWAYS_TRUE
+    if negated:
+        if result == ALWAYS_TRUE:
+            result = ALWAYS_FALSE
+        elif result == ALWAYS_FALSE:
+            result = ALWAYS_TRUE
+    return result
+
+
+def mark_block_unreachable(block: Block) -> None:
+    block.is_unreachable = True
+    block.accept(MarkImportsUnreachableVisitor())
+
+
+class MarkImportsUnreachableVisitor(TraverserVisitor):
+    """Visitor that flags all imports nested within a node as unreachable."""
+
+    def visit_import(self, node: Import) -> None:
+        node.is_unreachable = True
+
+    def visit_import_from(self, node: ImportFrom) -> None:
+        node.is_unreachable = True
+
+    def visit_import_all(self, node: ImportAll) -> None:
+        node.is_unreachable = True
+
+
+def is_identity_signature(sig: Type) -> bool:
+    """Is type a callable of form T -> T (where T is a type variable)?"""
+    if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]:
+        if isinstance(sig.arg_types[0], TypeVarType) and isinstance(sig.ret_type, TypeVarType):
+            return sig.arg_types[0].id == sig.ret_type.id
+    return False
+
+
+def returns_any_if_called(expr: Node) -> bool:
+    """Return True if we can predict that expr will return Any if called.
+
+    This only uses information available during semantic analysis, so it
+    will sometimes return False because of insufficient information (type
+    inference hasn't run yet).
+    """
+    if isinstance(expr, RefExpr):
+        if isinstance(expr.node, FuncDef):
+            typ = expr.node.type
+            if typ is None:
+                # No signature -> default to Any.
+                return True
+            # Explicit Any return?
+            return isinstance(typ, CallableType) and isinstance(typ.ret_type, AnyType)
+        elif isinstance(expr.node, Var):
+            typ = expr.node.type
+            return typ is None or isinstance(typ, AnyType)
+    elif isinstance(expr, CallExpr):
+        return returns_any_if_called(expr.callee)
+    return False
diff --git a/mypy/solve.py b/mypy/solve.py
new file mode 100644
index 0000000..cc04917
--- /dev/null
+++ b/mypy/solve.py
@@ -0,0 +1,75 @@
+"""Type inference constraint solving"""
+
+from typing import List, Dict
+
+from mypy.types import Type, Void, NoneTyp, AnyType, ErrorType
+from mypy.constraints import Constraint, SUPERTYPE_OF
+from mypy.join import join_types
+from mypy.meet import meet_types
+from mypy.subtypes import is_subtype
+
+
+def solve_constraints(vars: List[int], constraints: List[Constraint],
+                      strict=True) -> List[Type]:
+    """Solve type constraints.
+
+    Return the best type(s) for type variables; each type can be None if the value of the variable
+    could not be solved.
+
+    If a variable has no constraints, arbitrarily pick NoneTyp as the value
+    of the type variable if strict=True; if strict=False, pick AnyType.
+    """
+    # Collect a list of constraints for each type variable.
+    cmap = {}  # type: Dict[int, List[Constraint]]
+    for con in constraints:
+        a = cmap.get(con.type_var, [])
+        a.append(con)
+        cmap[con.type_var] = a
+
+    res = []  # type: List[Type]
+
+    # Solve each type variable separately.
+    for tvar in vars:
+        bottom = None  # type: Type
+        top = None  # type: Type
+
+        # Process each constraint separately, and calculate the lower and upper
+        # bounds based on constraints. Note that we assume that the constraint
+        # targets do not have constraint references.
+        for c in cmap.get(tvar, []):
+            if c.op == SUPERTYPE_OF:
+                if bottom is None:
+                    bottom = c.target
+                else:
+                    bottom = join_types(bottom, c.target)
+            else:
+                if top is None:
+                    top = c.target
+                else:
+                    top = meet_types(top, c.target)
+
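+        # Pick the solution: prefer the lower bound (join of SUPERTYPE_OF
+        # targets) when it is a subtype of the upper bound (meet of subtype
+        # constraints); otherwise fall back to whichever bound exists, or give
+        # up if the bounds are inconsistent.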
+        if isinstance(top, AnyType) or isinstance(bottom, AnyType):
+            res.append(AnyType())
+            continue
+        elif bottom is None:
+            if top:
+                candidate = top
+            else:
+                # No constraints for the type variable -- use NoneTyp (the most
+                # specific type) if strict, else AnyType.
+                if strict:
+                    candidate = NoneTyp()
+                else:
+                    candidate = AnyType()
+        elif top is None:
+            candidate = bottom
+        elif is_subtype(bottom, top):
+            candidate = bottom
+        else:
+            candidate = None
+        if isinstance(candidate, ErrorType):
+            res.append(None)
+        else:
+            res.append(candidate)
+
+    return res
diff --git a/mypy/stats.py b/mypy/stats.py
new file mode 100644
index 0000000..1a493d8
--- /dev/null
+++ b/mypy/stats.py
@@ -0,0 +1,371 @@
+"""Utilities for calculating and reporting statistics about types."""
+
+import cgi
+import os.path
+import re
+
+from typing import Any, Dict, List, cast, Tuple
+
+from mypy.traverser import TraverserVisitor
+from mypy.types import (
+    Type, AnyType, Instance, FunctionLike, TupleType, Void, TypeVarType,
+    TypeQuery, ANY_TYPE_STRATEGY, CallableType
+)
+from mypy import nodes
+from mypy.nodes import (
+    Node, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr,
+    MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr
+)
+
+
+TYPE_EMPTY = 0
+TYPE_PRECISE = 1
+TYPE_IMPRECISE = 2
+TYPE_ANY = 3
+
+precision_names = [
+    'empty',
+    'precise',
+    'imprecise',
+    'any',
+]
+
+
+class StatisticsVisitor(TraverserVisitor):
+    def __init__(self, inferred: bool, typemap: Dict[Node, Type] = None,
+                 all_nodes: bool = False) -> None:
+        self.inferred = inferred
+        self.typemap = typemap
+        self.all_nodes = all_nodes
+
+        self.num_precise = 0
+        self.num_imprecise = 0
+        self.num_any = 0
+
+        self.num_simple = 0
+        self.num_generic = 0
+        self.num_tuple = 0
+        self.num_function = 0
+        self.num_typevar = 0
+        self.num_complex = 0
+
+        self.line = -1
+
+        self.line_map = {}  # type: Dict[int, int]
+
+        self.output = []  # type: List[str]
+
+        TraverserVisitor.__init__(self)
+
+    def visit_func_def(self, o: FuncDef) -> None:
+        self.line = o.line
+        if len(o.expanded) > 1:
+            if o in o.expanded:
+                print('ERROR: cycle in function expansion; skipping')
+                return
+            for defn in o.expanded:
+                self.visit_func_def(cast(FuncDef, defn))
+        else:
+            if o.type:
+                sig = cast(CallableType, o.type)
+                arg_types = sig.arg_types
+                if (sig.arg_names and sig.arg_names[0] == 'self' and
+                        not self.inferred):
+                    arg_types = arg_types[1:]
+                for arg in arg_types:
+                    self.type(arg)
+                self.type(sig.ret_type)
+            elif self.all_nodes:
+                self.record_line(self.line, TYPE_ANY)
+            super().visit_func_def(o)
+
+    def visit_type_application(self, o: TypeApplication) -> None:
+        self.line = o.line
+        for t in o.types:
+            self.type(t)
+        super().visit_type_application(o)
+
+    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
+        self.line = o.line
+        if (isinstance(o.rvalue, nodes.CallExpr) and
+            isinstance(cast(nodes.CallExpr, o.rvalue).analyzed,
+                       nodes.TypeVarExpr)):
+            # Type variable definition -- not a real assignment.
+            return
+        if o.type:
+            self.type(o.type)
+        elif self.inferred:
+            for lvalue in o.lvalues:
+                if isinstance(lvalue, nodes.TupleExpr):
+                    items = lvalue.items
+                elif isinstance(lvalue, nodes.ListExpr):
+                    items = lvalue.items
+                else:
+                    items = [lvalue]
+                for item in items:
+                    if hasattr(item, 'is_def') and cast(Any, item).is_def:
+                        t = self.typemap.get(item)
+                        if t:
+                            self.type(t)
+                        else:
+                            self.log('  !! No inferred type on line %d' %
+                                     self.line)
+                            self.record_line(self.line, TYPE_ANY)
+        super().visit_assignment_stmt(o)
+
+    def visit_name_expr(self, o: NameExpr) -> None:
+        self.process_node(o)
+        super().visit_name_expr(o)
+
+    def visit_yield_from_expr(self, o: YieldFromExpr) -> None:
+        if o.expr:
+            o.expr.accept(self)
+
+    def visit_call_expr(self, o: CallExpr) -> None:
+        self.process_node(o)
+        if o.analyzed:
+            o.analyzed.accept(self)
+        else:
+            o.callee.accept(self)
+            for a in o.args:
+                a.accept(self)
+
+    def visit_member_expr(self, o: MemberExpr) -> None:
+        self.process_node(o)
+        super().visit_member_expr(o)
+
+    def visit_op_expr(self, o: OpExpr) -> None:
+        self.process_node(o)
+        super().visit_op_expr(o)
+
+    def visit_comparison_expr(self, o: ComparisonExpr) -> None:
+        self.process_node(o)
+        super().visit_comparison_expr(o)
+
+    def visit_index_expr(self, o: IndexExpr) -> None:
+        self.process_node(o)
+        super().visit_index_expr(o)
+
+    def visit_unary_expr(self, o: UnaryExpr) -> None:
+        self.process_node(o)
+        super().visit_unary_expr(o)
+
+    def process_node(self, node: Node) -> None:
+        if self.all_nodes:
+            typ = self.typemap.get(node)
+            if typ:
+                self.line = node.line
+                self.type(typ)
+
+    def type(self, t: Type) -> None:
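+        # First record the precision (precise/imprecise/Any) of this type for
+        # the current line, then tally its kind (simple/generic/tuple/...).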
+        if isinstance(t, AnyType):
+            self.log('  !! Any type around line %d' % self.line)
+            self.num_any += 1
+            self.record_line(self.line, TYPE_ANY)
+        elif ((not self.all_nodes and is_imprecise(t)) or
+              (self.all_nodes and is_imprecise2(t))):
+            self.log('  !! Imprecise type around line %d' % self.line)
+            self.num_imprecise += 1
+            self.record_line(self.line, TYPE_IMPRECISE)
+        else:
+            self.num_precise += 1
+            self.record_line(self.line, TYPE_PRECISE)
+
+        if isinstance(t, Instance):
+            if t.args:
+                if any(is_complex(arg) for arg in t.args):
+                    self.num_complex += 1
+                else:
+                    self.num_generic += 1
+            else:
+                self.num_simple += 1
+        elif isinstance(t, Void):
+            self.num_simple += 1
+        elif isinstance(t, FunctionLike):
+            self.num_function += 1
+        elif isinstance(t, TupleType):
+            if any(is_complex(item) for item in t.items):
+                self.num_complex += 1
+            else:
+                self.num_tuple += 1
+        elif isinstance(t, TypeVarType):
+            self.num_typevar += 1
+
+    def log(self, string: str) -> None:
+        self.output.append(string)
+
+    def record_line(self, line: int, precision: int) -> None:
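+        # Keep the least precise category seen so far for this line.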
+        self.line_map[line] = max(precision,
+                                  self.line_map.get(line, TYPE_PRECISE))
+
+
+def dump_type_stats(tree: Node, path: str, inferred: bool = False,
+                    typemap: Dict[Node, Type] = None) -> None:
+    if is_special_module(path):
+        return
+    print(path)
+    visitor = StatisticsVisitor(inferred, typemap)
+    tree.accept(visitor)
+    for line in visitor.output:
+        print(line)
+    print('  ** precision **')
+    print('  precise  ', visitor.num_precise)
+    print('  imprecise', visitor.num_imprecise)
+    print('  any      ', visitor.num_any)
+    print('  ** kinds **')
+    print('  simple   ', visitor.num_simple)
+    print('  generic  ', visitor.num_generic)
+    print('  function ', visitor.num_function)
+    print('  tuple    ', visitor.num_tuple)
+    print('  TypeVar  ', visitor.num_typevar)
+    print('  complex  ', visitor.num_complex)
+    print('  any      ', visitor.num_any)
+
+
+def is_special_module(path: str) -> bool:
+    return os.path.basename(path) in ('abc.py', 'typing.py', 'builtins.py')
+
+
+def is_imprecise(t: Type) -> bool:
+    return t.accept(HasAnyQuery())
+
+
+class HasAnyQuery(TypeQuery):
+    def __init__(self) -> None:
+        super().__init__(False, ANY_TYPE_STRATEGY)
+
+    def visit_any(self, t: AnyType) -> bool:
+        return True
+
+    def visit_instance(self, t: Instance) -> bool:
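+        # Treat any 'builtins.tuple' instance as imprecise, since it does not
+        # track the types of individual items the way TupleType does.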
+        if t.type.fullname() == 'builtins.tuple':
+            return True
+        else:
+            return super().visit_instance(t)
+
+
+def is_imprecise2(t: Type) -> bool:
+    return t.accept(HasAnyQuery2())
+
+
+class HasAnyQuery2(HasAnyQuery):
+    def visit_callable_type(self, t: CallableType) -> bool:
+        # We don't want to flag references to functions with some Any
+        # argument types (etc.) since they generally don't mean trouble.
+        return False
+
+
+def is_generic(t: Type) -> bool:
+    return isinstance(t, Instance) and bool(cast(Instance, t).args)
+
+
+def is_complex(t: Type) -> bool:
+    return is_generic(t) or isinstance(t, (FunctionLike, TupleType,
+                                           TypeVarType))
+
+
+html_files = []  # type: List[Tuple[str, str, int, int]]
+
+
+def generate_html_report(tree: Node, path: str, type_map: Dict[Node, Type],
+                         output_dir: str) -> None:
+    if is_special_module(path):
+        return
+    # There may be more than one right answer for "what should we do here?"
+    # but this is a reasonable one.
+    path = os.path.relpath(path)
+    if path.startswith('..'):
+        return
+    visitor = StatisticsVisitor(inferred=True, typemap=type_map, all_nodes=True)
+    tree.accept(visitor)
+    assert not os.path.isabs(path) and not path.startswith('..')
+    # This line is *wrong* if the preceding assert fails.
+    target_path = os.path.join(output_dir, 'html', path)
+    # replace .py or .pyi with .html
+    target_path = os.path.splitext(target_path)[0] + '.html'
+    assert target_path.endswith('.html')
+    ensure_dir_exists(os.path.dirname(target_path))
+    output = []  # type: List[str]
+    append = output.append
+    append('''\
+<html>
+<head>
+  <style>
+    .red { background-color: #faa; }
+    .yellow { background-color: #ffa; }
+    .white { }
+    .lineno { color: #999; }
+  </style>
+</head>
+<body>
+<pre>''')
+    num_imprecise_lines = 0
+    num_lines = 0
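+    # Color each line according to the worst precision recorded for it:
+    # white = precise, yellow = imprecise, red = Any.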
+    with open(path) as input_file:
+        for i, line in enumerate(input_file):
+            lineno = i + 1
+            status = visitor.line_map.get(lineno, TYPE_PRECISE)
+            style_map = {TYPE_PRECISE: 'white',
+                         TYPE_IMPRECISE: 'yellow',
+                         TYPE_ANY: 'red'}
+            style = style_map[status]
+            append('<span class="lineno">%4d</span>   ' % lineno +
+                   '<span class="%s">%s</span>' % (style,
+                                                   cgi.escape(line)))
+            if status != TYPE_PRECISE:
+                num_imprecise_lines += 1
+            if line.strip():
+                num_lines += 1
+    append('</pre>')
+    append('</body></html>')
+    with open(target_path, 'w') as output_file:
+        output_file.writelines(output)
+    target_path = target_path[len(output_dir) + 1:]
+    html_files.append((path, target_path, num_lines, num_imprecise_lines))
+
+
+def generate_html_index(output_dir: str) -> None:
+    path = os.path.join(output_dir, 'index.html')
+    output = []  # type: List[str]
+    append = output.append
+    append('''\
+<html>
+<head>
+  <style>
+  body { font-family: courier; }
+  table { border-collapse: collapse; }
+  table tr td { border: 1px solid black; }
+  td { padding: 0.4em; }
+  .red { background-color: #faa; }
+  .yellow { background-color: #ffa; }
+  </style>
+</head>
+<body>''')
+    append('<h1>Mypy Type Check Coverage Report</h1>\n')
+    append('<table>\n')
+    for source_path, target_path, num_lines, num_imprecise in sorted(html_files):
+        if num_lines == 0:
+            continue
+        source_path = os.path.normpath(source_path)
+        # TODO: Windows paths.
+        if (source_path.startswith('stubs/') or
+                '/stubs/' in source_path):
+            continue
+        percent = 100.0 * num_imprecise / num_lines
+        style = ''
+        if percent >= 20:
+            style = 'class="red"'
+        elif percent >= 5:
+            style = 'class="yellow"'
+        append('<tr %s><td><a href="%s">%s</a><td>%.1f%% imprecise<td>%d LOC\n' % (
+            style, target_path, source_path, percent, num_lines))
+    append('</table>\n')
+    append('</body></html>')
+    with open(path, 'w') as file:
+        file.writelines(output)
+    print('Generated HTML report (old): %s' % os.path.abspath(path))
+
+
+def ensure_dir_exists(dir: str) -> None:
+    if not os.path.exists(dir):
+        os.makedirs(dir)
diff --git a/mypy/strconv.py b/mypy/strconv.py
new file mode 100644
index 0000000..7cd88f3
--- /dev/null
+++ b/mypy/strconv.py
@@ -0,0 +1,452 @@
+"""Conversion of parse tree nodes to strings."""
+
+import re
+import os
+
+import typing
+
+from mypy.util import dump_tagged, short_type
+import mypy.nodes
+from mypy.visitor import NodeVisitor
+
+
+class StrConv(NodeVisitor[str]):
+    """Visitor for converting a Node to a human-readable string.
+
+    For example, a MypyFile node from program '1' is converted into
+    something like this:
+
+      MypyFile:1(
+        fnam
+        ExpressionStmt:1(
+          IntExpr(1)))
+    """
+    def dump(self, nodes, obj):
+        """Convert a list of items to a multiline pretty-printed string.
+
+        The tag is produced from the type name of obj and its line
+        number. See mypy.util.dump_tagged for a description of the nodes
+        argument.
+        """
+        return dump_tagged(nodes, short_type(obj) + ':' + str(obj.line))
+
+    def func_helper(self, o):
+        """Return a list in a format suitable for dump() that represents the
+        arguments and the body of a function. The caller can then decorate the
+        list with information specific to methods, global functions or
+        anonymous functions.
+        """
+        args = []
+        init = []
+        extra = []
+        for i, arg in enumerate(o.arguments):
+            kind = arg.kind
+            if kind == mypy.nodes.ARG_POS:
+                args.append(o.arguments[i].variable)
+            elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED):
+                args.append(o.arguments[i].variable)
+                init.append(o.arguments[i].initialization_statement)
+            elif kind == mypy.nodes.ARG_STAR:
+                extra.append(('VarArg', [o.arguments[i].variable]))
+            elif kind == mypy.nodes.ARG_STAR2:
+                extra.append(('DictVarArg', [o.arguments[i].variable]))
+        a = []
+        if args:
+            a.append(('Args', args))
+        if o.type:
+            a.append(o.type)
+        if init:
+            a.append(('Init', init))
+        if o.is_generator:
+            a.append('Generator')
+        a.extend(extra)
+        a.append(o.body)
+        return a
+
+    # Top-level structures
+
+    def visit_mypy_file(self, o):
+        # Skip implicit definitions.
+        a = [o.defs]
+        if o.is_bom:
+            a.insert(0, 'BOM')
+        # Omit path to special file with name "main". This is used to simplify
+        # test case descriptions; the file "main" is used by default in many
+        # test cases.
+        if o.path is not None and o.path != 'main':
+            # Insert path. Normalize directory separators to / to unify test
+            # case output on all platforms.
+            a.insert(0, o.path.replace(os.sep, '/'))
+        if o.ignored_lines:
+            a.append('IgnoredLines(%s)' % ', '.join(str(line)
+                                                    for line in sorted(o.ignored_lines)))
+        return self.dump(a, o)
+
+    def visit_import(self, o):
+        a = []
+        for id, as_id in o.ids:
+            if as_id is not None:
+                a.append('{} : {}'.format(id, as_id))
+            else:
+                a.append(id)
+        return 'Import:{}({})'.format(o.line, ', '.join(a))
+
+    def visit_import_from(self, o):
+        a = []
+        for name, as_name in o.names:
+            if as_name is not None:
+                a.append('{} : {}'.format(name, as_name))
+            else:
+                a.append(name)
+        return 'ImportFrom:{}({}, [{}])'.format(o.line, "." * o.relative + o.id, ', '.join(a))
+
+    def visit_import_all(self, o):
+        return 'ImportAll:{}({})'.format(o.line, "." * o.relative + o.id)
+
+    # Definitions
+
+    def visit_func_def(self, o):
+        a = self.func_helper(o)
+        a.insert(0, o.name())
+        if mypy.nodes.ARG_NAMED in [arg.kind for arg in o.arguments]:
+            a.insert(1, 'MaxPos({})'.format(o.max_pos))
+        if o.is_abstract:
+            a.insert(-1, 'Abstract')
+        if o.is_static:
+            a.insert(-1, 'Static')
+        if o.is_class:
+            a.insert(-1, 'Class')
+        if o.is_property:
+            a.insert(-1, 'Property')
+        return self.dump(a, o)
+
+    def visit_overloaded_func_def(self, o):
+        a = o.items[:]
+        if o.type:
+            a.insert(0, o.type)
+        return self.dump(a, o)
+
+    def visit_class_def(self, o):
+        a = [o.name, o.defs.body]
+        # Display base types unless they are implicitly just builtins.object
+        # (in which case base_type_exprs is empty).
+        if o.base_types and o.base_type_exprs:
+            a.insert(1, ('BaseType', o.base_types))
+        elif len(o.base_type_exprs) > 0:
+            a.insert(1, ('BaseTypeExpr', o.base_type_exprs))
+        if o.type_vars:
+            a.insert(1, ('TypeVars', o.type_vars))
+        if o.metaclass:
+            a.insert(1, 'Metaclass({})'.format(o.metaclass))
+        if o.decorators:
+            a.insert(1, ('Decorators', o.decorators))
+        if o.is_builtinclass:
+            a.insert(1, 'Builtinclass')
+        if o.info and o.info._promote:
+            a.insert(1, 'Promote({})'.format(o.info._promote))
+        if o.info and o.info.tuple_type:
+            a.insert(1, ('TupleType', [o.info.tuple_type]))
+        if o.info and o.info.fallback_to_any:
+            a.insert(1, 'FallbackToAny')
+        return self.dump(a, o)
+
+    def visit_var(self, o):
+        l = ''
+        # Add :nil line number tag if no line number is specified to remain
+        # compatible with old test case descriptions that assume this.
+        if o.line < 0:
+            l = ':nil'
+        return 'Var' + l + '(' + o.name() + ')'
+
+    def visit_global_decl(self, o):
+        return self.dump([o.names], o)
+
+    def visit_nonlocal_decl(self, o):
+        return self.dump([o.names], o)
+
+    def visit_decorator(self, o):
+        return self.dump([o.var, o.decorators, o.func], o)
+
+    def visit_annotation(self, o):
+        return 'Type:{}({})'.format(o.line, o.type)
+
+    # Statements
+
+    def visit_block(self, o):
+        return self.dump(o.body, o)
+
+    def visit_expression_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_assignment_stmt(self, o):
+        if len(o.lvalues) > 1:
+            a = [('Lvalues', o.lvalues)]
+        else:
+            a = [o.lvalues[0]]
+        a.append(o.rvalue)
+        if o.type:
+            a.append(o.type)
+        return self.dump(a, o)
+
+    def visit_operator_assignment_stmt(self, o):
+        return self.dump([o.op, o.lvalue, o.rvalue], o)
+
+    def visit_while_stmt(self, o):
+        a = [o.expr, o.body]
+        if o.else_body:
+            a.append(('Else', o.else_body.body))
+        return self.dump(a, o)
+
+    def visit_for_stmt(self, o):
+        a = [o.index]
+        a.extend([o.expr, o.body])
+        if o.else_body:
+            a.append(('Else', o.else_body.body))
+        return self.dump(a, o)
+
+    def visit_return_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_if_stmt(self, o):
+        a = []
+        for i in range(len(o.expr)):
+            a.append(('If', [o.expr[i]]))
+            a.append(('Then', o.body[i].body))
+
+        if not o.else_body:
+            return self.dump(a, o)
+        else:
+            return self.dump([a, ('Else', o.else_body.body)], o)
+
+    def visit_break_stmt(self, o):
+        return self.dump([], o)
+
+    def visit_continue_stmt(self, o):
+        return self.dump([], o)
+
+    def visit_pass_stmt(self, o):
+        return self.dump([], o)
+
+    def visit_raise_stmt(self, o):
+        return self.dump([o.expr, o.from_expr], o)
+
+    def visit_assert_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_yield_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_yield_from_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_yield_expr(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_del_stmt(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_try_stmt(self, o):
+        a = [o.body]
+
+        for i in range(len(o.vars)):
+            a.append(o.types[i])
+            if o.vars[i]:
+                a.append(o.vars[i])
+            a.append(o.handlers[i])
+
+        if o.else_body:
+            a.append(('Else', o.else_body.body))
+        if o.finally_body:
+            a.append(('Finally', o.finally_body.body))
+
+        return self.dump(a, o)
+
+    def visit_with_stmt(self, o):
+        a = []
+        for i in range(len(o.expr)):
+            a.append(('Expr', [o.expr[i]]))
+            if o.target[i]:
+                a.append(('Target', [o.target[i]]))
+        return self.dump(a + [o.body], o)
+
+    def visit_print_stmt(self, o):
+        a = o.args[:]
+        if o.target:
+            a.append(('Target', [o.target]))
+        if o.newline:
+            a.append('Newline')
+        return self.dump(a, o)
+
+    def visit_exec_stmt(self, o):
+        return self.dump([o.expr, o.variables1, o.variables2], o)
+
+    # Expressions
+
+    # Simple expressions
+
+    def visit_int_expr(self, o):
+        return 'IntExpr({})'.format(o.value)
+
+    def visit_str_expr(self, o):
+        return 'StrExpr({})'.format(self.str_repr(o.value))
+
+    def visit_bytes_expr(self, o):
+        return 'BytesExpr({})'.format(self.str_repr(o.value))
+
+    def visit_unicode_expr(self, o):
+        return 'UnicodeExpr({})'.format(self.str_repr(o.value))
+
+    def str_repr(self, s):
+        s = re.sub(r'\\u[0-9a-fA-F]{4}', lambda m: '\\' + m.group(0), s)
+        return re.sub('[^\\x20-\\x7e]',
+                      lambda m: r'\u%.4x' % ord(m.group(0)), s)
+
+    def visit_float_expr(self, o):
+        return 'FloatExpr({})'.format(o.value)
+
+    def visit_complex_expr(self, o):
+        return 'ComplexExpr({})'.format(o.value)
+
+    def visit_ellipsis(self, o):
+        return 'Ellipsis'
+
+    def visit_star_expr(self, o):
+        return self.dump([o.expr], o)
+
+    def visit_name_expr(self, o):
+        return (short_type(o) + '(' + self.pretty_name(o.name, o.kind,
+                                                       o.fullname, o.is_def)
+                + ')')
+
+    def pretty_name(self, name, kind, fullname, is_def):
+        n = name
+        if is_def:
+            n += '*'
+        if kind == mypy.nodes.GDEF or (fullname != name and
+                                       fullname is not None):
+            # Append fully qualified name for global references.
+            n += ' [{}]'.format(fullname)
+        elif kind == mypy.nodes.LDEF:
+            # Add tag to signify a local reference.
+            n += ' [l]'
+        elif kind == mypy.nodes.MDEF:
+            # Add tag to signify a member reference.
+            n += ' [m]'
+        return n
+
+    def visit_member_expr(self, o):
+        return self.dump([o.expr, self.pretty_name(o.name, o.kind, o.fullname,
+                                                   o.is_def)], o)
+
+    def visit_yield_from_expr(self, o):
+        if o.expr:
+            return self.dump([o.expr.accept(self)], o)
+        else:
+            return self.dump([], o)
+
+    def visit_call_expr(self, o):
+        if o.analyzed:
+            return o.analyzed.accept(self)
+        args = []
+        extra = []
+        for i, kind in enumerate(o.arg_kinds):
+            if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]:
+                args.append(o.args[i])
+                if kind == mypy.nodes.ARG_STAR:
+                    extra.append('VarArg')
+            elif kind == mypy.nodes.ARG_NAMED:
+                extra.append(('KwArgs', [o.arg_names[i], o.args[i]]))
+            elif kind == mypy.nodes.ARG_STAR2:
+                extra.append(('DictVarArg', [o.args[i]]))
+            else:
+                raise RuntimeError('unknown kind %d' % kind)
+
+        return self.dump([o.callee, ('Args', args)] + extra, o)
+
+    def visit_op_expr(self, o):
+        return self.dump([o.op, o.left, o.right], o)
+
+    def visit_comparison_expr(self, o):
+        return self.dump([o.operators, o.operands], o)
+
+    def visit_cast_expr(self, o):
+        return self.dump([o.expr, o.type], o)
+
+    def visit_unary_expr(self, o):
+        return self.dump([o.op, o.expr], o)
+
+    def visit_list_expr(self, o):
+        return self.dump(o.items, o)
+
+    def visit_dict_expr(self, o):
+        return self.dump([[k, v] for k, v in o.items], o)
+
+    def visit_set_expr(self, o):
+        return self.dump(o.items, o)
+
+    def visit_tuple_expr(self, o):
+        return self.dump(o.items, o)
+
+    def visit_index_expr(self, o):
+        if o.analyzed:
+            return o.analyzed.accept(self)
+        return self.dump([o.base, o.index], o)
+
+    def visit_super_expr(self, o):
+        return self.dump([o.name], o)
+
+    def visit_type_application(self, o):
+        return self.dump([o.expr, ('Types', o.types)], o)
+
+    def visit_type_var_expr(self, o):
+        if o.variance == mypy.nodes.COVARIANT:
+            return self.dump(['Variance(COVARIANT)'], o)
+        if o.variance == mypy.nodes.CONTRAVARIANT:
+            return self.dump(['Variance(CONTRAVARIANT)'], o)
+        if o.values:
+            return self.dump([('Values', o.values)], o)
+        else:
+            return 'TypeVarExpr:{}()'.format(o.line)
+
+    def visit_type_alias_expr(self, o):
+        return 'TypeAliasExpr({})'.format(o.type)
+
+    def visit_namedtuple_expr(self, o):
+        return 'NamedTupleExpr:{}({}, {})'.format(o.line,
+                                                  o.info.name(),
+                                                  o.info.tuple_type)
+
+    def visit__promote_expr(self, o):
+        return 'PromoteExpr:{}({})'.format(o.line, o.type)
+
+    def visit_func_expr(self, o):
+        a = self.func_helper(o)
+        return self.dump(a, o)
+
+    def visit_generator_expr(self, o):
+        condlists = o.condlists if any(o.condlists) else None
+        return self.dump([o.left_expr, o.indices, o.sequences, condlists], o)
+
+    def visit_list_comprehension(self, o):
+        return self.dump([o.generator], o)
+
+    def visit_set_comprehension(self, o):
+        return self.dump([o.generator], o)
+
+    def visit_dictionary_comprehension(self, o):
+        condlists = o.condlists if any(o.condlists) else None
+        return self.dump([o.key, o.value, o.indices, o.sequences, condlists], o)
+
+    def visit_conditional_expr(self, o):
+        return self.dump([('Condition', [o.cond]), o.if_expr, o.else_expr], o)
+
+    def visit_slice_expr(self, o):
+        a = [o.begin_index, o.end_index, o.stride]
+        if not a[0]:
+            a[0] = '<empty>'
+        if not a[1]:
+            a[1] = '<empty>'
+        return self.dump(a, o)
+
+    def visit_backquote_expr(self, o):
+        return self.dump([o.expr], o)
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
new file mode 100644
index 0000000..45647a7
--- /dev/null
+++ b/mypy/stubgen.py
@@ -0,0 +1,662 @@
+"""Generator of dynamically typed draft stubs for arbitrary modules.
+
+Basic usage:
+
+  $ mkdir out
+  $ stubgen urllib.parse
+
+  => Generate out/urllib/parse.pyi.
+
+For Python 2 mode, use --py2:
+
+  $ stubgen --py2 textwrap
+
+For C modules, you can get more precise function signatures by parsing .rst (Sphinx)
+documentation for extra information. For this, use the --docpath option:
+
+  $ scripts/stubgen --docpath <DIR>/Python-3.4.2/Doc/library curses
+
+  => Generate out/curses.pyi.
+
+Use "stubgen -h" for more help.
+
+Note: You should verify the generated stubs manually.
+
+TODO:
+
+ - support stubs for C modules in Python 2 mode
+ - support non-default Python interpreters in Python 3 mode
+ - if using --no-import, look for __all__ in the AST
+ - infer some return types, such as no return statement with value -> None
+ - detect 'if PY2 / is_py2' etc. and either preserve those or only include Python 2 or 3 case
+ - maybe export more imported names if there is no __all__ (this affects ssl.SSLError, for example)
+   - a quick and dirty heuristic would be to turn this on if a module has something like
+     'from x import y as _y'
+ - we don't seem to always detect properties ('closed' in 'io', for example)
+"""
+
+import glob
+import imp
+import importlib
+import json
+import os.path
+import subprocess
+import sys
+import textwrap
+
+from typing import Any, List, Dict, Tuple, Iterable, Optional, NamedTuple, Set
+
+import mypy.build
+import mypy.parse
+import mypy.errors
+import mypy.traverser
+from mypy import defaults
+from mypy.nodes import (
+    Node, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr,
+    ListExpr, ComparisonExpr, CallExpr, ClassDef, MypyFile, Decorator, AssignmentStmt,
+    IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase, ARG_STAR, ARG_STAR2, ARG_NAMED
+)
+from mypy.stubgenc import parse_all_signatures, find_unique_signatures, generate_stub_for_c_module
+from mypy.stubutil import is_c_module, write_header
+
+
+Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
+                                 ('no_import', bool),
+                                 ('doc_dir', str),
+                                 ('search_path', List[str]),
+                                 ('interpreter', str),
+                                 ('modules', List[str])])
+
+
+def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
+                             add_header: bool = False, sigs: Dict[str, str] = {},
+                             class_sigs: Dict[str, str] = {},
+                             pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+                             no_import: bool = False,
+                             search_path: List[str] = [],
+                             interpreter: str = sys.executable) -> None:
+    target = module.replace('.', '/')
+    result = find_module_path_and_all(module=module,
+                                      pyversion=pyversion,
+                                      no_import=no_import,
+                                      search_path=search_path,
+                                      interpreter=interpreter)
+    if not result:
+        # C module
+        target = os.path.join(output_dir, target + '.pyi')
+        generate_stub_for_c_module(module_name=module,
+                                   target=target,
+                                   add_header=add_header,
+                                   sigs=sigs,
+                                   class_sigs=class_sigs)
+    else:
+        # Python module
+        module_path, module_all = result
+        if os.path.basename(module_path) == '__init__.py':
+            target += '/__init__.pyi'
+        else:
+            target += '.pyi'
+        target = os.path.join(output_dir, target)
+        generate_stub(module_path, output_dir, module_all,
+                      target=target, add_header=add_header, module=module, pyversion=pyversion)
+    if not quiet:
+        print('Created %s' % target)
+
+
+def find_module_path_and_all(module: str, pyversion: Tuple[int, int],
+                             no_import: bool,
+                             search_path: List[str],
+                             interpreter: str) -> Optional[Tuple[str,
+                                                                 Optional[List[str]]]]:
+    """Find module and determine __all__.
+
+    Return None if the module is a C module. Return (module_path, __all__) if
+    it is a Python module. Raise an exception or exit on failure.
+    """
+    if not no_import:
+        if pyversion[0] == 2:
+            module_path, module_all = load_python_module_info(module, interpreter)
+        else:
+            # TODO: Support custom interpreters.
+            mod = importlib.import_module(module)
+            imp.reload(mod)
+            if is_c_module(mod):
+                return None
+            module_path = mod.__file__
+            module_all = getattr(mod, '__all__', None)
+    else:
+        # Find module by going through search path.
+        module_path = mypy.build.find_module(module, ['.'] + search_path)
+        if not module_path:
+            raise SystemExit(
+                "Can't find module '{}' (consider using --search-path)".format(module))
+        module_all = None
+    return module_path, module_all
+
+
+def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optional[List[str]]]:
+    """Return tuple (module path, module __all__) for a Python 2 module.
+
+    The path refers to the .py/.py[co] file. The second tuple item is
+    None if the module doesn't define __all__.
+
+    Exit if the module can't be imported or if it's a C extension module.
+    """
+    cmd_template = '{interpreter} -c "%s"'.format(interpreter=interpreter)
+    code = ("import importlib, json; mod = importlib.import_module('%s'); "
+            "print(mod.__file__); print(json.dumps(getattr(mod, '__all__', None)))") % module
+    try:
+        output_bytes = subprocess.check_output(cmd_template % code, shell=True)
+    except subprocess.CalledProcessError:
+        print("Can't import module %s" % module)
+        sys.exit(1)
+    output = output_bytes.decode('ascii').strip().splitlines()
+    module_path = output[0]
+    if not module_path.endswith(('.py', '.pyc', '.pyo')):
+        raise SystemExit('%s looks like a C module; they are not supported for Python 2' %
+                         module)
+    if module_path.endswith(('.pyc', '.pyo')):
+        module_path = module_path[:-1]
+    module_all = json.loads(output[1])
+    return module_path, module_all
+
+
+def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None,
+                  target: str = None, add_header: bool = False, module: str = None,
+                  pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+    source = open(path, 'rb').read()
+    try:
+        ast = mypy.parse.parse(source, fnam=path, pyversion=pyversion)
+    except mypy.errors.CompileError as e:
+        # Syntax error!
+        for m in e.messages:
+            sys.stderr.write('%s\n' % m)
+        sys.exit(1)
+
+    gen = StubGenerator(_all_, pyversion=pyversion)
+    ast.accept(gen)
+    if not target:
+        target = os.path.join(output_dir, os.path.basename(path))
+    subdir = os.path.dirname(target)
+    if subdir and not os.path.isdir(subdir):
+        os.makedirs(subdir)
+    with open(target, 'w') as file:
+        if add_header:
+            write_header(file, module, pyversion=pyversion)
+        file.write(''.join(gen.output()))
+
+
+# What was generated previously in the stub file. We keep track of these to generate
+# nicely formatted output (add empty line between non-empty classes, for example).
+EMPTY = 'EMPTY'
+FUNC = 'FUNC'
+CLASS = 'CLASS'
+EMPTY_CLASS = 'EMPTY_CLASS'
+VAR = 'VAR'
+NOT_IN_ALL = 'NOT_IN_ALL'
+
+
+class StubGenerator(mypy.traverser.TraverserVisitor):
+    def __init__(self, _all_: Optional[List[str]], pyversion: Tuple[int, int]) -> None:
+        self._all_ = _all_
+        self._output = []  # type: List[str]
+        self._import_lines = []  # type: List[str]
+        self._imports = []  # type: List[str]
+        self._indent = ''
+        self._vars = [[]]  # type: List[List[str]]
+        self._state = EMPTY
+        self._toplevel_names = []  # type: List[str]
+        self._classes = set()  # type: Set[str]
+        self._base_classes = []  # type: List[str]
+        self._pyversion = pyversion
+
+    def visit_mypy_file(self, o: MypyFile) -> None:
+        self._classes = find_classes(o)
+        for node in o.defs:
+            if isinstance(node, ClassDef):
+                self._base_classes.extend(self.get_base_types(node))
+        super().visit_mypy_file(o)
+        undefined_names = [name for name in self._all_ or []
+                           if name not in self._toplevel_names]
+        if undefined_names:
+            if self._state != EMPTY:
+                self.add('\n')
+            self.add('# Names in __all__ with no definition:\n')
+            for name in sorted(undefined_names):
+                self.add('#   %s\n' % name)
+
+    def visit_func_def(self, o: FuncDef) -> None:
+        if self.is_private_name(o.name()):
+            return
+        if self.is_not_in_all(o.name()):
+            return
+        if self.is_recorded_name(o.name()):
+            return
+        if not self._indent and self._state not in (EMPTY, FUNC):
+            self.add('\n')
+        if not self.is_top_level():
+            self_inits = find_self_initializers(o)
+            for init in self_inits:
+                init_code = self.get_init(init)
+                if init_code:
+                    self.add(init_code)
+        self.add("%sdef %s(" % (self._indent, o.name()))
+        self.record_name(o.name())
+        args = []  # type: List[str]
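+        # Render each argument: integer and None/True/False defaults are kept,
+        # str/bytes/float defaults become placeholder literals ('', b'', 0.0),
+        # and any other default is written as '...'.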
+        for i, arg_ in enumerate(o.arguments):
+            var = arg_.variable
+            kind = arg_.kind
+            name = var.name()
+            init_stmt = arg_.initialization_statement
+            if init_stmt:
+                if kind == ARG_NAMED and '*' not in args:
+                    args.append('*')
+                arg = '%s=' % name
+                rvalue = init_stmt.rvalue
+                if isinstance(rvalue, IntExpr):
+                    arg += str(rvalue.value)
+                elif isinstance(rvalue, StrExpr):
+                    arg += "''"
+                elif isinstance(rvalue, BytesExpr):
+                    arg += "b''"
+                elif isinstance(rvalue, FloatExpr):
+                    arg += "0.0"
+                elif isinstance(rvalue, UnaryExpr) and isinstance(rvalue.expr, IntExpr):
+                    arg += '-%s' % rvalue.expr.value
+                elif isinstance(rvalue, NameExpr) and rvalue.name in ('None', 'True', 'False'):
+                    arg += rvalue.name
+                else:
+                    arg += '...'
+            elif kind == ARG_STAR:
+                arg = '*%s' % name
+            elif kind == ARG_STAR2:
+                arg = '**%s' % name
+            else:
+                arg = name
+            args.append(arg)
+        self.add(', '.join(args))
+        self.add("): ...\n")
+        self._state = FUNC
+
+    def visit_decorator(self, o: Decorator) -> None:
+        if self.is_private_name(o.func.name()):
+            return
+        for decorator in o.decorators:
+            if isinstance(decorator, NameExpr) and decorator.name in ('property',
+                                                                      'staticmethod',
+                                                                      'classmethod'):
+                self.add('%s@%s\n' % (self._indent, decorator.name))
+            elif (isinstance(decorator, MemberExpr) and decorator.name == 'setter' and
+                  isinstance(decorator.expr, NameExpr)):
+                self.add('%s@%s.setter\n' % (self._indent, decorator.expr.name))
+        super().visit_decorator(o)
+
+    def visit_class_def(self, o: ClassDef) -> None:
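+        # Separate top-level classes from earlier output with a blank line;
+        # remember its index so the separator can be dropped again when two
+        # empty classes end up next to each other.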
+        if not self._indent and self._state != EMPTY:
+            sep = len(self._output)
+            self.add('\n')
+        else:
+            sep = None
+        self.add('%sclass %s' % (self._indent, o.name))
+        self.record_name(o.name)
+        base_types = self.get_base_types(o)
+        if base_types:
+            self.add('(%s)' % ', '.join(base_types))
+        self.add(':\n')
+        n = len(self._output)
+        self._indent += '    '
+        self._vars.append([])
+        super().visit_class_def(o)
+        self._indent = self._indent[:-4]
+        self._vars.pop()
+        if len(self._output) == n:
+            if self._state == EMPTY_CLASS and sep is not None:
+                self._output[sep] = ''
+            self._output[-1] = self._output[-1][:-1] + ' ...\n'
+            self._state = EMPTY_CLASS
+        else:
+            self._state = CLASS
+
+    def get_base_types(self, cdef: ClassDef) -> List[str]:
+        base_types = []  # type: List[str]
+        for base in cdef.base_type_exprs:
+            if isinstance(base, NameExpr):
+                if base.name != 'object':
+                    base_types.append(base.name)
+            elif isinstance(base, MemberExpr):
+                modname = get_qualified_name(base.expr)
+                base_types.append('%s.%s' % (modname, base.name))
+                self.add_import_line('import %s\n' % modname)
+        return base_types
+
+    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
+        lvalue = o.lvalues[0]
+        if isinstance(lvalue, NameExpr) and self.is_namedtuple(o.rvalue):
+            self.process_namedtuple(lvalue, o.rvalue)
+            return
+        if isinstance(lvalue, TupleExpr):
+            items = lvalue.items
+        elif isinstance(lvalue, ListExpr):
+            items = lvalue.items
+        else:
+            items = [lvalue]
+        sep = False
+        found = False
+        for item in items:
+            if isinstance(item, NameExpr):
+                init = self.get_init(item.name)
+                if init:
+                    found = True
+                    if not sep and not self._indent and self._state not in (EMPTY, VAR):
+                        init = '\n' + init
+                        sep = True
+                    self.add(init)
+                    self.record_name(item.name)
+        if found:
+            self._state = VAR
+
+    def is_namedtuple(self, expr: Node) -> bool:
+        if not isinstance(expr, CallExpr):
+            return False
+        callee = expr.callee
+        return ((isinstance(callee, NameExpr) and callee.name.endswith('namedtuple')) or
+                (isinstance(callee, MemberExpr) and callee.name == 'namedtuple'))
+
+    def process_namedtuple(self, lvalue, rvalue):
+        self.add_import_line('from collections import namedtuple\n')
+        if self._state != EMPTY:
+            self.add('\n')
+        name = repr(getattr(rvalue.args[0], 'value', '<ERROR>'))
+        if isinstance(rvalue.args[1], StrExpr):
+            items = repr(rvalue.args[1].value)
+        elif isinstance(rvalue.args[1], ListExpr):
+            list_items = rvalue.args[1].items
+            items = '[%s]' % ', '.join(repr(item.value) for item in list_items)
+        else:
+            items = '<ERROR>'
+        self.add('%s = namedtuple(%s, %s)\n' % (lvalue.name, name, items))
+        self._classes.add(lvalue.name)
+        self._state = CLASS
+
+    def visit_if_stmt(self, o: IfStmt) -> None:
+        # Ignore if __name__ == '__main__'.
+        expr = o.expr[0]
+        if (isinstance(expr, ComparisonExpr) and
+                isinstance(expr.operands[0], NameExpr) and
+                isinstance(expr.operands[1], StrExpr) and
+                expr.operands[0].name == '__name__' and
+                '__main__' in expr.operands[1].value):
+            return
+        super().visit_if_stmt(o)
+
+    def visit_import_all(self, o: ImportAll) -> None:
+        self.add_import_line('from %s%s import *\n' % ('.' * o.relative, o.id))
+
+    def visit_import_from(self, o: ImportFrom) -> None:
+        exported_names = set()  # type: Set[str]
+        if self._all_:
+            # Include import froms that import names defined in __all__.
+            names = [name for name, alias in o.names
+                     if name in self._all_ and alias is None]
+            exported_names.update(names)
+            self.import_and_export_names(o.id, o.relative, names)
+        else:
+            # Include import from targets that import from a submodule of a package.
+            if o.relative:
+                sub_names = [name for name, alias in o.names
+                             if alias is None]
+                exported_names.update(sub_names)
+                self.import_and_export_names(o.id, o.relative, sub_names)
+        # Import names used as base classes.
+        base_names = [(name, alias) for name, alias in o.names
+                      if alias or name in self._base_classes and name not in exported_names]
+        if base_names:
+            imp_names = []  # type: List[str]
+            for name, alias in base_names:
+                if alias is not None and alias != name:
+                    imp_names.append('%s as %s' % (name, alias))
+                else:
+                    imp_names.append(name)
+            self.add_import_line('from %s%s import %s\n' % (
+                '.' * o.relative, o.id, ', '.join(imp_names)))
+
+    def import_and_export_names(self, module_id: str, relative: int, names: Iterable[str]) -> None:
+        """Import names from a module and export them (via from ... import x as x)."""
+        if names and module_id:
+            full_module_name = '%s%s' % ('.' * relative, module_id)
+            imported_names = ', '.join(['%s as %s' % (name, name) for name in names])
+            self.add_import_line('from %s import %s\n' % (full_module_name, imported_names))
+            for name in names:
+                self.record_name(name)
+
+    def visit_import(self, o: Import) -> None:
+        for id, as_id in o.ids:
+            if as_id is None:
+                target_name = id.split('.')[0]
+            else:
+                target_name = as_id
+            if self._all_ and target_name in self._all_ and (as_id is not None or
+                                                             '.' not in id):
+                self.add_import_line('import %s as %s\n' % (id, target_name))
+                self.record_name(target_name)
+
+    def get_init(self, lvalue: str) -> str:
+        """Return initializer for a variable.
+
+        Return None if we've generated one already or if the variable is internal.
+        """
+        if lvalue in self._vars[-1]:
+            # We've generated an initializer already for this variable.
+            return None
+        # TODO: Only do this at module top level.
+        if self.is_private_name(lvalue) or self.is_not_in_all(lvalue):
+            return None
+        self._vars[-1].append(lvalue)
+        self.add_typing_import('Any')
+        return '%s%s = ... # type: Any\n' % (self._indent, lvalue)
+
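+    # Editorial illustration (not upstream code): at module top level (empty indent),
+    # the first call to get_init('x') returns "x = ... # type: Any\n"; repeated calls
+    # for the same name, or calls for private or not-exported names, return None.
+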
+    def add(self, string: str) -> None:
+        """Add text to generated stub."""
+        self._output.append(string)
+
+    def add_typing_import(self, name: str) -> None:
+        """Add a name to be imported from typing, unless it's imported already.
+
+        The import will be internal to the stub.
+        """
+        if name not in self._imports:
+            self._imports.append(name)
+
+    def add_import_line(self, line: str) -> None:
+        """Add a line of text to the import section, unless it's already there."""
+        if line not in self._import_lines:
+            self._import_lines.append(line)
+
+    def output(self) -> str:
+        """Return the text for the stub."""
+        imports = ''
+        if self._imports:
+            imports += 'from typing import %s\n' % ", ".join(self._imports)
+        if self._import_lines:
+            imports += ''.join(self._import_lines)
+        if imports and self._output:
+            imports += '\n'
+        return imports + ''.join(self._output)
+
+    def is_not_in_all(self, name: str) -> bool:
+        if self.is_private_name(name):
+            return False
+        return self.is_top_level() and bool(self._all_) and name not in self._all_
+
+    def is_private_name(self, name: str) -> bool:
+        return name.startswith('_') and (not name.endswith('__')
+                                         or name in ('__all__',
+                                                     '__author__',
+                                                     '__version__',
+                                                     '__str__',
+                                                     '__repr__',
+                                                     '__getstate__',
+                                                     '__setstate__',
+                                                     '__slots__'))
+
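+    # Editorial illustration (not upstream code):
+    #     is_private_name('_helper')     -> True   (single leading underscore)
+    #     is_private_name('__helper')    -> True   (no trailing double underscore)
+    #     is_private_name('__init__')    -> False  (ordinary dunder names are kept)
+    #     is_private_name('__version__') -> True   (explicitly treated as private)
+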
+    def is_top_level(self) -> bool:
+        """Are we processing the top level of a file?"""
+        return self._indent == ''
+
+    def record_name(self, name: str) -> None:
+        """Mark a name as defined.
+
+        This only does anything if at the top level of a module.
+        """
+        if self.is_top_level():
+            self._toplevel_names.append(name)
+
+    def is_recorded_name(self, name: str) -> bool:
+        """Has this name been recorded previously?"""
+        return self.is_top_level() and name in self._toplevel_names
+
+
+def find_self_initializers(fdef: FuncBase) -> List[str]:
+    results = []  # type: List[str]
+
+    class SelfTraverser(mypy.traverser.TraverserVisitor):
+        def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
+            lvalue = o.lvalues[0]
+            if (isinstance(lvalue, MemberExpr) and
+                    isinstance(lvalue.expr, NameExpr) and
+                    lvalue.expr.name == 'self'):
+                results.append(lvalue.name)
+
+    fdef.accept(SelfTraverser())
+    return results
+
+
+def find_classes(node: Node) -> Set[str]:
+    results = set()  # type: Set[str]
+
+    class ClassTraverser(mypy.traverser.TraverserVisitor):
+        def visit_class_def(self, o: ClassDef) -> None:
+            results.add(o.name)
+
+    node.accept(ClassTraverser())
+    return results
+
+
+def get_qualified_name(o: Node) -> str:
+    if isinstance(o, NameExpr):
+        return o.name
+    elif isinstance(o, MemberExpr):
+        return '%s.%s' % (get_qualified_name(o.expr), o.name)
+    else:
+        return '<ERROR>'
+
+
+def main() -> None:
+    options = parse_options()
+    if not os.path.isdir('out'):
+        raise SystemExit('Directory "out" does not exist')
+    sigs = {}  # type: Any
+    class_sigs = {}  # type: Any
+    if options.doc_dir:
+        all_sigs = []  # type: Any
+        all_class_sigs = []  # type: Any
+        for path in glob.glob('%s/*.rst' % options.doc_dir):
+            func_sigs, class_sigs = parse_all_signatures(open(path).readlines())
+            all_sigs += func_sigs
+            all_class_sigs += class_sigs
+        sigs = dict(find_unique_signatures(all_sigs))
+        class_sigs = dict(find_unique_signatures(all_class_sigs))
+    for module in options.modules:
+        generate_stub_for_module(module, 'out',
+                                 add_header=True,
+                                 sigs=sigs,
+                                 class_sigs=class_sigs,
+                                 pyversion=options.pyversion,
+                                 no_import=options.no_import,
+                                 search_path=options.search_path,
+                                 interpreter=options.interpreter)
+
+
+def parse_options() -> Options:
+    args = sys.argv[1:]
+    pyversion = defaults.PYTHON3_VERSION
+    no_import = False
+    doc_dir = ''
+    search_path = []  # type: List[str]
+    interpreter = ''
+    while args and args[0].startswith('-'):
+        if args[0] == '--doc-dir':
+            doc_dir = args[1]
+            args = args[1:]
+        elif args[0] == '--search-path':
+            if not args[1]:
+                usage()
+            search_path = args[1].split(':')
+            args = args[1:]
+        elif args[0] == '-p':
+            interpreter = args[1]
+            args = args[1:]
+        elif args[0] == '--py2':
+            pyversion = defaults.PYTHON2_VERSION
+        elif args[0] == '--no-import':
+            no_import = True
+        elif args[0] in ('-h', '--help'):
+            usage()
+        else:
+            raise SystemExit('Unrecognized option %s' % args[0])
+        args = args[1:]
+    if not args:
+        usage()
+    if not interpreter:
+        interpreter = sys.executable if pyversion[0] == 3 else default_python2_interpreter()
+    return Options(pyversion=pyversion,
+                   no_import=no_import,
+                   doc_dir=doc_dir,
+                   search_path=search_path,
+                   interpreter=interpreter,
+                   modules=args)
+
+
+def default_python2_interpreter() -> str:
+    # TODO: Make this do something reasonable on Windows.
+    for candidate in ('/usr/bin/python2', '/usr/bin/python'):
+        if not os.path.exists(candidate):
+            continue
+        output = subprocess.check_output([candidate, '--version'],
+                                         stderr=subprocess.STDOUT).strip()
+        if b'Python 2' in output:
+            return candidate
+    raise SystemExit("Can't find a Python 2 interpreter -- please use the -p option")
+
+
+def usage() -> None:
+    usage = textwrap.dedent("""\
+        usage: stubgen [--py2] [--no-import] [--doc-dir PATH]
+                       [--search-path PATH] [-p PATH] MODULE ...
+
+        Generate draft stubs for modules.
+
+        Stubs are generated in directory ./out, to avoid overwriting files with
+        manual changes.  This directory is assumed to exist.
+
+        Options:
+          --py2           run in Python 2 mode (default: Python 3 mode)
+          --no-import     don't import the modules, just parse and analyze them
+                          (doesn't work with C extension modules and doesn't
+                          respect __all__)
+          --doc-dir PATH  use .rst documentation in PATH (this may result in
+                          better stubs in some cases; consider setting this to
+                          DIR/Python-X.Y.Z/Doc/library)
+          --search-path PATH
+                          specify module search directories, separated by ':'
+                          (currently only used if --no-import is given)
+          -p PATH         use Python interpreter at PATH (only works for
+                          Python 2 right now)
+          -h, --help      print this help message and exit
+    """.rstrip())
+
+    raise SystemExit(usage)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
new file mode 100644
index 0000000..6e1829d
--- /dev/null
+++ b/mypy/stubgenc.py
@@ -0,0 +1,215 @@
+"""Stub generator for C modules.
+
+The public interface is via the mypy.stubgen module.
+"""
+
+import importlib
+import os.path
+import re
+
+
+from mypy.stubutil import (
+    parse_all_signatures, find_unique_signatures, is_c_module, write_header,
+    infer_sig_from_docstring
+)
+
+
+def generate_stub_for_c_module(module_name, target, add_header=True, sigs={}, class_sigs={}):
+    module = importlib.import_module(module_name)
+    assert is_c_module(module), '%s is not a C module' % module_name
+    subdir = os.path.dirname(target)
+    if subdir and not os.path.isdir(subdir):
+        os.makedirs(subdir)
+    functions = []
+    done = set()
+    items = sorted(module.__dict__.items(), key=lambda x: x[0])
+    for name, obj in items:
+        if is_c_function(obj):
+            generate_c_function_stub(module, name, obj, functions, sigs=sigs)
+            done.add(name)
+    types = []
+    for name, obj in items:
+        if name.startswith('__') and name.endswith('__'):
+            continue
+        if is_c_type(obj):
+            generate_c_type_stub(module, name, obj, types, sigs=sigs, class_sigs=class_sigs)
+            done.add(name)
+    variables = []
+    for name, obj in items:
+        if name.startswith('__') and name.endswith('__'):
+            continue
+        if name not in done:
+            type_str = type(obj).__name__
+            if type_str not in ('int', 'str', 'bytes', 'float', 'bool'):
+                type_str = 'Any'
+            variables.append('%s = ... # type: %s' % (name, type_str))
+    output = []
+    for line in variables:
+        output.append(line)
+    if output and functions:
+        output.append('')
+    for line in functions:
+        output.append(line)
+    for line in types:
+        if line.startswith('class') and output and output[-1]:
+            output.append('')
+        output.append(line)
+    output = add_typing_import(output)
+    with open(target, 'w') as file:
+        if add_header:
+            write_header(file, module_name)
+        for line in output:
+            file.write('%s\n' % line)
+
+
+def add_typing_import(output):
+    names = []
+    for name in ['Any']:
+        if any(re.search(r'\b%s\b' % name, line) for line in output):
+            names.append(name)
+    if names:
+        return ['from typing import %s' % ', '.join(names), ''] + output
+    else:
+        return output[:]
+
+
+def is_c_function(obj):
+    return type(obj) is type(ord)
+
+
+def is_c_method(obj):
+    return type(obj) in (type(str.index),
+                         type(str.__add__),
+                         type(str.__new__))
+
+
+def is_c_classmethod(obj):
+    type_str = type(obj).__name__
+    return type_str == 'classmethod_descriptor'
+
+
+def is_c_type(obj):
+    return type(obj) is type(int)
+
+
+def generate_c_function_stub(module, name, obj, output, self_var=None, sigs={}, class_name=None,
+                             class_sigs={}):
+    if self_var:
+        self_arg = '%s, ' % self_var
+    else:
+        self_arg = ''
+    if name in ('__new__', '__init__') and name not in sigs and class_name in class_sigs:
+        sig = class_sigs[class_name]
+    else:
+        docstr = getattr(obj, '__doc__', None)
+        sig = infer_sig_from_docstring(docstr, name)
+        if not sig:
+            if class_name and name not in sigs:
+                sig = infer_method_sig(name)
+            else:
+                sig = sigs.get(name, '(*args, **kwargs)')
+    sig = sig[1:-1]
+    if sig:
+        if sig.split(',', 1)[0] == self_var:
+            self_arg = ''
+    else:
+        self_arg = self_arg.replace(', ', '')
+    output.append('def %s(%s%s): ...' % (name, self_arg, sig))
+
+
+def generate_c_type_stub(module, class_name, obj, output, sigs={}, class_sigs={}):
+    items = sorted(obj.__dict__.items(), key=lambda x: method_name_sort_key(x[0]))
+    methods = []
+    done = set()
+    for attr, value in items:
+        if is_c_method(value) or is_c_classmethod(value):
+            done.add(attr)
+            if not is_skipped_attribute(attr):
+                if is_c_classmethod(value):
+                    methods.append('@classmethod')
+                    self_var = 'cls'
+                else:
+                    self_var = 'self'
+                if attr == '__new__':
+                    # TODO: We should support __new__.
+                    if '__init__' in obj.__dict__:
+                        # Avoid duplicate functions if both are present.
+                        # But is there any case where .__new__() has a
+                        # better signature than __init__() ?
+                        continue
+                    attr = '__init__'
+                generate_c_function_stub(module, attr, value, methods, self_var, sigs=sigs,
+                                         class_name=class_name, class_sigs=class_sigs)
+    variables = []
+    for attr, value in items:
+        if is_skipped_attribute(attr):
+            continue
+        if attr not in done:
+            variables.append('%s = ... # type: Any' % attr)
+    all_bases = obj.mro()[1:]
+    if all_bases[-1] is object:
+        # TODO: Is this always object?
+        del all_bases[-1]
+    # Remove base classes of other bases as redundant.
+    bases = []
+    for base in all_bases:
+        if not any(issubclass(b, base) for b in bases):
+            bases.append(base)
+    if bases:
+        bases_str = '(%s)' % ', '.join(base.__name__ for base in bases)
+    else:
+        bases_str = ''
+    if not methods and not variables:
+        output.append('class %s%s: ...' % (class_name, bases_str))
+    else:
+        output.append('class %s%s:' % (class_name, bases_str))
+        for variable in variables:
+            output.append('    %s' % variable)
+        for method in methods:
+            output.append('    %s' % method)
+
+
+def method_name_sort_key(name):
+    if name in ('__new__', '__init__'):
+        return (0, name)
+    if name.startswith('__') and name.endswith('__'):
+        return (2, name)
+    return (1, name)
+
+
+def is_skipped_attribute(attr):
+    return attr in ('__getattribute__',
+                    '__str__',
+                    '__repr__',
+                    '__doc__',
+                    '__dict__',
+                    '__module__',
+                    '__weakref__')  # For pickling
+
+
+def infer_method_sig(name):
+    if name.startswith('__') and name.endswith('__'):
+        name = name[2:-2]
+        if name in ('hash', 'iter', 'next', 'sizeof', 'copy', 'deepcopy', 'reduce', 'getinitargs',
+                    'int', 'float', 'trunc', 'complex', 'bool'):
+            return '()'
+        if name == 'getitem':
+            return '(index)'
+        if name == 'setitem':
+            return '(index, object)'
+        if name in ('delattr', 'getattr'):
+            return '(name)'
+        if name == 'setattr':
+            return '(name, value)'
+        if name == 'getstate':
+            return '()'
+        if name == 'setstate':
+            return '(state)'
+        if name in ('eq', 'ne', 'lt', 'le', 'gt', 'ge',
+                    'add', 'radd', 'sub', 'rsub', 'mul', 'rmul',
+                    'mod', 'rmod', 'floordiv', 'rfloordiv', 'truediv', 'rtruediv',
+                    'divmod', 'rdivmod', 'pow', 'rpow'):
+            return '(other)'
+        if name in ('neg', 'pos'):
+            return '()'
+    return '(*args, **kwargs)'
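+
+
+# Editorial illustration (not upstream code): sample inferred signatures:
+#     infer_method_sig('__getitem__')  -> '(index)'
+#     infer_method_sig('__add__')      -> '(other)'
+#     infer_method_sig('__hash__')     -> '()'
+#     infer_method_sig('frobnicate')   -> '(*args, **kwargs)'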
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
new file mode 100644
index 0000000..b254798
--- /dev/null
+++ b/mypy/stubutil.py
@@ -0,0 +1,102 @@
+import re
+import sys
+
+
+def parse_signature(sig):
+    m = re.match(r'([.a-zA-Z0-9_]+)\(([^)]*)\)', sig)
+    if not m:
+        return None
+    name = m.group(1)
+    name = name.split('.')[-1]
+    arg_string = m.group(2)
+    if not arg_string.strip():
+        return (name, [], [])
+    args = [arg.strip() for arg in arg_string.split(',')]
+    fixed = []
+    optional = []
+    i = 0
+    while i < len(args):
+        if args[i].startswith('[') or '=' in args[i]:
+            break
+        fixed.append(args[i].rstrip('['))
+        i += 1
+        if args[i - 1].endswith('['):
+            break
+    while i < len(args):
+        arg = args[i]
+        arg = arg.strip('[]')
+        arg = arg.split('=')[0]
+        optional.append(arg)
+        i += 1
+    return (name, fixed, optional)
+
+
+def build_signature(fixed, optional):
+    args = fixed[:]
+    for arg in optional:
+        if arg.startswith('*'):
+            args.append(arg)
+        else:
+            args.append('%s=...' % arg)
+    sig = '(%s)' % ', '.join(args)
+    # Ad-hoc fixes.
+    sig = sig.replace('(self)', '')
+    return sig
+
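+# Editorial illustration (not upstream code): parse_signature() splits a documented
+# signature into fixed and optional argument lists, and build_signature() turns them
+# back into a stub-style signature:
+#     parse_signature('str.replace(old, new[, count])')
+#         -> ('replace', ['old', 'new'], ['count'])
+#     build_signature(['old', 'new'], ['count'])
+#         -> '(old, new, count=...)'
+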
+
+def parse_all_signatures(lines):
+    sigs = []
+    class_sigs = []
+    for line in lines:
+        line = line.strip()
+        m = re.match(r'\.\. *(function|method|class) *:: *[a-zA-Z_]', line)
+        if m:
+            sig = line.split('::')[1].strip()
+            parsed = parse_signature(sig)
+            if parsed:
+                name, fixed, optional = parsed
+                if m.group(1) != 'class':
+                    sigs.append((name, build_signature(fixed, optional)))
+                else:
+                    class_sigs.append((name, build_signature(fixed, optional)))
+
+    return sorted(sigs), sorted(class_sigs)
+
+
+def find_unique_signatures(sigs):
+    sig_map = {}
+    for name, sig in sigs:
+        sig_map.setdefault(name, []).append(sig)
+    result = []
+    for name, name_sigs in sig_map.items():
+        if len(set(name_sigs)) == 1:
+            result.append((name, name_sigs[0]))
+    return sorted(result)
+
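+# Editorial illustration (not upstream code): only names whose collected signatures
+# all agree are kept, so
+#     find_unique_signatures([('f', '(x)'), ('f', '(x)'), ('g', '(y)'), ('g', '(z)')])
+# returns [('f', '(x)')]; 'g' is dropped because its signatures conflict.
+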
+
+def is_c_module(module):
+    return '__file__' not in module.__dict__ or module.__dict__['__file__'].endswith('.so')
+
+
+def write_header(file, module_name, pyversion=(3, 5)):
+    if module_name:
+        if pyversion[0] >= 3:
+            version = '%d.%d' % (sys.version_info.major,
+                                 sys.version_info.minor)
+        else:
+            version = '2'
+        file.write('# Stubs for %s (Python %s)\n' % (module_name, version))
+    file.write(
+        '#\n'
+        '# NOTE: This dynamically typed stub was automatically generated by stubgen.\n\n')
+
+
+def infer_sig_from_docstring(docstr, name):
+    if not docstr:
+        return None
+    docstr = docstr.lstrip()
+    m = re.match(r'%s(\([a-zA-Z0-9_=, ]*\))' % name, docstr)
+    if m:
+        return m.group(1)
+    else:
+        return None
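+
+
+# Editorial illustration (not upstream code): if a C function's docstring starts with
+# its own signature, that signature is reused for the stub:
+#     infer_sig_from_docstring('connect(host, port=80)\n\nOpen a socket.', 'connect')
+#         -> '(host, port=80)'
+# Anything else (including a missing docstring) yields None.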
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
new file mode 100644
index 0000000..38958a3
--- /dev/null
+++ b/mypy/subtypes.py
@@ -0,0 +1,346 @@
+from typing import cast, List, Dict, Callable
+
+from mypy.types import (
+    Type, AnyType, UnboundType, TypeVisitor, ErrorType, Void, NoneTyp,
+    Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType, TypeList,
+    PartialType, DeletedType, is_named_instance
+)
+import mypy.applytype
+import mypy.constraints
+# Circular import; done in the function instead.
+# import mypy.solve
+from mypy import messages, sametypes
+from mypy.nodes import CONTRAVARIANT, COVARIANT
+from mypy.maptype import map_instance_to_supertype
+
+
+TypeParameterChecker = Callable[[Type, Type, int], bool]
+
+
+def check_type_parameter(lefta: Type, righta: Type, variance: int) -> bool:
+    if variance == COVARIANT:
+        return is_subtype(lefta, righta, check_type_parameter)
+    elif variance == CONTRAVARIANT:
+        return is_subtype(righta, lefta, check_type_parameter)
+    else:
+        return is_equivalent(lefta, righta, check_type_parameter)
+
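+# Editorial note (not upstream code): check_type_parameter() encodes variance.
+# For a covariant type variable, C[int] <: C[object] holds when int <: object;
+# for a contravariant one the direction is reversed; an invariant type variable
+# requires the arguments to be equivalent (subtypes in both directions).
+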
+
+def is_subtype(left: Type, right: Type,
+               type_parameter_checker: TypeParameterChecker = check_type_parameter) -> bool:
+    """Is 'left' subtype of 'right'?
+
+    Also consider Any to be a subtype of any type, and vice versa. This
+    recursively applies to components of composite types (List[int] is a subtype
+    of List[Any], for example).
+
+    type_parameter_checker is used to check the type parameters (for example,
+    A with B in is_subtype(C[A], C[B])). The default checks for a subtype relation
+    between the type arguments (e.g., A and B), taking the variance of the
+    type var into account.
+    """
+    if (isinstance(right, AnyType) or isinstance(right, UnboundType)
+            or isinstance(right, ErasedType)):
+        return True
+    elif isinstance(right, UnionType) and not isinstance(left, UnionType):
+        return any(is_subtype(left, item, type_parameter_checker)
+                   for item in cast(UnionType, right).items)
+    else:
+        return left.accept(SubtypeVisitor(right, type_parameter_checker))
+
+
+def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool:
+    def ignore_tvars(s: Type, t: Type, v: int) -> bool:
+        return True
+    return is_subtype(left, right, ignore_tvars)
+
+
+def is_equivalent(a: Type, b: Type,
+                  type_parameter_checker=check_type_parameter) -> bool:
+    return is_subtype(a, b, type_parameter_checker) and is_subtype(b, a, type_parameter_checker)
+
+
+class SubtypeVisitor(TypeVisitor[bool]):
+
+    def __init__(self, right: Type,
+                 type_parameter_checker: TypeParameterChecker) -> None:
+        self.right = right
+        self.check_type_parameter = type_parameter_checker
+
+    # visit_x(left) means: is left (which is an instance of X) a subtype of
+    # right?
+
+    def visit_unbound_type(self, left: UnboundType) -> bool:
+        return True
+
+    def visit_error_type(self, left: ErrorType) -> bool:
+        return False
+
+    def visit_type_list(self, t: TypeList) -> bool:
+        assert False, 'Not supported'
+
+    def visit_any(self, left: AnyType) -> bool:
+        return True
+
+    def visit_void(self, left: Void) -> bool:
+        return isinstance(self.right, Void)
+
+    def visit_none_type(self, left: NoneTyp) -> bool:
+        return not isinstance(self.right, Void)
+
+    def visit_erased_type(self, left: ErasedType) -> bool:
+        return True
+
+    def visit_deleted_type(self, left: DeletedType) -> bool:
+        return True
+
+    def visit_instance(self, left: Instance) -> bool:
+        right = self.right
+        if isinstance(right, Instance):
+            if left.type._promote and is_subtype(left.type._promote,
+                                                 self.right,
+                                                 self.check_type_parameter):
+                return True
+            rname = right.type.fullname()
+            if not left.type.has_base(rname) and rname != 'builtins.object':
+                return False
+
+            # Map left type to corresponding right instances.
+            t = map_instance_to_supertype(left, right.type)
+
+            return all(self.check_type_parameter(lefta, righta, tvar.variance)
+                       for lefta, righta, tvar in
+                       zip(t.args, right.args, right.type.defn.type_vars))
+        else:
+            return False
+
+    def visit_type_var(self, left: TypeVarType) -> bool:
+        right = self.right
+        if isinstance(right, TypeVarType):
+            return left.id == right.id
+        else:
+            return is_named_instance(self.right, 'builtins.object')
+
+    def visit_callable_type(self, left: CallableType) -> bool:
+        right = self.right
+        if isinstance(right, CallableType):
+            return is_callable_subtype(left, right)
+        elif isinstance(right, Overloaded):
+            return all(is_subtype(left, item, self.check_type_parameter)
+                       for item in right.items())
+        elif isinstance(right, Instance):
+            return is_subtype(left.fallback, right)
+        else:
+            return False
+
+    def visit_tuple_type(self, left: TupleType) -> bool:
+        right = self.right
+        if isinstance(right, Instance):
+            if is_named_instance(right, 'builtins.object'):
+                return True
+            if is_named_instance(right, 'builtins.tuple'):
+                target_item_type = right.args[0]
+                return all(is_subtype(item, target_item_type)
+                           for item in left.items)
+            elif is_named_instance(right, 'typing.Sized'):
+                return True
+            elif (is_named_instance(right, 'typing.Iterable') or
+                  is_named_instance(right, 'typing.Container') or
+                  is_named_instance(right, 'typing.Sequence') or
+                  is_named_instance(right, 'typing.Reversible')):
+                iter_type = right.args[0]
+                return all(is_subtype(li, iter_type) for li in left.items)
+            return False
+        elif isinstance(right, TupleType):
+            if len(left.items) != len(right.items):
+                return False
+            for i in range(len(left.items)):
+                if not is_subtype(left.items[i], right.items[i], self.check_type_parameter):
+                    return False
+            if not is_subtype(left.fallback, right.fallback, self.check_type_parameter):
+                return False
+            return True
+        else:
+            return False
+
+    def visit_overloaded(self, left: Overloaded) -> bool:
+        right = self.right
+        if isinstance(right, Instance):
+            return is_subtype(left.fallback, right)
+        elif isinstance(right, CallableType) or is_named_instance(
+                right, 'builtins.type'):
+            for item in left.items():
+                if is_subtype(item, right, self.check_type_parameter):
+                    return True
+            return False
+        elif isinstance(right, Overloaded):
+            # TODO: this may be too restrictive
+            if len(left.items()) != len(right.items()):
+                return False
+            for i in range(len(left.items())):
+                if not is_subtype(left.items()[i], right.items()[i], self.check_type_parameter):
+                    return False
+            return True
+        elif isinstance(right, UnboundType):
+            return True
+        else:
+            return False
+
+    def visit_union_type(self, left: UnionType) -> bool:
+        return all(is_subtype(item, self.right, self.check_type_parameter)
+                   for item in left.items)
+
+    def visit_partial_type(self, left: PartialType) -> bool:
+        # This is indeterminate as we don't really know the complete type yet.
+        raise RuntimeError
+
+
+def is_callable_subtype(left: CallableType, right: CallableType,
+                        ignore_return: bool = False) -> bool:
+    """Is left a subtype of right?"""
+    # TODO: Support named arguments, **args, etc.
+    # Non-type cannot be a subtype of type.
+    if right.is_type_obj() and not left.is_type_obj():
+        return False
+    if right.variables:
+        # Subtyping is not currently supported for a generic function as the supertype.
+        return False
+    if left.variables:
+        # Infer and apply type arguments for left's type variables, making it non-generic.
+        left = unify_generic_callable(left, right, ignore_return=ignore_return)
+        if left is None:
+            return False
+
+    # Check return types.
+    if not ignore_return and not is_subtype(left.ret_type, right.ret_type):
+        return False
+
+    if right.is_ellipsis_args:
+        return True
+
+    # Check argument types.
+    if left.min_args > right.min_args:
+        return False
+    if left.is_var_arg:
+        return is_var_arg_callable_subtype_helper(left, right)
+    if right.is_var_arg:
+        return False
+    if len(left.arg_types) < len(right.arg_types):
+        return False
+    for i in range(len(right.arg_types)):
+        if not is_subtype(right.arg_types[i], left.arg_types[i]):
+            return False
+    return True
+
+
+def is_var_arg_callable_subtype_helper(left: CallableType, right: CallableType) -> bool:
+    """Is left a subtype of right, assuming left has *args?
+
+    See also is_callable_subtype for additional assumptions we can make.
+    """
+    left_fixed = left.max_fixed_args()
+    right_fixed = right.max_fixed_args()
+    num_fixed_matching = min(left_fixed, right_fixed)
+    for i in range(num_fixed_matching):
+        if not is_subtype(right.arg_types[i], left.arg_types[i]):
+            return False
+    if not right.is_var_arg:
+        for i in range(num_fixed_matching, len(right.arg_types)):
+            if not is_subtype(right.arg_types[i], left.arg_types[-1]):
+                return False
+        return True
+    else:
+        for i in range(left_fixed, right_fixed):
+            if not is_subtype(right.arg_types[i], left.arg_types[-1]):
+                return False
+        for i in range(right_fixed, left_fixed):
+            if not is_subtype(right.arg_types[-1], left.arg_types[i]):
+                return False
+        return is_subtype(right.arg_types[-1], left.arg_types[-1])
+
+
+def unify_generic_callable(type: CallableType, target: CallableType,
+                           ignore_return: bool) -> CallableType:
+    """Try to unify a generic callable type with another callable type.
+
+    Return unified CallableType if successful; otherwise, return None.
+    """
+    import mypy.solve
+    constraints = []  # type: List[mypy.constraints.Constraint]
+    for arg_type, target_arg_type in zip(type.arg_types, target.arg_types):
+        c = mypy.constraints.infer_constraints(
+            arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF)
+        constraints.extend(c)
+    if not ignore_return:
+        c = mypy.constraints.infer_constraints(
+            type.ret_type, target.ret_type, mypy.constraints.SUBTYPE_OF)
+        constraints.extend(c)
+    type_var_ids = [tvar.id for tvar in type.variables]
+    inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints)
+    if None in inferred_vars:
+        return None
+    msg = messages.temp_message_builder()
+    applied = mypy.applytype.apply_generic_arguments(type, inferred_vars, msg, context=target)
+    if msg.is_errors() or not isinstance(applied, CallableType):
+        return None
+    return cast(CallableType, applied)
+
+
+def restrict_subtype_away(t: Type, s: Type) -> Type:
+    """Return a supertype of (t intersect not s)
+
+    Currently just remove elements of a union type.
+    """
+    if isinstance(t, UnionType):
+        new_items = [item for item in t.items if not is_subtype(item, s)]
+        return UnionType.make_union(new_items)
+    else:
+        return t
+
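+# Editorial illustration (not upstream code): restricting a union drops the items
+# that are subtypes of s; e.g. with t = Union[int, str] and s = int the result is
+# the union of the remaining items (here just str). Non-union types are returned
+# unchanged.
+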
+
+def is_proper_subtype(t: Type, s: Type) -> bool:
+    """Check if t is a proper subtype of s?
+
+    For proper subtypes, there's no need to rely on compatibility due to
+    Any types. Any instance type t is also a proper subtype of t.
+    """
+    # FIX tuple types
+    if isinstance(t, Instance):
+        if isinstance(s, Instance):
+            if not t.type.has_base(s.type.fullname()):
+                return False
+
+            def check_argument(left: Type, right: Type, variance: int) -> bool:
+                if variance == COVARIANT:
+                    return is_proper_subtype(left, right)
+                elif variance == CONTRAVARIANT:
+                    return is_proper_subtype(right, left)
+                else:
+                    return sametypes.is_same_type(left, right)
+
+            # Map left type to corresponding right instances.
+            t = map_instance_to_supertype(t, s.type)
+
+            return all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in
+                       zip(t.args, s.args, s.type.defn.type_vars))
+        return False
+    else:
+        return sametypes.is_same_type(t, s)
+
+
+def is_more_precise(t: Type, s: Type) -> bool:
+    """Check if t is a more precise type than s.
+
+    If t is a proper subtype of s, then t is also more precise than s. Also, if
+    s is Any, t is more precise than s for any t. Finally, if t is the same
+    type as s, t is more precise than s.
+    """
+    # TODO Should List[int] be more precise than List[Any]?
+    if isinstance(s, AnyType):
+        return True
+    if isinstance(s, Instance):
+        if isinstance(t, CallableType):
+            # Fall back to subclass check and ignore other properties of the callable.
+            return is_proper_subtype(t.fallback, s)
+        return is_proper_subtype(t, s)
+    return sametypes.is_same_type(t, s)
diff --git a/mypy/traverser.py b/mypy/traverser.py
new file mode 100644
index 0000000..d91c6e7
--- /dev/null
+++ b/mypy/traverser.py
@@ -0,0 +1,219 @@
+"""Generic node traverser visitor"""
+
+from typing import TypeVar, Generic
+
+from mypy.visitor import NodeVisitor
+from mypy.nodes import (
+    Block, MypyFile, FuncItem, CallExpr, ClassDef, Decorator, FuncDef,
+    ExpressionStmt, AssignmentStmt, OperatorAssignmentStmt, WhileStmt,
+    ForStmt, ReturnStmt, AssertStmt, DelStmt, IfStmt, RaiseStmt,
+    TryStmt, WithStmt, MemberExpr, OpExpr, SliceExpr, CastExpr,
+    UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr,
+    GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication,
+    FuncExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr,
+    YieldExpr
+)
+
+
+T = TypeVar('T')
+
+
+class TraverserVisitor(NodeVisitor[T], Generic[T]):
+    """A parse tree visitor that traverses the parse tree during visiting.
+
+    It does not perform any actions outside the traversal. Subclasses
+    should override visit methods to perform actions during
+    traversal. Calling the superclass method allows reusing the
+    traversal implementation.
+    """
+
+    # Visit methods
+
+    def visit_mypy_file(self, o: MypyFile) -> T:
+        for d in o.defs:
+            d.accept(self)
+
+    def visit_block(self, block: Block) -> T:
+        for s in block.body:
+            s.accept(self)
+
+    def visit_func(self, o: FuncItem) -> T:
+        for arg in o.arguments:
+            init = arg.initialization_statement
+            if init is not None:
+                init.accept(self)
+
+        for arg in o.arguments:
+            self.visit_var(arg.variable)
+
+        o.body.accept(self)
+
+    def visit_func_def(self, o: FuncDef) -> T:
+        self.visit_func(o)
+
+    def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> T:
+        for item in o.items:
+            item.accept(self)
+
+    def visit_class_def(self, o: ClassDef) -> T:
+        o.defs.accept(self)
+
+    def visit_decorator(self, o: Decorator) -> T:
+        o.func.accept(self)
+        o.var.accept(self)
+        for decorator in o.decorators:
+            decorator.accept(self)
+
+    def visit_expression_stmt(self, o: ExpressionStmt) -> T:
+        o.expr.accept(self)
+
+    def visit_assignment_stmt(self, o: AssignmentStmt) -> T:
+        o.rvalue.accept(self)
+        for l in o.lvalues:
+            l.accept(self)
+
+    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> T:
+        o.rvalue.accept(self)
+        o.lvalue.accept(self)
+
+    def visit_while_stmt(self, o: WhileStmt) -> T:
+        o.expr.accept(self)
+        o.body.accept(self)
+        if o.else_body:
+            o.else_body.accept(self)
+
+    def visit_for_stmt(self, o: ForStmt) -> T:
+        o.index.accept(self)
+        o.expr.accept(self)
+        o.body.accept(self)
+        if o.else_body:
+            o.else_body.accept(self)
+
+    def visit_return_stmt(self, o: ReturnStmt) -> T:
+        if o.expr is not None:
+            o.expr.accept(self)
+
+    def visit_assert_stmt(self, o: AssertStmt) -> T:
+        if o.expr is not None:
+            o.expr.accept(self)
+
+    def visit_del_stmt(self, o: DelStmt) -> T:
+        if o.expr is not None:
+            o.expr.accept(self)
+
+    def visit_if_stmt(self, o: IfStmt) -> T:
+        for e in o.expr:
+            e.accept(self)
+        for b in o.body:
+            b.accept(self)
+        if o.else_body:
+            o.else_body.accept(self)
+
+    def visit_raise_stmt(self, o: RaiseStmt) -> T:
+        if o.expr is not None:
+            o.expr.accept(self)
+        if o.from_expr is not None:
+            o.from_expr.accept(self)
+
+    def visit_try_stmt(self, o: TryStmt) -> T:
+        o.body.accept(self)
+        for i in range(len(o.types)):
+            if o.types[i]:
+                o.types[i].accept(self)
+            o.handlers[i].accept(self)
+        if o.else_body is not None:
+            o.else_body.accept(self)
+        if o.finally_body is not None:
+            o.finally_body.accept(self)
+
+    def visit_with_stmt(self, o: WithStmt) -> T:
+        for i in range(len(o.expr)):
+            o.expr[i].accept(self)
+            if o.target[i] is not None:
+                o.target[i].accept(self)
+        o.body.accept(self)
+
+    def visit_member_expr(self, o: MemberExpr) -> T:
+        o.expr.accept(self)
+
+    def visit_yield_from_expr(self, o: YieldFromExpr) -> T:
+        o.expr.accept(self)
+
+    def visit_yield_expr(self, o: YieldExpr) -> T:
+        if o.expr:
+            o.expr.accept(self)
+
+    def visit_call_expr(self, o: CallExpr) -> T:
+        for a in o.args:
+            a.accept(self)
+        o.callee.accept(self)
+        if o.analyzed:
+            o.analyzed.accept(self)
+
+    def visit_op_expr(self, o: OpExpr) -> T:
+        o.left.accept(self)
+        o.right.accept(self)
+
+    def visit_comparison_expr(self, o: ComparisonExpr) -> T:
+        for operand in o.operands:
+            operand.accept(self)
+
+    def visit_slice_expr(self, o: SliceExpr) -> T:
+        if o.begin_index is not None:
+            o.begin_index.accept(self)
+        if o.end_index is not None:
+            o.end_index.accept(self)
+        if o.stride is not None:
+            o.stride.accept(self)
+
+    def visit_cast_expr(self, o: CastExpr) -> T:
+        o.expr.accept(self)
+
+    def visit_unary_expr(self, o: UnaryExpr) -> T:
+        o.expr.accept(self)
+
+    def visit_list_expr(self, o: ListExpr) -> T:
+        for item in o.items:
+            item.accept(self)
+
+    def visit_tuple_expr(self, o: TupleExpr) -> T:
+        for item in o.items:
+            item.accept(self)
+
+    def visit_dict_expr(self, o: DictExpr) -> T:
+        for k, v in o.items:
+            k.accept(self)
+            v.accept(self)
+
+    def visit_set_expr(self, o: SetExpr) -> T:
+        for item in o.items:
+            item.accept(self)
+
+    def visit_index_expr(self, o: IndexExpr) -> T:
+        o.base.accept(self)
+        o.index.accept(self)
+        if o.analyzed:
+            o.analyzed.accept(self)
+
+    def visit_generator_expr(self, o: GeneratorExpr) -> T:
+        for index, sequence, conditions in zip(o.indices, o.sequences,
+                                               o.condlists):
+            sequence.accept(self)
+            index.accept(self)
+            for cond in conditions:
+                cond.accept(self)
+        o.left_expr.accept(self)
+
+    def visit_list_comprehension(self, o: ListComprehension) -> T:
+        o.generator.accept(self)
+
+    def visit_conditional_expr(self, o: ConditionalExpr) -> T:
+        o.cond.accept(self)
+        o.if_expr.accept(self)
+        o.else_expr.accept(self)
+
+    def visit_type_application(self, o: TypeApplication) -> T:
+        o.expr.accept(self)
+
+    def visit_func_expr(self, o: FuncExpr) -> T:
+        self.visit_func(o)
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
new file mode 100644
index 0000000..b0b9572
--- /dev/null
+++ b/mypy/treetransform.py
@@ -0,0 +1,522 @@
+"""Base visitor that implements an identity AST transform.
+
+Subclass TransformVisitor to perform non-trivial transformations.
+"""
+
+from typing import List, Dict, cast
+
+from mypy.nodes import (
+    MypyFile, Import, Node, ImportAll, ImportFrom, FuncItem, FuncDef,
+    OverloadedFuncDef, ClassDef, Decorator, Block, Var,
+    OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt,
+    RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt,
+    PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt,
+    CastExpr, TupleExpr, GeneratorExpr, ListComprehension, ListExpr,
+    ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr,
+    UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr,
+    SliceExpr, OpExpr, UnaryExpr, FuncExpr, TypeApplication, PrintStmt,
+    SymbolTable, RefExpr, TypeVarExpr, PromoteExpr,
+    ComparisonExpr, TempNode, StarExpr,
+    YieldFromExpr, NamedTupleExpr, NonlocalDecl, SetComprehension,
+    DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr,
+    YieldExpr, ExecStmt, Argument, BackquoteExpr
+)
+from mypy.types import Type, FunctionLike, Instance
+from mypy.visitor import NodeVisitor
+
+
+class TransformVisitor(NodeVisitor[Node]):
+    """Transform a semantically analyzed AST (or subtree) to an identical copy.
+
+    Use the node() method to transform an AST node.
+
+    Subclass to perform a non-identity transform.
+
+    Notes:
+
+     * Do not duplicate TypeInfo nodes. This would generally not be desirable.
+     * Only some name binding cross-references are updated: those that
+       refer to Var nodes, not those targeting ClassDef, TypeInfo or FuncDef
+       nodes.
+     * Types are not transformed, but you can override type() to also perform
+       type transformation.
+
+    TODO nested classes and functions have not been tested well enough
+    """
+
+    def __init__(self) -> None:
+        # There may be multiple references to a Var node. Keep track of
+        # Var translations using a dictionary.
+        self.var_map = {}  # type: Dict[Var, Var]
+
+    def visit_mypy_file(self, node: MypyFile) -> Node:
+        # NOTE: The 'names' and 'imports' instance variables will be empty!
+        new = MypyFile(self.nodes(node.defs), [], node.is_bom,
+                       ignored_lines=set(node.ignored_lines))
+        new._name = node._name
+        new._fullname = node._fullname
+        new.path = node.path
+        new.names = SymbolTable()
+        return new
+
+    def visit_import(self, node: Import) -> Node:
+        return Import(node.ids[:])
+
+    def visit_import_from(self, node: ImportFrom) -> Node:
+        return ImportFrom(node.id, node.relative, node.names[:])
+
+    def visit_import_all(self, node: ImportAll) -> Node:
+        return ImportAll(node.id, node.relative)
+
+    def copy_argument(self, argument: Argument) -> Argument:
+        init_stmt = None  # type: AssignmentStmt
+
+        if argument.initialization_statement:
+            init_lvalue = cast(
+                NameExpr,
+                self.node(argument.initialization_statement.lvalues[0]),
+            )
+            init_lvalue.set_line(argument.line)
+            init_stmt = AssignmentStmt(
+                [init_lvalue],
+                self.node(argument.initialization_statement.rvalue),
+                self.optional_type(argument.initialization_statement.type),
+            )
+
+        arg = Argument(
+            self.visit_var(argument.variable),
+            argument.type_annotation,
+            argument.initializer,
+            argument.kind,
+            init_stmt,
+        )
+
+        # Refresh the line numbers of the argument and its nested nodes.
+        arg.set_line(argument.line)
+
+        return arg
+
+    def visit_func_def(self, node: FuncDef) -> FuncDef:
+        # Note that a FuncDef must be transformed to a FuncDef.
+        new = FuncDef(node.name(),
+                      [self.copy_argument(arg) for arg in node.arguments],
+                      self.block(node.body),
+                      cast(FunctionLike, self.optional_type(node.type)))
+
+        self.copy_function_attributes(new, node)
+
+        new._fullname = node._fullname
+        new.is_decorated = node.is_decorated
+        new.is_conditional = node.is_conditional
+        new.is_abstract = node.is_abstract
+        new.is_static = node.is_static
+        new.is_class = node.is_class
+        new.is_property = node.is_property
+        new.original_def = node.original_def
+        return new
+
+    def visit_func_expr(self, node: FuncExpr) -> Node:
+        new = FuncExpr([self.copy_argument(arg) for arg in node.arguments],
+                       self.block(node.body),
+                       cast(FunctionLike, self.optional_type(node.type)))
+        self.copy_function_attributes(new, node)
+        return new
+
+    def copy_function_attributes(self, new: FuncItem,
+                                 original: FuncItem) -> None:
+        new.info = original.info
+        new.min_args = original.min_args
+        new.max_pos = original.max_pos
+        new.is_implicit = original.is_implicit
+        new.is_overload = original.is_overload
+        new.is_generator = original.is_generator
+
+    def duplicate_inits(self,
+                        inits: List[AssignmentStmt]) -> List[AssignmentStmt]:
+        result = []  # type: List[AssignmentStmt]
+        for init in inits:
+            if init:
+                result.append(self.duplicate_assignment(init))
+            else:
+                result.append(None)
+        return result
+
+    def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> Node:
+        items = [self.visit_decorator(decorator)
+                 for decorator in node.items]
+        for newitem, olditem in zip(items, node.items):
+            newitem.line = olditem.line
+        new = OverloadedFuncDef(items)
+        new._fullname = node._fullname
+        new.type = self.type(node.type)
+        new.info = node.info
+        return new
+
+    def visit_class_def(self, node: ClassDef) -> Node:
+        new = ClassDef(node.name,
+                       self.block(node.defs),
+                       node.type_vars,
+                       self.nodes(node.base_type_exprs),
+                       node.metaclass)
+        new.fullname = node.fullname
+        new.info = node.info
+        new.base_types = []
+        for base in node.base_types:
+            new.base_types.append(cast(Instance, self.type(base)))
+        new.decorators = [decorator.accept(self)
+                          for decorator in node.decorators]
+        new.is_builtinclass = node.is_builtinclass
+        return new
+
+    def visit_global_decl(self, node: GlobalDecl) -> Node:
+        return GlobalDecl(node.names[:])
+
+    def visit_nonlocal_decl(self, node: NonlocalDecl) -> Node:
+        return NonlocalDecl(node.names[:])
+
+    def visit_block(self, node: Block) -> Block:
+        return Block(self.nodes(node.body))
+
+    def visit_decorator(self, node: Decorator) -> Decorator:
+        # Note that a Decorator must be transformed to a Decorator.
+        func = self.visit_func_def(node.func)
+        func.line = node.func.line
+        new = Decorator(func, self.nodes(node.decorators),
+                        self.visit_var(node.var))
+        new.is_overload = node.is_overload
+        return new
+
+    def visit_var(self, node: Var) -> Var:
+        # Note that a Var must be transformed to a Var.
+        if node in self.var_map:
+            return self.var_map[node]
+        new = Var(node.name(), self.optional_type(node.type))
+        new.line = node.line
+        new._fullname = node._fullname
+        new.info = node.info
+        new.is_self = node.is_self
+        new.is_ready = node.is_ready
+        new.is_initialized_in_class = node.is_initialized_in_class
+        new.is_staticmethod = node.is_staticmethod
+        new.is_classmethod = node.is_classmethod
+        new.is_property = node.is_property
+        new.set_line(node.line)
+        self.var_map[node] = new
+        return new
+
+    def visit_expression_stmt(self, node: ExpressionStmt) -> Node:
+        return ExpressionStmt(self.node(node.expr))
+
+    def visit_assignment_stmt(self, node: AssignmentStmt) -> Node:
+        return self.duplicate_assignment(node)
+
+    def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt:
+        new = AssignmentStmt(self.nodes(node.lvalues),
+                             self.node(node.rvalue),
+                             self.optional_type(node.type))
+        new.line = node.line
+        return new
+
+    def visit_operator_assignment_stmt(self,
+                                       node: OperatorAssignmentStmt) -> Node:
+        return OperatorAssignmentStmt(node.op,
+                                      self.node(node.lvalue),
+                                      self.node(node.rvalue))
+
+    def visit_while_stmt(self, node: WhileStmt) -> Node:
+        return WhileStmt(self.node(node.expr),
+                         self.block(node.body),
+                         self.optional_block(node.else_body))
+
+    def visit_for_stmt(self, node: ForStmt) -> Node:
+        return ForStmt(self.node(node.index),
+                       self.node(node.expr),
+                       self.block(node.body),
+                       self.optional_block(node.else_body))
+
+    def visit_return_stmt(self, node: ReturnStmt) -> Node:
+        return ReturnStmt(self.optional_node(node.expr))
+
+    def visit_assert_stmt(self, node: AssertStmt) -> Node:
+        return AssertStmt(self.node(node.expr))
+
+    def visit_del_stmt(self, node: DelStmt) -> Node:
+        return DelStmt(self.node(node.expr))
+
+    def visit_if_stmt(self, node: IfStmt) -> Node:
+        return IfStmt(self.nodes(node.expr),
+                      self.blocks(node.body),
+                      self.optional_block(node.else_body))
+
+    def visit_break_stmt(self, node: BreakStmt) -> Node:
+        return BreakStmt()
+
+    def visit_continue_stmt(self, node: ContinueStmt) -> Node:
+        return ContinueStmt()
+
+    def visit_pass_stmt(self, node: PassStmt) -> Node:
+        return PassStmt()
+
+    def visit_raise_stmt(self, node: RaiseStmt) -> Node:
+        return RaiseStmt(self.optional_node(node.expr),
+                         self.optional_node(node.from_expr))
+
+    def visit_try_stmt(self, node: TryStmt) -> Node:
+        return TryStmt(self.block(node.body),
+                       self.optional_names(node.vars),
+                       self.optional_nodes(node.types),
+                       self.blocks(node.handlers),
+                       self.optional_block(node.else_body),
+                       self.optional_block(node.finally_body))
+
+    def visit_with_stmt(self, node: WithStmt) -> Node:
+        return WithStmt(self.nodes(node.expr),
+                        self.optional_nodes(node.target),
+                        self.block(node.body))
+
+    def visit_print_stmt(self, node: PrintStmt) -> Node:
+        return PrintStmt(self.nodes(node.args),
+                         node.newline,
+                         self.optional_node(node.target))
+
+    def visit_exec_stmt(self, node: ExecStmt) -> Node:
+        return ExecStmt(self.node(node.expr),
+                        self.optional_node(node.variables1),
+                        self.optional_node(node.variables2))
+
+    def visit_star_expr(self, node: StarExpr) -> Node:
+        return StarExpr(node.expr)
+
+    def visit_int_expr(self, node: IntExpr) -> Node:
+        return IntExpr(node.value)
+
+    def visit_str_expr(self, node: StrExpr) -> Node:
+        return StrExpr(node.value)
+
+    def visit_bytes_expr(self, node: BytesExpr) -> Node:
+        return BytesExpr(node.value)
+
+    def visit_unicode_expr(self, node: UnicodeExpr) -> Node:
+        return UnicodeExpr(node.value)
+
+    def visit_float_expr(self, node: FloatExpr) -> Node:
+        return FloatExpr(node.value)
+
+    def visit_complex_expr(self, node: ComplexExpr) -> Node:
+        return ComplexExpr(node.value)
+
+    def visit_ellipsis(self, node: EllipsisExpr) -> Node:
+        return EllipsisExpr()
+
+    def visit_name_expr(self, node: NameExpr) -> Node:
+        return self.duplicate_name(node)
+
+    def duplicate_name(self, node: NameExpr) -> NameExpr:
+        # This method is used when the transform result must be a NameExpr.
+        # visit_name_expr() is used when there is no such restriction.
+        new = NameExpr(node.name)
+        new.info = node.info
+        self.copy_ref(new, node)
+        return new
+
+    def visit_member_expr(self, node: MemberExpr) -> Node:
+        member = MemberExpr(self.node(node.expr),
+                            node.name)
+        if node.def_var:
+            member.def_var = self.visit_var(node.def_var)
+        self.copy_ref(member, node)
+        return member
+
+    def copy_ref(self, new: RefExpr, original: RefExpr) -> None:
+        new.kind = original.kind
+        new.fullname = original.fullname
+        target = original.node
+        if isinstance(target, Var):
+            target = self.visit_var(target)
+        new.node = target
+        new.is_def = original.is_def
+
+    def visit_yield_from_expr(self, node: YieldFromExpr) -> Node:
+        return YieldFromExpr(self.node(node.expr))
+
+    def visit_yield_expr(self, node: YieldExpr) -> Node:
+        return YieldExpr(self.node(node.expr))
+
+    def visit_call_expr(self, node: CallExpr) -> Node:
+        return CallExpr(self.node(node.callee),
+                        self.nodes(node.args),
+                        node.arg_kinds[:],
+                        node.arg_names[:],
+                        self.optional_node(node.analyzed))
+
+    def visit_op_expr(self, node: OpExpr) -> Node:
+        new = OpExpr(node.op, self.node(node.left), self.node(node.right))
+        new.method_type = self.optional_type(node.method_type)
+        return new
+
+    def visit_comparison_expr(self, node: ComparisonExpr) -> Node:
+        new = ComparisonExpr(node.operators, self.nodes(node.operands))
+        new.method_types = [self.optional_type(t) for t in node.method_types]
+        return new
+
+    def visit_cast_expr(self, node: CastExpr) -> Node:
+        return CastExpr(self.node(node.expr),
+                        self.type(node.type))
+
+    def visit_super_expr(self, node: SuperExpr) -> Node:
+        new = SuperExpr(node.name)
+        new.info = node.info
+        return new
+
+    def visit_unary_expr(self, node: UnaryExpr) -> Node:
+        new = UnaryExpr(node.op, self.node(node.expr))
+        new.method_type = self.optional_type(node.method_type)
+        return new
+
+    def visit_list_expr(self, node: ListExpr) -> Node:
+        return ListExpr(self.nodes(node.items))
+
+    def visit_dict_expr(self, node: DictExpr) -> Node:
+        return DictExpr([(self.node(key), self.node(value))
+                         for key, value in node.items])
+
+    def visit_tuple_expr(self, node: TupleExpr) -> Node:
+        return TupleExpr(self.nodes(node.items))
+
+    def visit_set_expr(self, node: SetExpr) -> Node:
+        return SetExpr(self.nodes(node.items))
+
+    def visit_index_expr(self, node: IndexExpr) -> Node:
+        new = IndexExpr(self.node(node.base), self.node(node.index))
+        if node.method_type:
+            new.method_type = self.type(node.method_type)
+        if node.analyzed:
+            if isinstance(node.analyzed, TypeApplication):
+                new.analyzed = self.visit_type_application(node.analyzed)
+            else:
+                new.analyzed = self.visit_type_alias_expr(node.analyzed)
+            new.analyzed.set_line(node.analyzed.line)
+        return new
+
+    def visit_type_application(self, node: TypeApplication) -> TypeApplication:
+        return TypeApplication(self.node(node.expr),
+                               self.types(node.types))
+
+    def visit_list_comprehension(self, node: ListComprehension) -> Node:
+        generator = self.duplicate_generator(node.generator)
+        generator.set_line(node.generator.line)
+        return ListComprehension(generator)
+
+    def visit_set_comprehension(self, node: SetComprehension) -> Node:
+        generator = self.duplicate_generator(node.generator)
+        generator.set_line(node.generator.line)
+        return SetComprehension(generator)
+
+    def visit_dictionary_comprehension(self, node: DictionaryComprehension) -> Node:
+        return DictionaryComprehension(self.node(node.key), self.node(node.value),
+                                       [self.node(index) for index in node.indices],
+                                       [self.node(s) for s in node.sequences],
+                                       [[self.node(cond) for cond in conditions]
+                                        for conditions in node.condlists])
+
+    def visit_generator_expr(self, node: GeneratorExpr) -> Node:
+        return self.duplicate_generator(node)
+
+    def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr:
+        return GeneratorExpr(self.node(node.left_expr),
+                             [self.node(index) for index in node.indices],
+                             [self.node(s) for s in node.sequences],
+                             [[self.node(cond) for cond in conditions]
+                              for conditions in node.condlists])
+
+    def visit_slice_expr(self, node: SliceExpr) -> Node:
+        return SliceExpr(self.optional_node(node.begin_index),
+                         self.optional_node(node.end_index),
+                         self.optional_node(node.stride))
+
+    def visit_conditional_expr(self, node: ConditionalExpr) -> Node:
+        return ConditionalExpr(self.node(node.cond),
+                               self.node(node.if_expr),
+                               self.node(node.else_expr))
+
+    def visit_backquote_expr(self, node: BackquoteExpr) -> Node:
+        return BackquoteExpr(self.node(node.expr))
+
+    def visit_type_var_expr(self, node: TypeVarExpr) -> Node:
+        return TypeVarExpr(node.name(), node.fullname(),
+                           self.types(node.values), variance=node.variance)
+
+    def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr:
+        return TypeAliasExpr(node.type)
+
+    def visit_namedtuple_expr(self, node: NamedTupleExpr) -> Node:
+        return NamedTupleExpr(node.info)
+
+    def visit__promote_expr(self, node: PromoteExpr) -> Node:
+        return PromoteExpr(node.type)
+
+    def visit_temp_node(self, node: TempNode) -> Node:
+        return TempNode(self.type(node.type))
+
+    def node(self, node: Node) -> Node:
+        new = node.accept(self)
+        new.set_line(node.line)
+        return new
+
+    # Helpers
+    #
+    # All the node helpers also propagate line numbers.
+
+    def optional_node(self, node: Node) -> Node:
+        if node:
+            return self.node(node)
+        else:
+            return None
+
+    def block(self, block: Block) -> Block:
+        new = self.visit_block(block)
+        new.line = block.line
+        return new
+
+    def optional_block(self, block: Block) -> Block:
+        if block:
+            return self.block(block)
+        else:
+            return None
+
+    def nodes(self, nodes: List[Node]) -> List[Node]:
+        return [self.node(node) for node in nodes]
+
+    def optional_nodes(self, nodes: List[Node]) -> List[Node]:
+        return [self.optional_node(node) for node in nodes]
+
+    def blocks(self, blocks: List[Block]) -> List[Block]:
+        return [self.block(block) for block in blocks]
+
+    def names(self, names: List[NameExpr]) -> List[NameExpr]:
+        return [self.duplicate_name(name) for name in names]
+
+    def optional_names(self, names: List[NameExpr]) -> List[NameExpr]:
+        result = []  # type: List[NameExpr]
+        for name in names:
+            if name:
+                result.append(self.duplicate_name(name))
+            else:
+                result.append(None)
+        return result
+
+    def type(self, type: Type) -> Type:
+        # Override this method to transform types.
+        return type
+
+    def optional_type(self, type: Type) -> Type:
+        if type:
+            return self.type(type)
+        else:
+            return None
+
+    def types(self, types: List[Type]) -> List[Type]:
+        return [self.type(type) for type in types]
+
+    def optional_types(self, types: List[Type]) -> List[Type]:
+        return [self.optional_type(type) for type in types]
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
new file mode 100644
index 0000000..444b3e6
--- /dev/null
+++ b/mypy/typeanal.py
@@ -0,0 +1,377 @@
+"""Semantic analysis of types"""
+
+from typing import Callable, cast, List, Tuple, Dict, Any, Union
+
+from mypy.types import (
+    Type, UnboundType, TypeVarType, TupleType, UnionType, Instance, AnyType, CallableType,
+    Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, StarType, PartialType,
+    EllipsisType
+)
+from mypy.nodes import (
+    GDEF, BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
+    TypeInfo, Context, SymbolTableNode, TypeVarExpr, Var, Node,
+    IndexExpr, NameExpr, TupleExpr, RefExpr
+)
+from mypy.sametypes import is_same_type
+from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
+from mypy import nodes
+
+
+type_constructors = ['typing.Tuple', 'typing.Union', 'typing.Callable']
+
+
+def analyze_type_alias(node: Node,
+                       lookup_func: Callable[[str, Context], SymbolTableNode],
+                       lookup_fqn_func: Callable[[str], SymbolTableNode],
+                       fail_func: Callable[[str, Context], None]) -> Type:
+    """Return type if node is valid as a type alias rvalue.
+
+    Return None otherwise. 'node' must have been semantically analyzed.
+    """
+    # Quickly return None if the expression doesn't look like a type. Note
+    # that we don't support straight string literals as type aliases
+    # (only string literals within index expressions).
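+    #
+    # For illustration (names below are examples only): the rvalues in
+    # 'Alias = MyClass' and 'Alias = Union[int, str]' pass this quick check,
+    # while something like 'Alias = some_function()' is not a RefExpr or
+    # IndexExpr and makes this function return None right away.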
+    if isinstance(node, RefExpr):
+        if not (isinstance(node.node, TypeInfo) or
+                node.fullname == 'typing.Any' or
+                node.kind == TYPE_ALIAS):
+            return None
+    elif isinstance(node, IndexExpr):
+        base = node.base
+        if isinstance(base, RefExpr):
+            if not (isinstance(base.node, TypeInfo) or
+                    base.fullname in type_constructors):
+                return None
+        else:
+            return None
+    else:
+        return None
+
+    # It's a type alias (though it may be an invalid one).
+    try:
+        type = expr_to_unanalyzed_type(node)
+    except TypeTranslationError:
+        fail_func('Invalid type alias', node)
+        return None
+    analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, fail_func)
+    return type.accept(analyzer)
+
+
+class TypeAnalyser(TypeVisitor[Type]):
+    """Semantic analyzer for types (semantic analysis pass 2).
+
+    Converts unbound types into bound types.
+    """
+
+    def __init__(self,
+                 lookup_func: Callable[[str, Context], SymbolTableNode],
+                 lookup_fqn_func: Callable[[str], SymbolTableNode],
+                 fail_func: Callable[[str, Context], None]) -> None:
+        self.lookup = lookup_func
+        self.lookup_fqn_func = lookup_fqn_func
+        self.fail = fail_func
+
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        sym = self.lookup(t.name, t)
+        if sym is not None:
+            if sym.node is None:
+                # UNBOUND_IMPORTED can happen if an unknown name was imported.
+                if sym.kind != UNBOUND_IMPORTED:
+                    self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
+                return AnyType()
+            fullname = sym.node.fullname()
+            if sym.kind == BOUND_TVAR:
+                if len(t.args) > 0:
+                    self.fail('Type variable "{}" used with arguments'.format(
+                        t.name), t)
+                tvar_expr = cast(TypeVarExpr, sym.node)
+                return TypeVarType(t.name, sym.tvar_id, tvar_expr.values,
+                                   self.builtin_type('builtins.object'),
+                                   tvar_expr.variance,
+                                   t.line)
+            elif fullname == 'builtins.None':
+                return Void()
+            elif fullname == 'typing.Any':
+                return AnyType()
+            elif fullname == 'typing.Tuple':
+                if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
+                    # Tuple[T, ...] (uniform, variable-length tuple)
+                    node = self.lookup_fqn_func('builtins.tuple')
+                    info = cast(TypeInfo, node.node)
+                    return Instance(info, [t.args[0].accept(self)], t.line)
+                return TupleType(self.anal_array(t.args),
+                                 self.builtin_type('builtins.tuple'))
+            elif fullname == 'typing.Union':
+                items = self.anal_array(t.args)
+                items = [item for item in items if not isinstance(item, Void)]
+                return UnionType.make_union(items)
+            elif fullname == 'typing.Optional':
+                if len(t.args) != 1:
+                    self.fail('Optional[...] must have exactly one type argument', t)
+                    return AnyType()
+                items = self.anal_array(t.args)
+                # Currently Optional[t] is just an alias for t.
+                return items[0]
+            elif fullname == 'typing.Callable':
+                return self.analyze_callable_type(t)
+            elif sym.kind == TYPE_ALIAS:
+                # TODO: Generic type aliases.
+                return sym.type_override
+            elif not isinstance(sym.node, TypeInfo):
+                name = sym.fullname
+                if name is None:
+                    name = sym.node.name()
+                if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
+                    # Something with an Any type -- make it an alias for Any in a type
+                    # context. This is slightly problematic as it allows using the type 'Any'
+                    # as a base class -- however, this will fail soon at runtime so the problem
+                    # is pretty minor.
+                    return AnyType()
+                self.fail('Invalid type "{}"'.format(name), t)
+                return t
+            info = cast(TypeInfo, sym.node)
+            if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
+                return TupleType(self.anal_array(t.args),
+                                 Instance(info, [AnyType()], t.line),
+                                 t.line)
+            else:
+                # Analyze arguments and construct Instance type. The
+                # number of type arguments and their values are
+                # checked only later, since we do not always know the
+                # valid count at this point. Thus we may construct an
+                # Instance with an invalid number of type arguments.
+                instance = Instance(info, self.anal_array(t.args), t.line)
+                if info.tuple_type is None:
+                    return instance
+                else:
+                    # The class has a Tuple[...] base class so it will be
+                    # represented as a tuple type.
+                    if t.args:
+                        self.fail('Generic tuple types not supported', t)
+                        return AnyType()
+                    return TupleType(self.anal_array(info.tuple_type.items),
+                                     fallback=instance,
+                                     line=t.line)
+        else:
+            return AnyType()
+
+    def visit_any(self, t: AnyType) -> Type:
+        return t
+
+    def visit_void(self, t: Void) -> Type:
+        return t
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        return t
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        self.fail('Invalid type', t)
+        return AnyType()
+
+    def visit_instance(self, t: Instance) -> Type:
+        return t
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        raise RuntimeError('TypeVarType is already analyzed')
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        return t.copy_modified(arg_types=self.anal_array(t.arg_types),
+                               ret_type=t.ret_type.accept(self),
+                               fallback=self.builtin_type('builtins.function'),
+                               variables=self.anal_var_defs(t.variables),
+                               bound_vars=self.anal_bound_vars(t.bound_vars))
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        if t.implicit:
+            self.fail('Invalid tuple literal type', t)
+            return AnyType()
+        star_count = sum(1 for item in t.items if isinstance(item, StarType))
+        if star_count > 1:
+            self.fail('At most one star type allowed in a tuple', t)
+            return AnyType()
+        fallback = t.fallback if t.fallback else self.builtin_type('builtins.tuple')
+        return TupleType(self.anal_array(t.items), fallback, t.line)
+
+    def visit_star_type(self, t: StarType) -> Type:
+        return StarType(t.type.accept(self), t.line)
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        return UnionType(self.anal_array(t.items), t.line)
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        assert False, "Internal error: Unexpected partial type"
+
+    def visit_ellipsis_type(self, t: EllipsisType) -> Type:
+        self.fail("Unexpected '...'", t)
+        return AnyType()
+
+    def analyze_callable_type(self, t: UnboundType) -> Type:
+        if len(t.args) != 2:
+            self.fail('Invalid function type', t)
+            return AnyType()
+        ret_type = t.args[1].accept(self)
+        fallback = self.builtin_type('builtins.function')
+        if isinstance(t.args[0], TypeList):
+            # Callable[[ARG, ...], RET] (ordinary callable type)
+            args = t.args[0].items
+            return CallableType(self.anal_array(args),
+                                [nodes.ARG_POS] * len(args),
+                                [None] * len(args),
+                                ret_type=ret_type,
+                                fallback=fallback)
+        elif isinstance(t.args[0], EllipsisType):
+            # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
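+            # For illustration: Callable[..., int] is modelled roughly as
+            # 'def (*Any, **Any) -> int' with is_ellipsis_args set, so any
+            # argument list is accepted while the return type is still checked.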
+            return CallableType([AnyType(), AnyType()],
+                                [nodes.ARG_STAR, nodes.ARG_STAR2],
+                                [None, None],
+                                ret_type=ret_type,
+                                fallback=fallback,
+                                is_ellipsis_args=True)
+        else:
+            self.fail('Invalid function type', t)
+            return AnyType()
+
+    def anal_array(self, a: List[Type]) -> List[Type]:
+        res = []  # type: List[Type]
+        for t in a:
+            res.append(t.accept(self))
+        return res
+
+    def anal_bound_vars(self,
+                        a: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
+        res = []  # type: List[Tuple[int, Type]]
+        for id, t in a:
+            res.append((id, t.accept(self)))
+        return res
+
+    def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
+        a = []  # type: List[TypeVarDef]
+        for vd in var_defs:
+            a.append(TypeVarDef(vd.name, vd.id, self.anal_array(vd.values),
+                                vd.upper_bound.accept(self),
+                                vd.variance,
+                                vd.line))
+        return a
+
+    def builtin_type(self, fully_qualified_name: str) -> Instance:
+        node = self.lookup_fqn_func(fully_qualified_name)
+        info = cast(TypeInfo, node.node)
+        return Instance(info, [])
+
+
+class TypeAnalyserPass3(TypeVisitor[None]):
+    """Analyze type argument counts and values of generic types.
+
+    This is semantic analysis pass 3 for types.
+
+    Perform these operations:
+
+     * Report error for invalid type argument counts, such as List[x, y].
+     * Make implicit Any type arguments explicit by modifying types
+       in-place. For example, modify Foo into Foo[Any] if Foo expects a single
+       type argument.
+     * If a type variable has a value restriction, ensure that the value is
+       valid. For example, reject IO[int] if the type argument must be str
+       or bytes.
+
+    We can't do this earlier than the third pass, since type argument counts
+    are only determined in pass 2, and we have to support forward references
+    to types.
+    """
+
+    def __init__(self, fail_func: Callable[[str, Context], None]) -> None:
+        self.fail = fail_func
+
+    def visit_instance(self, t: Instance) -> None:
+        info = t.type
+        # Check type argument count.
+        if len(t.args) != len(info.type_vars):
+            if len(t.args) == 0:
+                # Insert implicit 'Any' type arguments.
+                t.args = [AnyType()] * len(info.type_vars)
+                return
+            # Invalid number of type parameters.
+            n = len(info.type_vars)
+            s = '{} type arguments'.format(n)
+            if n == 0:
+                s = 'no type arguments'
+            elif n == 1:
+                s = '1 type argument'
+            act = str(len(t.args))
+            if act == '0':
+                act = 'none'
+            self.fail('"{}" expects {}, but {} given'.format(
+                info.name(), s, act), t)
+            # Construct the correct number of type arguments, as
+            # otherwise the type checker may crash as it expects
+            # things to be right.
+            t.args = [AnyType() for _ in info.type_vars]
+        elif info.defn.type_vars:
+            # Check type argument values.
+            for arg, TypeVar in zip(t.args, info.defn.type_vars):
+                if TypeVar.values:
+                    if isinstance(arg, TypeVarType):
+                        arg_values = arg.values
+                        if not arg_values:
+                            self.fail('Type variable "{}" not valid as type '
+                                      'argument value for "{}"'.format(
+                                          arg.name, info.name()), t)
+                            continue
+                    else:
+                        arg_values = [arg]
+                    self.check_type_var_values(info, arg_values,
+                                               TypeVar.values, t)
+        for arg in t.args:
+            arg.accept(self)
+
+    def check_type_var_values(self, type: TypeInfo, actuals: List[Type],
+                              valids: List[Type], context: Context) -> None:
+        for actual in actuals:
+            if (not isinstance(actual, AnyType) and
+                    not any(is_same_type(actual, value) for value in valids)):
+                self.fail('Invalid type argument value for "{}"'.format(
+                    type.name()), context)
+
+    def visit_callable_type(self, t: CallableType) -> None:
+        t.ret_type.accept(self)
+        for arg_type in t.arg_types:
+            arg_type.accept(self)
+
+    def visit_tuple_type(self, t: TupleType) -> None:
+        for item in t.items:
+            item.accept(self)
+
+    def visit_union_type(self, t: UnionType) -> None:
+        for item in t.items:
+            item.accept(self)
+
+    def visit_star_type(self, t: StarType) -> None:
+        t.type.accept(self)
+
+    # Other kinds of type are trivial, since they are atomic (or invalid).
+
+    def visit_unbound_type(self, t: UnboundType) -> None:
+        pass
+
+    def visit_any(self, t: AnyType) -> None:
+        pass
+
+    def visit_void(self, t: Void) -> None:
+        pass
+
+    def visit_none_type(self, t: NoneTyp) -> None:
+        pass
+
+    def visit_deleted_type(self, t: DeletedType) -> None:
+        pass
+
+    def visit_type_list(self, t: TypeList) -> None:
+        self.fail('Invalid type', t)
+
+    def visit_type_var(self, t: TypeVarType) -> None:
+        pass
+
+    def visit_partial_type(self, t: PartialType) -> None:
+        pass
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
new file mode 100644
index 0000000..ffd1e84
--- /dev/null
+++ b/mypy/typefixture.py
@@ -0,0 +1,238 @@
+"""Fixture used in type-related test cases.
+
+It contains class TypeInfos and Type objects.
+"""
+
+from typing import List
+
+from mypy.types import (
+    TypeVarType, AnyType, Void, ErrorType, NoneTyp, Instance, CallableType, TypeVarDef
+)
+from mypy.nodes import (
+    TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable,
+    COVARIANT)
+
+
+class TypeFixture:
+    """Helper class that is used as a fixture in type-related unit tests.
+
+    The members are initialized to contain various type-related values.
+    """
+
+    def __init__(self, variance: int=COVARIANT) -> None:
+        # The 'object' class
+        self.oi = self.make_type_info('builtins.object')               # class object
+        self.o = Instance(self.oi, [])                        # object
+
+        # Type variables
+        self.t = TypeVarType('T', 1, [], self.o, variance)     # T`1 (type variable)
+        self.tf = TypeVarType('T', -1, [], self.o, variance)   # T`-1 (type variable)
+        self.tf2 = TypeVarType('T', -2, [], self.o, variance)  # T`-2 (type variable)
+        self.s = TypeVarType('S', 2, [], self.o, variance)     # S`2 (type variable)
+        self.s1 = TypeVarType('S', 1, [], self.o, variance)    # S`1 (type variable)
+        self.sf = TypeVarType('S', -2, [], self.o, variance)   # S`-2 (type variable)
+        self.sf1 = TypeVarType('S', -1, [], self.o, variance)  # S`-1 (type variable)
+
+        # Simple types
+        self.anyt = AnyType()
+        self.void = Void()
+        self.err = ErrorType()
+        self.nonet = NoneTyp()
+
+        # Abstract class TypeInfos
+
+        # class F
+        self.fi = self.make_type_info('F', is_abstract=True)
+
+        # class F2
+        self.f2i = self.make_type_info('F2', is_abstract=True)
+
+        # class F3(F)
+        self.f3i = self.make_type_info('F3', is_abstract=True, mro=[self.fi])
+
+        # Class TypeInfos
+        self.std_tuplei = self.make_type_info('builtins.tuple')        # class tuple
+        self.type_typei = self.make_type_info('builtins.type')         # class type
+        self.functioni = self.make_type_info('builtins.function')  # function TODO
+        self.ai = self.make_type_info('A', mro=[self.oi])              # class A
+        self.bi = self.make_type_info('B', mro=[self.ai, self.oi])     # class B(A)
+        self.ci = self.make_type_info('C', mro=[self.ai, self.oi])     # class C(A)
+        self.di = self.make_type_info('D', mro=[self.oi])              # class D
+        # class E(F)
+        self.ei = self.make_type_info('E', mro=[self.fi, self.oi])
+        # class E2(F2, F)
+        self.e2i = self.make_type_info('E2', mro=[self.f2i, self.fi, self.oi])
+        # class E3(F, F2)
+        self.e3i = self.make_type_info('E3', mro=[self.fi, self.f2i, self.oi])
+
+        # Generic class TypeInfos
+        # G[T]
+        self.gi = self.make_type_info('G', mro=[self.oi],
+                                      typevars=['T'],
+                                      variances=[variance])
+        # G2[T]
+        self.g2i = self.make_type_info('G2', mro=[self.oi],
+                                       typevars=['T'],
+                                       variances=[variance])
+        # H[S, T]
+        self.hi = self.make_type_info('H', mro=[self.oi],
+                                      typevars=['S', 'T'],
+                                      variances=[variance, variance])
+        # GS[T, S] <: G[S]
+        self.gsi = self.make_type_info('GS', mro=[self.gi, self.oi],
+                                       typevars=['T', 'S'],
+                                       variances=[variance, variance],
+                                       bases=[Instance(self.gi, [self.s])])
+        # GS2[S] <: G[S]
+        self.gs2i = self.make_type_info('GS2', mro=[self.gi, self.oi],
+                                        typevars=['S'],
+                                        variances=[variance],
+                                        bases=[Instance(self.gi, [self.s1])])
+        # list[T]
+        self.std_listi = self.make_type_info('builtins.list', mro=[self.oi],
+                                             typevars=['T'],
+                                             variances=[variance])
+
+        # Instance types
+        self.std_tuple = Instance(self.std_tuplei, [])        # tuple
+        self.type_type = Instance(self.type_typei, [])        # type
+        self.function = Instance(self.functioni, [])  # function TODO
+        self.a = Instance(self.ai, [])          # A
+        self.b = Instance(self.bi, [])          # B
+        self.c = Instance(self.ci, [])          # C
+        self.d = Instance(self.di, [])          # D
+
+        self.e = Instance(self.ei, [])          # E
+        self.e2 = Instance(self.e2i, [])        # E2
+        self.e3 = Instance(self.e3i, [])        # E3
+
+        self.f = Instance(self.fi, [])          # F
+        self.f2 = Instance(self.f2i, [])        # F2
+        self.f3 = Instance(self.f3i, [])        # F3
+
+        # Generic instance types
+        self.ga = Instance(self.gi, [self.a])        # G[A]
+        self.gb = Instance(self.gi, [self.b])        # G[B]
+        self.gd = Instance(self.gi, [self.d])        # G[D]
+        self.go = Instance(self.gi, [self.o])        # G[object]
+        self.gt = Instance(self.gi, [self.t])        # G[T`1]
+        self.gtf = Instance(self.gi, [self.tf])      # G[T`-1]
+        self.gtf2 = Instance(self.gi, [self.tf2])    # G[T`-2]
+        self.gs = Instance(self.gi, [self.s])        # G[S]
+        self.gdyn = Instance(self.gi, [self.anyt])    # G[Any]
+
+        self.g2a = Instance(self.g2i, [self.a])      # G2[A]
+
+        self.gsaa = Instance(self.gsi, [self.a, self.a])  # GS[A, A]
+        self.gsab = Instance(self.gsi, [self.a, self.b])  # GS[A, B]
+        self.gsba = Instance(self.gsi, [self.b, self.a])  # GS[B, A]
+
+        self.gs2a = Instance(self.gs2i, [self.a])    # GS2[A]
+        self.gs2b = Instance(self.gs2i, [self.b])    # GS2[B]
+        self.gs2d = Instance(self.gs2i, [self.d])    # GS2[D]
+
+        self.hab = Instance(self.hi, [self.a, self.b])    # H[A, B]
+        self.haa = Instance(self.hi, [self.a, self.a])    # H[A, A]
+        self.hbb = Instance(self.hi, [self.b, self.b])    # H[B, B]
+        self.hts = Instance(self.hi, [self.t, self.s])    # H[T, S]
+        self.had = Instance(self.hi, [self.a, self.d])    # H[A, D]
+
+        self.lsta = Instance(self.std_listi, [self.a])  # List[A]
+        self.lstb = Instance(self.std_listi, [self.b])  # List[B]
+
+    # Helper methods
+
+    def callable(self, *a):
+        """callable(a1, ..., an, r) constructs a callable with argument types
+        a1, ... an and return type r.
+        """
+        return CallableType(a[:-1], [ARG_POS] * (len(a) - 1),
+                            [None] * (len(a) - 1), a[-1], self.function)
+
+    def callable_type(self, *a):
+        """callable_type(a1, ..., an, r) constructs a callable with
+        argument types a1, ... an and return type r, and which
+        represents a type.
+        """
+        return CallableType(a[:-1], [ARG_POS] * (len(a) - 1),
+                            [None] * (len(a) - 1), a[-1], self.type_type)
+
+    def callable_default(self, min_args, *a):
+        """callable_default(min_args, a1, ..., an, r) constructs a
+        callable with argument types a1, ... an and return type r,
+        with min_args mandatory fixed arguments.
+        """
+        n = len(a) - 1
+        return CallableType(a[:-1],
+                            [ARG_POS] * min_args + [ARG_OPT] * (n - min_args),
+                            [None] * n,
+                            a[-1], self.function)
+
+    def callable_var_arg(self, min_args, *a):
+        """callable_var_arg(min_args, a1, ..., an, r) constructs a callable
+        with argument types a1, ... *an and return type r.
+        """
+        n = len(a) - 1
+        return CallableType(a[:-1],
+                            [ARG_POS] * min_args +
+                            [ARG_OPT] * (n - 1 - min_args) +
+                            [ARG_STAR], [None] * n,
+                            a[-1], self.function)
+
+    def make_type_info(self, name: str,
+                       is_abstract: bool = False,
+                       mro: List[TypeInfo] = None,
+                       bases: List[Instance] = None,
+                       typevars: List[str] = None,
+                       variances: List[int] = None) -> TypeInfo:
+        """Make a TypeInfo suitable for use in unit tests."""
+
+        class_def = ClassDef(name, Block([]), None, [])
+        class_def.fullname = name
+
+        if typevars:
+            v = []  # type: List[TypeVarDef]
+            for id, n in enumerate(typevars, 1):
+                if variances:
+                    variance = variances[id - 1]
+                else:
+                    variance = COVARIANT
+                v.append(TypeVarDef(n, id, None, self.o, variance=variance))
+            class_def.type_vars = v
+
+        info = TypeInfo(SymbolTable(), class_def)
+        if mro is None:
+            mro = []
+            if name != 'builtins.object':
+                mro.append(self.oi)
+        info.mro = [info] + mro
+        if bases is None:
+            if mro:
+                # By default, assume that there is a single non-generic base.
+                bases = [Instance(mro[0], [])]
+            else:
+                bases = []
+        info.bases = bases
+
+        return info
+
+
+class InterfaceTypeFixture(TypeFixture):
+    """Extension of TypeFixture that contains additional generic
+    interface types."""
+
+    def __init__(self):
+        super().__init__()
+        # GF[T]
+        self.gfi = self.make_type_info('GF', typevars=['T'], is_abstract=True)
+
+        # M1 <: GF[A]
+        self.m1i = self.make_type_info('M1',
+                                       is_abstract=True,
+                                       mro=[self.gfi, self.oi],
+                                       bases=[Instance(self.gfi, [self.a])])
+
+        self.gfa = Instance(self.gfi, [self.a])  # GF[A]
+        self.gfb = Instance(self.gfi, [self.b])  # GF[B]
+
+        self.m1 = Instance(self.m1i, [])  # M1
diff --git a/mypy/types.py b/mypy/types.py
new file mode 100644
index 0000000..ff08d71
--- /dev/null
+++ b/mypy/types.py
@@ -0,0 +1,986 @@
+"""Classes for representing mypy types."""
+
+from abc import abstractmethod
+from typing import Any, TypeVar, List, Tuple, cast, Generic, Set, Sequence, Optional
+
+import mypy.nodes
+from mypy.nodes import INVARIANT, SymbolNode
+
+
+T = TypeVar('T')
+
+
+class Type(mypy.nodes.Context):
+    """Abstract base class for all types."""
+
+    line = 0
+
+    def __init__(self, line: int = -1) -> None:
+        self.line = line
+
+    def get_line(self) -> int:
+        return self.line
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        raise RuntimeError('Not implemented')
+
+    def __repr__(self) -> str:
+        return self.accept(TypeStrVisitor())
+
+
+class TypeVarDef(mypy.nodes.Context):
+    """Definition of a single type variable."""
+
+    name = ''
+    id = 0
+    values = None  # type: List[Type]
+    upper_bound = None  # type: Type
+    variance = INVARIANT  # type: int
+    line = 0
+
+    def __init__(self, name: str, id: int, values: List[Type],
+                 upper_bound: Type, variance: int = INVARIANT, line: int = -1) -> None:
+        self.name = name
+        self.id = id
+        self.values = values
+        self.upper_bound = upper_bound
+        self.variance = variance
+        self.line = line
+
+    def get_line(self) -> int:
+        return self.line
+
+    def __repr__(self) -> str:
+        if self.values:
+            return '{} in {}'.format(self.name, tuple(self.values))
+        else:
+            return self.name
+
+
+class UnboundType(Type):
+    """Instance type that has not been bound during semantic analysis."""
+
+    name = ''
+    args = None  # type: List[Type]
+
+    def __init__(self, name: str, args: List[Type] = None, line: int = -1) -> None:
+        if not args:
+            args = []
+        self.name = name
+        self.args = args
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_unbound_type(self)
+
+
+class ErrorType(Type):
+    """The error type is used as the result of failed type operations."""
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_error_type(self)
+
+
+class TypeList(Type):
+    """A list of types [...].
+
+    This is only used for the arguments of a Callable type, i.e. for
+    [arg, ...] in Callable[[arg, ...], ret]. This is not a real type
+    but a syntactic AST construct.
+    """
+
+    items = None  # type: List[Type]
+
+    def __init__(self, items: List[Type], line: int = -1) -> None:
+        super().__init__(line)
+        self.items = items
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_type_list(self)
+
+
+class AnyType(Type):
+    """The type 'Any'."""
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_any(self)
+
+
+class Void(Type):
+    """The return type 'None'.
+
+    This can only be used as the return type in a callable type and as
+    the result type of calling such callable.
+    """
+
+    source = ''   # May be None; function that generated this value
+
+    def __init__(self, source: str = None, line: int = -1) -> None:
+        self.source = source
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_void(self)
+
+    def with_source(self, source: str) -> 'Void':
+        return Void(source, self.line)
+
+
+class NoneTyp(Type):
+    """The type of 'None'.
+
+    This is only used internally during type inference.  Programs
+    cannot declare a variable of this type, and the type checker
+    refuses to infer this type for a variable. However, subexpressions
+    often have this type. Note that this is not used as the result
+    type when calling a function with a void type, even though
+    semantically such a function returns a None value; the void type
+    is used instead so that we can report an error if the caller tries
+    to do anything with the return value.
+    """
+
+    def __init__(self, line: int = -1) -> None:
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_none_type(self)
+
+
+class ErasedType(Type):
+    """Placeholder for an erased type.
+
+    This is used during type inference. This has the special property that
+    it is ignored during type inference.
+    """
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_erased_type(self)
+
+
+class DeletedType(Type):
+    """Type of deleted variables.
+
+    These can be used as lvalues but not rvalues.
+    """
+
+    source = ''   # May be None; name that generated this value
+
+    def __init__(self, source: str = None, line: int = -1) -> None:
+        self.source = source
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_deleted_type(self)
+
+
+class Instance(Type):
+    """An instance type of form C[T1, ..., Tn].
+
+    The list of type arguments may be empty.
+    """
+
+    type = None  # type: mypy.nodes.TypeInfo
+    args = None  # type: List[Type]
+    erased = False      # True if result of type variable substitution
+
+    def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type],
+                 line: int = -1, erased: bool = False) -> None:
+        self.type = typ
+        self.args = args
+        self.erased = erased
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_instance(self)
+
+
+class TypeVarType(Type):
+    """A type variable type.
+
+    This refers to either a class type variable (id > 0) or a function
+    type variable (id < 0).
+    """
+
+    name = ''  # Name of the type variable (for messages and debugging)
+    id = 0     # 1, 2, ... for class type variables; -1, -2, ... for function type variables
+    values = None  # type: List[Type]  # Value restriction, empty list if no restriction
+    upper_bound = None  # type: Type   # Upper bound for values (currently always 'object')
+    # See comments in TypeVarDef for more about variance.
+    variance = INVARIANT  # type: int
+
+    def __init__(self, name: str, id: int, values: List[Type], upper_bound: Type,
+                 variance: int = INVARIANT, line: int = -1) -> None:
+        self.name = name
+        self.id = id
+        self.values = values
+        self.upper_bound = upper_bound
+        self.variance = variance
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_type_var(self)
+
+
+class FunctionLike(Type):
+    """Abstract base class for function types."""
+
+    @abstractmethod
+    def is_type_obj(self) -> bool: pass
+
+    @abstractmethod
+    def type_object(self) -> mypy.nodes.TypeInfo: pass
+
+    @abstractmethod
+    def items(self) -> List['CallableType']: pass
+
+    @abstractmethod
+    def with_name(self, name: str) -> 'FunctionLike': pass
+
+    # Corresponding instance type (e.g. builtins.type)
+    fallback = None  # type: Instance
+
+
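+# Sentinel used by CallableType.copy_modified below to distinguish "argument
+# not supplied" from an explicit None value.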
+_dummy = object()  # type: Any
+
+
+class CallableType(FunctionLike):
+    """Type of a non-overloaded callable object (function)."""
+
+    arg_types = None  # type: List[Type]  # Types of function arguments
+    arg_kinds = None  # type: List[int]   # mypy.nodes.ARG_ constants
+    arg_names = None  # type: List[str]   # None if not a keyword argument
+    min_args = 0                    # Minimum number of arguments
+    is_var_arg = False              # Is it a varargs function?
+    ret_type = None  # type: Type   # Return value type
+    name = ''                       # Name (may be None; for error messages)
+    definition = None  # type: SymbolNode # For error messages.  May be None.
+    # Type variables for a generic function
+    variables = None  # type: List[TypeVarDef]
+
+    # Implicit bound values of type variables. These can be either for
+    # class type variables or for generic function type variables.
+    # For example, the method 'append' of List[int] has implicit value
+    # 'int' for the list type variable; the explicit method type is
+    # just 'def append(int) -> None', without any type variable. Implicit
+    # values are needed for runtime type checking, but they do not
+    # affect static type checking.
+    #
+    # All class type arguments must be stored first, ordered by id,
+    # and function type arguments must be stored next, again ordered by id
+    # (absolute value this time).
+    #
+    # Stored as tuples (id, type).
+    bound_vars = None  # type: List[Tuple[int, Type]]
+
+    # Is this Callable[..., t] (with literal '...')?
+    is_ellipsis_args = False
+
+    def __init__(self, arg_types: List[Type],
+                 arg_kinds: List[int],
+                 arg_names: List[str],
+                 ret_type: Type,
+                 fallback: Instance,
+                 name: str = None,
+                 definition: SymbolNode = None,
+                 variables: List[TypeVarDef] = None,
+                 bound_vars: List[Tuple[int, Type]] = None,
+                 line: int = -1,
+                 is_ellipsis_args: bool = False) -> None:
+        if variables is None:
+            variables = []
+        if not bound_vars:
+            bound_vars = []
+        self.arg_types = arg_types
+        self.arg_kinds = arg_kinds
+        self.arg_names = arg_names
+        self.min_args = arg_kinds.count(mypy.nodes.ARG_POS)
+        self.is_var_arg = mypy.nodes.ARG_STAR in arg_kinds
+        self.ret_type = ret_type
+        self.fallback = fallback
+        assert not name or '<bound method' not in name
+        self.name = name
+        self.definition = definition
+        self.variables = variables
+        self.bound_vars = bound_vars
+        self.is_ellipsis_args = is_ellipsis_args
+        super().__init__(line)
+
+    def copy_modified(self,
+                      arg_types: List[Type] = _dummy,
+                      arg_kinds: List[int] = _dummy,
+                      arg_names: List[str] = _dummy,
+                      ret_type: Type = _dummy,
+                      fallback: Instance = _dummy,
+                      name: str = _dummy,
+                      definition: SymbolNode = _dummy,
+                      variables: List[TypeVarDef] = _dummy,
+                      bound_vars: List[Tuple[int, Type]] = _dummy,
+                      line: int = _dummy,
+                      is_ellipsis_args: bool = _dummy) -> 'CallableType':
+        return CallableType(
+            arg_types=arg_types if arg_types is not _dummy else self.arg_types,
+            arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds,
+            arg_names=arg_names if arg_names is not _dummy else self.arg_names,
+            ret_type=ret_type if ret_type is not _dummy else self.ret_type,
+            fallback=fallback if fallback is not _dummy else self.fallback,
+            name=name if name is not _dummy else self.name,
+            definition=definition if definition is not _dummy else self.definition,
+            variables=variables if variables is not _dummy else self.variables,
+            bound_vars=bound_vars if bound_vars is not _dummy else self.bound_vars,
+            line=line if line is not _dummy else self.line,
+            is_ellipsis_args=(
+                is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args),
+        )
+
+    def is_type_obj(self) -> bool:
+        return self.fallback.type.fullname() == 'builtins.type'
+
+    def type_object(self) -> mypy.nodes.TypeInfo:
+        assert self.is_type_obj()
+        ret = self.ret_type
+        if isinstance(ret, TupleType):
+            ret = ret.fallback
+        return cast(Instance, ret).type
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_callable_type(self)
+
+    def with_name(self, name: str) -> 'CallableType':
+        """Return a copy of this type with the specified name."""
+        ret = self.ret_type
+        if isinstance(ret, Void):
+            ret = ret.with_source(name)
+        return self.copy_modified(ret_type=ret, name=name)
+
+    def max_fixed_args(self) -> int:
+        n = len(self.arg_types)
+        if self.is_var_arg:
+            n -= 1
+        return n
+
+    def items(self) -> List['CallableType']:
+        return [self]
+
+    def is_generic(self) -> bool:
+        return bool(self.variables)
+
+    def type_var_ids(self) -> List[int]:
+        a = []  # type: List[int]
+        for tv in self.variables:
+            a.append(tv.id)
+        return a
+
+
+class Overloaded(FunctionLike):
+    """Overloaded function type T1, ... Tn, where each Ti is CallableType.
+
+    The variant to call is chosen based on static argument
+    types. Overloaded function types can only be defined in stub
+    files, and thus there is no explicit runtime dispatch
+    implementation.
+    """
+
+    _items = None  # type: List[CallableType]  # Must not be empty
+
+    def __init__(self, items: List[CallableType]) -> None:
+        self._items = items
+        self.fallback = items[0].fallback
+        super().__init__(items[0].line)
+
+    def items(self) -> List[CallableType]:
+        return self._items
+
+    def name(self) -> str:
+        return self._items[0].name
+
+    def is_type_obj(self) -> bool:
+        # All the items must have the same type object status, so it's
+        # sufficient to query only (any) one of them.
+        return self._items[0].is_type_obj()
+
+    def type_object(self) -> mypy.nodes.TypeInfo:
+        # All the items must have the same type object, so it's sufficient to
+        # query only (any) one of them.
+        return self._items[0].type_object()
+
+    def with_name(self, name: str) -> 'Overloaded':
+        ni = []  # type: List[CallableType]
+        for it in self._items:
+            ni.append(it.with_name(name))
+        return Overloaded(ni)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_overloaded(self)
+
+
+class TupleType(Type):
+    """The tuple type Tuple[T1, ..., Tn] (at least one type argument).
+
+    Instance variables:
+        items: tuple item types
+        fallback: the underlying instance type that is used for non-tuple methods
+            (this is currently always builtins.tuple, but it could be different for named
+            tuples, for example)
+        implicit: if True, derived from a tuple expression (t, ...) instead of Tuple[t, ...]
+    """
+
+    items = None  # type: List[Type]
+    fallback = None  # type: Instance
+    implicit = False
+
+    def __init__(self, items: List[Type], fallback: Instance, line: int = -1,
+                 implicit: bool = False) -> None:
+        self.items = items
+        self.fallback = fallback
+        self.implicit = implicit
+        super().__init__(line)
+
+    def length(self) -> int:
+        return len(self.items)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_tuple_type(self)
+
+
+class StarType(Type):
+    """The star type *type_parameter.
+
+    This is not a real type but a syntactic AST construct.
+    """
+
+    type = None  # type: Type
+
+    def __init__(self, type: Type, line: int = -1) -> None:
+        self.type = type
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_star_type(self)
+
+
+class UnionType(Type):
+    """The union type Union[T1, ..., Tn] (at least one type argument)."""
+
+    items = None  # type: List[Type]
+
+    def __init__(self, items: List[Type], line: int = -1) -> None:
+        self.items = items
+        super().__init__(line)
+
+    @staticmethod
+    def make_union(items: List[Type], line: int = -1) -> Type:
+        if len(items) > 1:
+            return UnionType(items, line)
+        elif len(items) == 1:
+            return items[0]
+        else:
+            return Void()
+
+    @staticmethod
+    def make_simplified_union(items: List[Type], line: int = -1) -> Type:
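+        # Flatten nested unions, collapse to Any if any item is Any, and drop
+        # items that are subtypes of other items. For example (illustrative):
+        # [int, Union[int, str]] simplifies to Union[int, str], while
+        # [int, object] simplifies to just 'object'.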
+        while any(isinstance(typ, UnionType) for typ in items):
+            all_items = []  # type: List[Type]
+            for typ in items:
+                if isinstance(typ, UnionType):
+                    all_items.extend(typ.items)
+                else:
+                    all_items.append(typ)
+            items = all_items
+
+        if any(isinstance(typ, AnyType) for typ in items):
+            return AnyType()
+
+        from mypy.subtypes import is_subtype
+        removed = set()  # type: Set[int]
+        for i in range(len(items)):
+            if any(is_subtype(items[i], items[j]) for j in range(len(items))
+                   if j not in removed and j != i):
+                removed.add(i)
+
+        simplified_set = [items[i] for i in range(len(items)) if i not in removed]
+        return UnionType.make_union(simplified_set)
+
+    def length(self) -> int:
+        return len(self.items)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_union_type(self)
+
+    def has_readable_member(self, name: str) -> bool:
+        """For a tree of unions of instances, check whether all instances have a given member.
+
+        TODO: Deal with attributes of TupleType etc.
+        TODO: This should probably be refactored to go elsewhere.
+        """
+        return all((isinstance(x, UnionType) and cast(UnionType, x).has_readable_member(name)) or
+                   (isinstance(x, Instance) and cast(Instance, x).type.has_readable_member(name))
+                   for x in self.items)
+
+
+class PartialType(Type):
+    """Type such as List[?] where type arguments are unknown, or partial None type.
+
+    These are used for inferring types in multiphase initialization such as this:
+
+      x = []       # x gets a partial type List[?], as item type is unknown
+      x.append(1)  # partial type gets replaced with normal type List[int]
+
+    Or with None:
+
+      x = None  # x gets a partial type None
+      if c:
+          x = 1  # Infer actual type int for x
+    """
+
+    # None for the 'None' partial type; otherwise a generic class
+    type = None  # type: Optional[mypy.nodes.TypeInfo]
+    var = None  # type: mypy.nodes.Var
+
+    def __init__(self, type: Optional['mypy.nodes.TypeInfo'], var: 'mypy.nodes.Var') -> None:
+        self.type = type
+        self.var = var
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_partial_type(self)
+
+
+class EllipsisType(Type):
+    """The type ... (ellipsis).
+
+    This is not a real type but a syntactic AST construct, used in Callable[..., T], for example.
+
+    A semantically analyzed type will never have ellipsis types.
+    """
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_ellipsis_type(self)
+
+
+#
+# Visitor-related classes
+#
+
+
+class TypeVisitor(Generic[T]):
+    """Visitor class for types (Type subclasses).
+
+    The parameter T is the return type of the visit methods.
+    """
+
+    def _notimplemented_helper(self) -> NotImplementedError:
+        return NotImplementedError("Method visit_type_list not implemented in "
+                                   + "'{}'\n".format(type(self).__name__)
+                                   + "This is a known bug, track development in "
+                                   + "'https://github.com/JukkaL/mypy/issues/730'")
+
+    @abstractmethod
+    def visit_unbound_type(self, t: UnboundType) -> T:
+        pass
+
+    def visit_type_list(self, t: TypeList) -> T:
+        raise self._notimplemented_helper()
+
+    def visit_error_type(self, t: ErrorType) -> T:
+        raise self._notimplemented_helper()
+
+    @abstractmethod
+    def visit_any(self, t: AnyType) -> T:
+        pass
+
+    @abstractmethod
+    def visit_void(self, t: Void) -> T:
+        pass
+
+    @abstractmethod
+    def visit_none_type(self, t: NoneTyp) -> T:
+        pass
+
+    def visit_erased_type(self, t: ErasedType) -> T:
+        raise self._notimplemented_helper()
+
+    @abstractmethod
+    def visit_deleted_type(self, t: DeletedType) -> T:
+        pass
+
+    @abstractmethod
+    def visit_type_var(self, t: TypeVarType) -> T:
+        pass
+
+    @abstractmethod
+    def visit_instance(self, t: Instance) -> T:
+        pass
+
+    @abstractmethod
+    def visit_callable_type(self, t: CallableType) -> T:
+        pass
+
+    def visit_overloaded(self, t: Overloaded) -> T:
+        raise self._notimplemented_helper()
+
+    @abstractmethod
+    def visit_tuple_type(self, t: TupleType) -> T:
+        pass
+
+    def visit_star_type(self, t: StarType) -> T:
+        raise self._notimplemented_helper()
+
+    @abstractmethod
+    def visit_union_type(self, t: UnionType) -> T:
+        pass
+
+    @abstractmethod
+    def visit_partial_type(self, t: PartialType) -> T:
+        pass
+
+    def visit_ellipsis_type(self, t: EllipsisType) -> T:
+        raise self._notimplemented_helper()
+
+
+class TypeTranslator(TypeVisitor[Type]):
+    """Identity type transformation.
+
+    Subclass this and override some methods to implement a non-trivial
+    transformation.
+    """
+
+    def visit_unbound_type(self, t: UnboundType) -> Type:
+        return t
+
+    def visit_type_list(self, t: TypeList) -> Type:
+        return t
+
+    def visit_error_type(self, t: ErrorType) -> Type:
+        return t
+
+    def visit_any(self, t: AnyType) -> Type:
+        return t
+
+    def visit_void(self, t: Void) -> Type:
+        return t
+
+    def visit_none_type(self, t: NoneTyp) -> Type:
+        return t
+
+    def visit_erased_type(self, t: ErasedType) -> Type:
+        return t
+
+    def visit_deleted_type(self, t: DeletedType) -> Type:
+        return t
+
+    def visit_instance(self, t: Instance) -> Type:
+        return Instance(t.type, self.translate_types(t.args), t.line)
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        return t
+
+    def visit_partial_type(self, t: PartialType) -> Type:
+        return t
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        return t.copy_modified(arg_types=self.translate_types(t.arg_types),
+                               ret_type=t.ret_type.accept(self),
+                               variables=self.translate_variables(t.variables),
+                               bound_vars=self.translate_bound_vars(t.bound_vars))
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        return TupleType(self.translate_types(t.items),
+                         cast(Any, t.fallback.accept(self)),
+                         t.line)
+
+    def visit_star_type(self, t: StarType) -> Type:
+        return StarType(t.type.accept(self), t.line)
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        return UnionType(self.translate_types(t.items), t.line)
+
+    def visit_ellipsis_type(self, t: EllipsisType) -> Type:
+        return t
+
+    def translate_types(self, types: List[Type]) -> List[Type]:
+        return [t.accept(self) for t in types]
+
+    def translate_bound_vars(
+            self, types: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
+        return [(id, t.accept(self)) for id, t in types]
+
+    def translate_variables(self,
+                            variables: List[TypeVarDef]) -> List[TypeVarDef]:
+        return variables
+
+    def visit_overloaded(self, t: Overloaded) -> Type:
+        items = []  # type: List[CallableType]
+        for item in t.items():
+            new = item.accept(self)
+            if isinstance(new, CallableType):
+                items.append(new)
+            else:
+                raise RuntimeError('CallableType expected, but got {}'.format(type(new)))
+        return Overloaded(items=items)
+
+
+class TypeStrVisitor(TypeVisitor[str]):
+    """Visitor for pretty-printing types into strings.
+
+    This is mostly for debugging/testing.
+
+    Do not preserve original formatting.
+
+    Notes:
+     - Include implicit bound type variables of callables.
+     - Represent unbound types as Foo? or Foo?[...].
+     - Represent the NoneTyp type as None.
+    """
+
+    def visit_unbound_type(self, t):
+        s = t.name + '?'
+        if t.args != []:
+            s += '[{}]'.format(self.list_str(t.args))
+        return s
+
+    def visit_type_list(self, t):
+        return '<TypeList {}>'.format(self.list_str(t.items))
+
+    def visit_error_type(self, t):
+        return '<ERROR>'
+
+    def visit_any(self, t):
+        return 'Any'
+
+    def visit_void(self, t):
+        return 'void'
+
+    def visit_none_type(self, t):
+        # Include quotes to make this distinct from the None value.
+        return "'None'"
+
+    def visit_erased_type(self, t):
+        return "<Erased>"
+
+    def visit_deleted_type(self, t):
+        if t.source is None:
+            return "<Deleted>"
+        else:
+            return "<Deleted '{}'>".format(t.source)
+
+    def visit_instance(self, t):
+        s = t.type.fullname()
+        if t.erased:
+            s += '*'
+        if t.args != []:
+            s += '[{}]'.format(self.list_str(t.args))
+        return s
+
+    def visit_type_var(self, t):
+        if t.name is None:
+            # Anonymous type variable type (only numeric id).
+            return '`{}'.format(t.id)
+        else:
+            # Named type variable type.
+            return '{}`{}'.format(t.name, t.id)
+
+    def visit_callable_type(self, t):
+        s = ''
+        bare_asterisk = False
+        for i in range(len(t.arg_types)):
+            if s != '':
+                s += ', '
+            if t.arg_kinds[i] == mypy.nodes.ARG_NAMED and not bare_asterisk:
+                s += '*, '
+                bare_asterisk = True
+            if t.arg_kinds[i] == mypy.nodes.ARG_STAR:
+                s += '*'
+            if t.arg_kinds[i] == mypy.nodes.ARG_STAR2:
+                s += '**'
+            if t.arg_names[i]:
+                s += t.arg_names[i] + ': '
+            s += str(t.arg_types[i])
+            if t.arg_kinds[i] == mypy.nodes.ARG_OPT:
+                s += ' ='
+
+        s = '({})'.format(s)
+
+        if not isinstance(t.ret_type, Void):
+            s += ' -> {}'.format(t.ret_type)
+
+        if t.variables:
+            s = '{} {}'.format(t.variables, s)
+
+        if t.bound_vars != []:
+            # Include implicit bound type variables.
+            a = []
+            for i, bt in t.bound_vars:
+                a.append('{}:{}'.format(i, bt))
+            s = '[{}] {}'.format(', '.join(a), s)
+
+        return 'def {}'.format(s)
+
+    def visit_overloaded(self, t):
+        a = []
+        for i in t.items():
+            a.append(i.accept(self))
+        return 'Overload({})'.format(', '.join(a))
+
+    def visit_tuple_type(self, t):
+        s = self.list_str(t.items)
+        if t.fallback:
+            fallback_name = t.fallback.type.fullname()
+            if fallback_name != 'builtins.tuple':
+                return 'Tuple[{}, fallback={}]'.format(s, t.fallback.accept(self))
+        return 'Tuple[{}]'.format(s)
+
+    def visit_star_type(self, t):
+        s = t.type.accept(self)
+        return '*{}'.format(s)
+
+    def visit_union_type(self, t):
+        s = self.list_str(t.items)
+        return 'Union[{}]'.format(s)
+
+    def visit_partial_type(self, t: PartialType) -> str:
+        if t.type is None:
+            return '<partial None>'
+        else:
+            return '<partial {}[{}]>'.format(t.type.name(),
+                                             ', '.join(['?'] * len(t.type.type_vars)))
+
+    def visit_ellipsis_type(self, t):
+        return '...'
+
+    def list_str(self, a):
+        """Convert items of an array to strings (pretty-print types)
+        and join the results with commas.
+        """
+        res = []
+        for t in a:
+            if isinstance(t, Type):
+                res.append(t.accept(self))
+            else:
+                res.append(str(t))
+        return ', '.join(res)
+
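As a brief illustration of how the visitor above is intended to be used (a hedged sketch; the exact strings depend on the type object being printed, and `typ` is a hypothetical placeholder), a type is rendered by passing the visitor to its accept() method:

    # Hypothetical usage; `typ` stands for any mypy Type instance.
    printer = TypeStrVisitor()
    print(typ.accept(printer))  # e.g. 'builtins.int' for an Instance of int,
                                # or 'def (x: builtins.int) -> builtins.str' for a callable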
+
+# These constants define the method used by TypeQuery to combine multiple
+# query results, e.g. for tuple types. The strategy is not used for empty
+# result lists; in that case the default value takes precedence.
+ANY_TYPE_STRATEGY = 0   # Return True if any of the results are True.
+ALL_TYPES_STRATEGY = 1  # Return True if all of the results are True.
+
+
+class TypeQuery(TypeVisitor[bool]):
+    """Visitor for performing simple boolean queries of types.
+
+    This class allows defining the default value for leaf types to
+    simplify the implementation of many queries.
+    """
+
+    default = False  # Default result
+    strategy = 0     # Strategy for combining multiple values (ANY_TYPE_STRATEGY or ALL_TYPES_...).
+
+    def __init__(self, default: bool, strategy: int) -> None:
+        """Construct a query visitor.
+
+        Use the given default result and strategy for combining
+        multiple results. The strategy must be either
+        ANY_TYPE_STRATEGY or ALL_TYPES_STRATEGY.
+        """
+        self.default = default
+        self.strategy = strategy
+
+    def visit_unbound_type(self, t: UnboundType) -> bool:
+        return self.default
+
+    def visit_type_list(self, t: TypeList) -> bool:
+        return self.default
+
+    def visit_error_type(self, t: ErrorType) -> bool:
+        return self.default
+
+    def visit_any(self, t: AnyType) -> bool:
+        return self.default
+
+    def visit_void(self, t: Void) -> bool:
+        return self.default
+
+    def visit_none_type(self, t: NoneTyp) -> bool:
+        return self.default
+
+    def visit_erased_type(self, t: ErasedType) -> bool:
+        return self.default
+
+    def visit_deleted_type(self, t: DeletedType) -> bool:
+        return self.default
+
+    def visit_type_var(self, t: TypeVarType) -> bool:
+        return self.default
+
+    def visit_partial_type(self, t: PartialType) -> bool:
+        return self.default
+
+    def visit_instance(self, t: Instance) -> bool:
+        return self.query_types(t.args)
+
+    def visit_callable_type(self, t: CallableType) -> bool:
+        # FIX generics
+        return self.query_types(t.arg_types + [t.ret_type])
+
+    def visit_tuple_type(self, t: TupleType) -> bool:
+        return self.query_types(t.items)
+
+    def visit_star_type(self, t: StarType) -> bool:
+        return t.type.accept(self)
+
+    def visit_union_type(self, t: UnionType) -> bool:
+        return self.query_types(t.items)
+
+    def visit_overloaded(self, t: Overloaded) -> bool:
+        return self.query_types(t.items())
+
+    def query_types(self, types: Sequence[Type]) -> bool:
+        """Perform a query for a list of types.
+
+        Use the strategy constant to combine the results.
+        """
+        if not types:
+            # Use default result for empty list.
+            return self.default
+        if self.strategy == ANY_TYPE_STRATEGY:
+            # Return True if at least one component is true.
+            res = False
+            for t in types:
+                res = res or t.accept(self)
+                if res:
+                    break
+            return res
+        else:
+            # Return True if all components are true.
+            res = True
+            for t in types:
+                res = res and t.accept(self)
+                if not res:
+                    break
+            return res
+
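To make the strategy constants concrete, here is a minimal hedged sketch of a query built on this class (the subclass name is hypothetical and not part of this patch): a visitor that reports whether a type mentions Any anywhere, using ANY_TYPE_STRATEGY so that a single matching leaf decides the result.

    class HasAnyQuery(TypeQuery):
        """Hypothetical query: does the type contain AnyType anywhere?"""

        def __init__(self) -> None:
            super().__init__(default=False, strategy=ANY_TYPE_STRATEGY)

        def visit_any(self, t: AnyType) -> bool:
            # An Any leaf makes the whole query true; compound types combine
            # their components via query_types() using the ANY strategy.
            return True

    # Usage: some_type.accept(HasAnyQuery()) returns True if Any occurs anywhere.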
+
+def strip_type(typ: Type) -> Type:
+    """Make a copy of type without 'debugging info' (function name)."""
+
+    if isinstance(typ, CallableType):
+        return typ.copy_modified(name=None)
+    elif isinstance(typ, Overloaded):
+        return Overloaded([cast(CallableType, strip_type(item))
+                           for item in typ.items()])
+    else:
+        return typ
+
+
+def replace_leading_arg_type(t: CallableType, self_type: Type) -> CallableType:
+    """Return a copy of a callable type with a different self argument type.
+
+    Assume that the callable is the signature of a method.
+    """
+    return t.copy_modified(arg_types=[self_type] + t.arg_types[1:])
+
+
+def is_named_instance(t: Type, fullname: str) -> bool:
+    return (isinstance(t, Instance) and
+            cast(Instance, t).type.fullname() == fullname)
diff --git a/mypy/util.py b/mypy/util.py
new file mode 100644
index 0000000..33f9f3c
--- /dev/null
+++ b/mypy/util.py
@@ -0,0 +1,102 @@
+"""Utility functions with no non-trivial dependencies."""
+
+import re
+import subprocess
+from typing import TypeVar, List, Any, Tuple, Optional
+
+
+T = TypeVar('T')
+
+ENCODING_RE = re.compile(br'(\s*#.*(\r\n?|\n))?\s*#.*coding[:=]\s*([-\w.]+)')
+
+default_python2_interpreter = ['python2', 'python', '/usr/bin/python']
+
+
+def short_type(obj: object) -> str:
+    """Return the last component of the type name of an object.
+
+    If obj is None, return 'nil'. For example, if obj is 1, return 'int'.
+    """
+    if obj is None:
+        return 'nil'
+    t = str(type(obj))
+    return t.split('.')[-1].rstrip("'>")
+
+
+def indent(s: str, n: int) -> str:
+    """Indent all the lines in s (separated by newlines) by n spaces."""
+    s = ' ' * n + s
+    s = s.replace('\n', '\n' + ' ' * n)
+    return s
+
+
+def array_repr(a: List[T]) -> List[str]:
+    """Return the items of an array converted to strings using repr()."""
+    aa = []  # type: List[str]
+    for x in a:
+        aa.append(repr(x))
+    return aa
+
+
+def dump_tagged(nodes: List[Any], tag: str) -> str:
+    """Convert an array into a pretty-printed multiline string representation.
+
+    The format is
+      tag(
+        item1..
+        itemN)
+    Individual items are formatted like this:
+     - arrays are flattened
+     - pairs (str : array) are converted recursively, so that str is the tag
+     - other items are converted to strings and indented
+    """
+    a = []  # type: List[str]
+    if tag:
+        a.append(tag + '(')
+    for n in nodes:
+        if isinstance(n, list):
+            if n:
+                a.append(dump_tagged(n, None))
+        elif isinstance(n, tuple):
+            s = dump_tagged(n[1], n[0])
+            a.append(indent(s, 2))
+        elif n:
+            a.append(indent(str(n), 2))
+    if tag:
+        a[-1] += ')'
+    return '\n'.join(a)
+
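A worked example of the format described above (a hedged illustration, not output from an actual mypy run): dump_tagged([1, ('pair', [2, 3])], 'root') produces

    root(
      1
      pair(
        2
        3))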
+
+def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]:
+    """Detect the encoding of a Python source file according to PEP 263.
+
+    Return (encoding, line of the coding declaration), or a default
+    encoding and -1 if no declaration is found.
+    """
+    result = ENCODING_RE.match(text)
+    if result:
+        line = 2 if result.group(1) else 1
+        encoding = result.group(3).decode('ascii')
+        # Handle some aliases that Python is happy to accept and that are used in the wild.
+        if encoding.startswith(('iso-latin-1-', 'latin-1-')) or encoding == 'iso-latin-1':
+            encoding = 'latin-1'
+        return encoding, line
+    else:
+        default_encoding = 'utf8' if pyversion[0] >= 3 else 'ascii'
+        return default_encoding, -1
+
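A few hedged examples of what find_python_encoding() returns, derived by reading the regular expression above rather than from a test run:

    find_python_encoding(b'# -*- coding: latin-1 -*-\n', (3, 4))
    # -> ('latin-1', 1): declaration on the first line
    find_python_encoding(b'#!/usr/bin/env python\n# coding: utf-8\n', (3, 4))
    # -> ('utf-8', 2): a comment line precedes the declaration
    find_python_encoding(b'print 1\n', (2, 7))
    # -> ('ascii', -1): no declaration, so the Python 2 default applies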
+
+_python2_interpreter = None  # type: Optional[str]
+
+
+def try_find_python2_interpreter() -> Optional[str]:
+    global _python2_interpreter
+    if _python2_interpreter:
+        return _python2_interpreter
+    for interpreter in default_python2_interpreter:
+        try:
+            process = subprocess.Popen([interpreter, '-V'], stdout=subprocess.PIPE,
+                                       stderr=subprocess.STDOUT)
+            stdout, stderr = process.communicate()
+            if b'Python 2.7' in stdout:
+                _python2_interpreter = interpreter
+                return interpreter
+        except OSError:
+            pass
+    return None
diff --git a/mypy/version.py b/mypy/version.py
new file mode 100644
index 0000000..e1424ed
--- /dev/null
+++ b/mypy/version.py
@@ -0,0 +1 @@
+__version__ = '0.3.1'
diff --git a/mypy/visitor.py b/mypy/visitor.py
new file mode 100644
index 0000000..717e004
--- /dev/null
+++ b/mypy/visitor.py
@@ -0,0 +1,229 @@
+"""Generic abstract syntax tree node visitor"""
+
+from typing import TypeVar, Generic
+
+if False:
+    # break import cycle only needed for mypy
+    import mypy.nodes
+
+
+T = TypeVar('T')
+
+
+class NodeVisitor(Generic[T]):
+    """Empty base class for parse tree node visitors.
+
+    The T type argument specifies the return type of the visit
+    methods. As all methods defined here return None by default,
+    subclasses do not always need to override all the methods.
+
+    TODO make the default return value explicit
+    """
+
+    # Module structure
+
+    def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> T:
+        pass
+
+    def visit_import(self, o: 'mypy.nodes.Import') -> T:
+        pass
+
+    def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T:
+        pass
+
+    def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T:
+        pass
+
+    # Definitions
+
+    def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T:
+        pass
+
+    def visit_overloaded_func_def(self,
+                                  o: 'mypy.nodes.OverloadedFuncDef') -> T:
+        pass
+
+    def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T:
+        pass
+
+    def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T:
+        pass
+
+    def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T:
+        pass
+
+    def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T:
+        pass
+
+    def visit_var(self, o: 'mypy.nodes.Var') -> T:
+        pass
+
+    # Statements
+
+    def visit_block(self, o: 'mypy.nodes.Block') -> T:
+        pass
+
+    def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T:
+        pass
+
+    def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T:
+        pass
+
+    def visit_operator_assignment_stmt(self,
+                                       o: 'mypy.nodes.OperatorAssignmentStmt') -> T:
+        pass
+
+    def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T:
+        pass
+
+    def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T:
+        pass
+
+    def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T:
+        pass
+
+    def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T:
+        pass
+
+    def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T:
+        pass
+
+    def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T:
+        pass
+
+    def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T:
+        pass
+
+    def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T:
+        pass
+
+    def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T:
+        pass
+
+    def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T:
+        pass
+
+    def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T:
+        pass
+
+    def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T:
+        pass
+
+    def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T:
+        pass
+
+    def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T:
+        pass
+
+    # Expressions
+
+    def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T:
+        pass
+
+    def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T:
+        pass
+
+    def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T:
+        pass
+
+    def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T:
+        pass
+
+    def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T:
+        pass
+
+    def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T:
+        pass
+
+    def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T:
+        pass
+
+    def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T:
+        pass
+
+    def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T:
+        pass
+
+    def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T:
+        pass
+
+    def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T:
+        pass
+
+    def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T:
+        pass
+
+    def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T:
+        pass
+
+    def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T:
+        pass
+
+    def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T:
+        pass
+
+    def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T:
+        pass
+
+    def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> T:
+        pass
+
+    def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T:
+        pass
+
+    def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T:
+        pass
+
+    def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T:
+        pass
+
+    def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T:
+        pass
+
+    def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T:
+        pass
+
+    def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T:
+        pass
+
+    def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T:
+        pass
+
+    def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> T:
+        pass
+
+    def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T:
+        pass
+
+    def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T:
+        pass
+
+    def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T:
+        pass
+
+    def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T:
+        pass
+
+    def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T:
+        pass
+
+    def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T:
+        pass
+
+    def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T:
+        pass
+
+    def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T:
+        pass
+
+    def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T:
+        pass
+
+    def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T:
+        pass
+
+    def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T:
+        pass
+
+    def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T:
+        pass
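As a minimal hedged sketch of how this base class is meant to be extended (the subclass below is hypothetical and not part of this patch, and it assumes, as elsewhere in the code base, that FuncDef exposes its name via name()), a visitor that only cares about function definitions overrides just the relevant method. Note that NodeVisitor itself does not descend into child nodes; recursive visitors are typically built on the traverser in mypy/traverser.py instead.

    class FuncDefReporter(NodeVisitor[None]):
        """Hypothetical visitor that reports each FuncDef node it is handed."""

        def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> None:
            print('found function:', o.name())

    # A node is dispatched to the visitor via node.accept(FuncDefReporter()).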
diff --git a/mypy/waiter.py b/mypy/waiter.py
new file mode 100644
index 0000000..2d9767a
--- /dev/null
+++ b/mypy/waiter.py
@@ -0,0 +1,285 @@
+"""Parallel subprocess task runner.
+
+This is used for running mypy tests.
+"""
+
+from typing import Dict, List, Optional, Set, Tuple
+
+import os
+import pipes
+import re
+from subprocess import Popen, PIPE, STDOUT
+import sys
+import tempfile
+
+
+class WaiterError(Exception):
+    pass
+
+
+class LazySubprocess:
+    """Wrapper around a subprocess that runs a test task."""
+
+    def __init__(self, name: str, args: List[str], *, cwd: str = None,
+                 env: Dict[str, str] = None) -> None:
+        self.name = name
+        self.args = args
+        self.cwd = cwd
+        self.env = env
+
+    def start(self) -> None:
+        self.outfile = tempfile.NamedTemporaryFile()
+        self.process = Popen(self.args, cwd=self.cwd, env=self.env,
+                             stdout=self.outfile, stderr=STDOUT)
+        self.pid = self.process.pid
+
+    def handle_exit_status(self, status: int) -> None:
+        """Update process exit status received via an external os.waitpid() call."""
+        # Inlined subprocess._handle_exitstatus, it's not a public API.
+        # TODO(jukka): I'm not quite sure why this is implemented like this.
+        process = self.process
+        assert process.returncode is None
+        if os.WIFSIGNALED(status):
+            process.returncode = -os.WTERMSIG(status)
+        elif os.WIFEXITED(status):
+            process.returncode = os.WEXITSTATUS(status)
+        else:
+            # Should never happen
+            raise RuntimeError("Unknown child exit status!")
+        assert process.returncode is not None
+
+    def wait(self) -> int:
+        return self.process.wait()
+
+    def status(self) -> Optional[int]:
+        return self.process.returncode
+
+    def read_output(self) -> str:
+        with open(self.outfile.name, 'rb') as file:
+            # Assume it's ascii to avoid unicode headaches (and portability issues).
+            return file.read().decode('ascii')
+
+    def cleanup(self) -> None:
+        self.outfile.close()
+        assert not os.path.exists(self.outfile.name)
+
+
+class Noter:
+    """Update stats about running jobs.
+
+    Only used when verbosity == 0.
+    """
+
+    def __init__(self, total: int) -> None:
+        # Total number of tasks.
+        self.total = total
+        self.running = set()  # type: Set[int]
+        # Passed tasks.
+        self.passes = 0
+        # Failed tasks.
+        self.fails = 0
+
+    def start(self, job: int) -> None:
+        self.running.add(job)
+        self.update()
+
+    def stop(self, job: int, failed: bool) -> None:
+        self.running.remove(job)
+        if failed:
+            self.fails += 1
+        else:
+            self.passes += 1
+        self.update()
+
+    def message(self, msg: str) -> None:
+        # Using a CR instead of NL will overwrite the line.
+        sys.stdout.write('%-80s\r' % msg)
+        sys.stdout.flush()
+
+    def update(self) -> None:
+        pending = self.total - self.passes - self.fails - len(self.running)
+        args = (self.passes, self.fails, pending, len(self.running))
+        msg = 'passed %d, failed %d, pending %d; running %d' % args
+        self.message(msg)
+
+    def clear(self) -> None:
+        self.message('')
+
+
+class Waiter:
+    """Run subprocesses in parallel and wait for them.
+
+    Usage:
+
+    waiter = Waiter()
+    waiter.add(LazySubprocess('sleep 9', ['sleep', '9']))
+    waiter.add(LazySubprocess('sleep 10', ['sleep', '10']))
+    waiter.run()  # prints a summary and exits with status 1 on unexpected failures
+    """
+    def __init__(self, limit: int = 0, *, verbosity: int = 0, xfail: List[str] = []) -> None:
+        self.verbosity = verbosity
+        self.queue = []  # type: List[LazySubprocess]
+        # Index of next task to run in the queue.
+        self.next = 0
+        self.current = {}  # type: Dict[int, Tuple[int, LazySubprocess]]
+        if limit == 0:
+            try:
+                sched_getaffinity = os.sched_getaffinity
+            except AttributeError:
+                limit = 2
+            else:
+                # Note: only count CPUs we are allowed to use. It is a
+                # major mistake to count *all* CPUs on the machine.
+                limit = len(sched_getaffinity(0))
+        self.limit = limit
+        assert limit > 0
+        self.xfail = set(xfail)
+        self._note = None  # type: Noter
+
+    def add(self, cmd: LazySubprocess) -> int:
+        rv = len(self.queue)
+        self.queue.append(cmd)
+        return rv
+
+    def _start_next(self) -> None:
+        num = self.next
+        cmd = self.queue[num]
+        name = cmd.name
+        cmd.start()
+        self.current[cmd.pid] = (num, cmd)
+        if self.verbosity >= 1:
+            print('%-8s #%d %s' % ('START', num, name))
+            if self.verbosity >= 2:
+                print('%-8s #%d %s' % ('CWD', num, cmd.cwd or '.'))
+                cmd_str = ' '.join(pipes.quote(a) for a in cmd.args)
+                print('%-8s #%d %s' % ('COMMAND', num, cmd_str))
+            sys.stdout.flush()
+        elif self.verbosity >= 0:
+            self._note.start(num)
+        self.next += 1
+
+    def _wait_next(self) -> Tuple[List[str], int, int]:
+        """Wait for a single task to finish.
+
+        Return a tuple (list of failed tasks, number of test cases, number of failed tests).
+        """
+        pid, status = os.waitpid(-1, 0)
+        num, cmd = self.current.pop(pid)
+
+        cmd.handle_exit_status(status)
+
+        name = cmd.name
+        rc = cmd.wait()
+        if rc >= 0:
+            msg = 'EXIT %d' % rc
+        else:
+            msg = 'SIG %d' % -rc
+        if self.verbosity >= 1:
+            print('%-8s #%d %s' % (msg, num, name))
+            sys.stdout.flush()
+        elif self.verbosity >= 0:
+            self._note.stop(num, bool(rc))
+        elif self.verbosity >= -1:
+            sys.stdout.write('.' if rc == 0 else msg[0])
+            num_complete = self.next - len(self.current)
+            if num_complete % 50 == 0 or num_complete == len(self.queue):
+                sys.stdout.write(' %d/%d\n' % (num_complete, len(self.queue)))
+            elif num_complete % 10 == 0:
+                sys.stdout.write(' ')
+            sys.stdout.flush()
+
+        if rc != 0:
+            if name not in self.xfail:
+                fail_type = 'FAILURE'
+            else:
+                fail_type = 'XFAIL'
+        else:
+            if name not in self.xfail:
+                fail_type = None
+            else:
+                fail_type = 'UPASS'
+
+        # Get task output.
+        output = cmd.read_output()
+        cmd.cleanup()
+        num_tests, num_tests_failed = parse_test_stats_from_output(output, fail_type)
+
+        if fail_type is not None or self.verbosity >= 1:
+            self._report_task_failure(fail_type, num, name, output)
+
+        if fail_type is not None:
+            failed_tasks = ['%8s %s' % (fail_type, name)]
+        else:
+            failed_tasks = []
+
+        return failed_tasks, num_tests, num_tests_failed
+
+    def _report_task_failure(self, fail_type: Optional[str], num: int, name: str,
+                             output: str) -> None:
+        if self.verbosity <= 0:
+            sys.stdout.write('\n')
+        sys.stdout.write('\n%-8s #%d %s\n\n' % (fail_type or 'PASS', num, name))
+        sys.stdout.write(output + '\n')
+        sys.stdout.flush()
+
+    def run(self) -> None:
+        if self.verbosity >= -1:
+            print('%-8s %d' % ('PARALLEL', self.limit))
+            sys.stdout.flush()
+        if self.verbosity == 0:
+            self._note = Noter(len(self.queue))
+        print('SUMMARY  %d tasks selected' % len(self.queue))
+        sys.stdout.flush()
+        # Failed tasks.
+        all_failures = []  # type: List[str]
+        # Number of test cases. Some tasks can involve multiple test cases.
+        total_tests = 0
+        # Number of failed test cases.
+        total_failed_tests = 0
+        while self.current or self.next < len(self.queue):
+            while len(self.current) < self.limit and self.next < len(self.queue):
+                self._start_next()
+            fails, tests, test_fails = self._wait_next()
+            all_failures += fails
+            total_tests += tests
+            total_failed_tests += test_fails
+        if self.verbosity == 0:
+            self._note.clear()
+        if all_failures:
+            summary = 'SUMMARY  %d/%d tasks and %d/%d tests failed' % (
+                len(all_failures), len(self.queue), total_failed_tests, total_tests)
+            print(summary)
+            for f in all_failures:
+                print(f)
+            print(summary)
+            print('*** FAILURE ***')
+            sys.stdout.flush()
+            if any('XFAIL' not in f for f in all_failures):
+                sys.exit(1)
+        else:
+            print('SUMMARY  all %d tasks and %d tests passed' % (
+                len(self.queue), total_tests))
+            print('*** OK ***')
+            sys.stdout.flush()
+
+
+def parse_test_stats_from_output(output: str, fail_type: Optional[str]) -> Tuple[int, int]:
+    """Parse tasks output and determine test counts.
+
+    Return a tuple (number of tests, number of test failures). If the
+    counts cannot be found in the output, fall back to treating the
+    entire task as a single test.
+    """
+    m = re.search('^([0-9]+)/([0-9]+) test cases failed(, ([0-9]+) skipped)?.$', output,
+                  re.MULTILINE)
+    if m:
+        return int(m.group(2)), int(m.group(1))
+    m = re.search('^([0-9]+) test cases run(, ([0-9]+) skipped)?, all passed.$', output,
+                  re.MULTILINE)
+    if m:
+        return int(m.group(1)), 0
+    # Couldn't find test counts, so fall back to a single test per task.
+    if fail_type is not None:
+        return 1, 1
+    else:
+        return 1, 0
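Two hedged examples of the summary lines the parser above recognises (derived from its regular expressions, not from a recorded run):

    parse_test_stats_from_output('34/120 test cases failed, 2 skipped.', 'FAILURE')
    # -> (120, 34)
    parse_test_stats_from_output('120 test cases run, all passed.', None)
    # -> (120, 0)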
diff --git a/scripts/mypy b/scripts/mypy
new file mode 100755
index 0000000..0e96016
--- /dev/null
+++ b/scripts/mypy
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+"""Mypy type checker command line tool."""
+
+from mypy.main import main
+
+main(__file__)
diff --git a/scripts/stubgen b/scripts/stubgen
new file mode 100755
index 0000000..9b2d05a
--- /dev/null
+++ b/scripts/stubgen
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+"""Generator of dynamically typed draft stubs for arbitrary modules.
+
+This is just a wrapper script. Look at mypy/stubgen.py for the actual
+implementation.
+"""
+
+import os
+import os.path
+import sys
+
+file_dir = os.path.dirname(__file__)
+parent_dir = os.path.join(file_dir, os.pardir)
+if os.path.exists(os.path.join(parent_dir, '.git')):
+    # We are running from a git clone.
+    sys.path.insert(0, parent_dir)
+
+import mypy.stubgen
+
+mypy.stubgen.main()
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..2aeeb50
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,13 @@
+[flake8]
+max-line-length = 99
+exclude = mypy/codec/*,mypy/test/data/lib-stub/*,mypy/test/data/fixtures/*
+# Things to ignore:
+#   E251: spaces around default arg value (against our style)
+#   E128: continuation line under-indented (too noisy)
+#   F401: unused identifiers (useless, as it doesn't see inside # type: comments)
+#   W601: has_key() deprecated (false positives)
+#   E701: multiple statements on one line (colon) (we use this for classes with empty body)
+#   W503: line break before binary operator
+#   E704: multiple statements on one line (def)
+#   E402: module level import not at top of file
+ignore = E251,E128,F401,W601,E701,W503,E704,E402
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..89554a6
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+
+import glob
+import os
+import os.path
+import sys
+
+from distutils.core import setup
+from mypy.version import __version__
+from mypy import git
+
+if sys.version_info < (3, 2, 0):
+    sys.stderr.write("ERROR: You need Python 3.2 or later to use mypy.\n")
+    exit(1)
+
+git.verify_git_integrity_or_abort(".")
+
+version = __version__
+description = 'Optional static typing for Python'
+long_description = '''
+Mypy -- Optional Static Typing for Python
+=========================================
+
+Add type annotations to your Python programs, and use mypy to type
+check them.  Mypy is essentially a Python linter on steroids, and it
+can catch many programming errors by analyzing your program, without
+actually having to run it.  Mypy has a powerful type system with
+features such as type inference, gradual typing, generics and union
+types.
+'''.lstrip()
+
+
+def find_data_files(base, globs):
+    """Find all interesting data files, for setup(data_files=)
+
+    Arguments:
+      base:  The directory to search in.
+      globs: A list of glob patterns to accept files.
+    """
+
+    rv_dirs = [root for root, dirs, files in os.walk(base)]
+    rv = []
+    for rv_dir in rv_dirs:
+        files = []
+        for pat in globs:
+            files += glob.glob(os.path.join(rv_dir, pat))
+        if not files:
+            continue
+        target = os.path.join('lib', 'mypy', rv_dir)
+        rv.append((target, files))
+
+    return rv
+
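A hedged illustration of the shape of the return value (actual paths depend on the files present in the source tree):

    find_data_files('typeshed', ['*.py', '*.pyi'])
    # -> [('lib/mypy/typeshed/stdlib/2.7',
    #      ['typeshed/stdlib/2.7/HTMLParser.pyi', ...]),
    #     ...]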
+data_files = []
+
+data_files += find_data_files('typeshed', ['*.py', '*.pyi'])
+
+data_files += find_data_files('xml', ['*.xsd', '*.xslt', '*.css'])
+
+classifiers = [
+    'Development Status :: 2 - Pre-Alpha',
+    'Environment :: Console',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: POSIX',
+    'Programming Language :: Python :: 3.2',
+    'Programming Language :: Python :: 3.3',
+    'Programming Language :: Python :: 3.4',
+    'Programming Language :: Python :: 3.5',
+    'Topic :: Software Development',
+]
+
+package_dir = {'mypy': 'mypy'}
+if sys.version_info < (3, 5, 0):
+    package_dir[''] = 'lib-typing/3.2'
+
+setup(name='mypy-lang',
+      version=version,
+      description=description,
+      long_description=long_description,
+      author='Jukka Lehtosalo',
+      author_email='jukka.lehtosalo at iki.fi',
+      url='http://www.mypy-lang.org/',
+      license='MIT License',
+      platforms=['POSIX'],
+      package_dir=package_dir,
+      py_modules=['typing'] if sys.version_info < (3, 5, 0) else [],
+      packages=['mypy'],
+      scripts=['scripts/mypy', 'scripts/stubgen'],
+      data_files=data_files,
+      classifiers=classifiers,
+      )
diff --git a/typeshed/runtests.py b/typeshed/runtests.py
new file mode 100755
index 0000000..603f421
--- /dev/null
+++ b/typeshed/runtests.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+"""Test runner for typeshed.
+
+Depends on mypy being installed.
+
+Approach:
+
+1. Parse sys.argv
+2. Compute appropriate arguments for mypy
+3. Stuff those arguments into sys.argv
+4. Run mypy.main('')
+5. Repeat steps 2-4 for other mypy runs (e.g. --py2)
+"""
+
+import os
+import re
+import sys
+import argparse
+
+parser = argparse.ArgumentParser(description="Test runner for typeshed. Patterns are unanchored regexps on the full path.")
+parser.add_argument('-v', '--verbose', action='count', default=0, help="More output")
+parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run mypy")
+parser.add_argument('-x', '--exclude', type=str, nargs='*', help="Exclude pattern")
+parser.add_argument('filter', type=str, nargs='*', help="Include pattern (default all)")
+
+
+def log(args, *varargs):
+    if args.verbose >= 2:
+        print(*varargs)
+
+def match(args, fn):
+    if not args.filter and not args.exclude:
+        log(args, fn, 'accept by default')
+        return True
+    if args.exclude:
+        for f in args.exclude:
+            if re.search(f, fn):
+                log(args, fn, 'excluded by pattern', f)
+                return False
+    if args.filter:
+        for f in args.filter:
+            if re.search(f, fn):
+                log(args, fn, 'accepted by pattern', f)
+                return True
+    if args.filter:
+        log(args, fn, 'rejected (no pattern matches)')
+        return False
+    log(args, fn, 'accepted (no exclude pattern matches)')
+    return True
+
+
+def main():
+    args = parser.parse_args()
+
+    try:
+        from mypy.main import main as mypy_main
+    except ImportError:
+        print("Cannot import mypy. Did you install it?")
+        sys.exit(1)
+
+    files2 = []
+    files3 = []
+    for dir, subdirs, files in os.walk('.'):
+        for file in files:
+            if file == '__builtin__.pyi':
+                continue  # Special case (alias for builtins.py).
+            if file.endswith('.pyi') or file.endswith('.py'):
+                full = os.path.join(dir, file)
+                if match(args, full):
+                    if '/2' in dir:
+                        files2.append(full)
+                    if '/3' in dir or '/2and3' in dir:
+                        files3.append(full)
+    if not (files2 or files3):
+        print('--- nothing to do ---')
+    code = 0
+    for flags, files in [([], files3), (['--py2'], files2)]:
+        if files:
+            sys.argv = ['mypy'] + flags + files
+            if args.verbose:
+                print('running', ' '.join(sys.argv))
+            else:
+                print('running mypy', ' '.join(flags), '# with', len(files), 'files')
+            try:
+                if not args.dry_run:
+                    mypy_main('')
+            except SystemExit as err:
+                code = max(code, err.code)
+    if code:
+        print('--- exit status', code, '---')
+        sys.exit(code)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/typeshed/stdlib/2.7/HTMLParser.pyi b/typeshed/stdlib/2.7/HTMLParser.pyi
new file mode 100644
index 0000000..ae6c309
--- /dev/null
+++ b/typeshed/stdlib/2.7/HTMLParser.pyi
@@ -0,0 +1,28 @@
+from typing import List, Tuple, AnyStr
+from markupbase import ParserBase
+
+class HTMLParser(ParserBase):
+    def __init__(self, *args, convert_charrefs: bool) -> None: ...
+    def feed(self, feed: AnyStr) -> None: ...
+    def close(self) -> None: ...
+    def reset(self) -> None: ...
+
+    def get_starttag_text(self) -> AnyStr: ...
+    def set_cdata_mode(self, elem: AnyStr) -> None: ...
+    def clear_cdata_mode(self) -> None: ...
+
+    def handle_startendtag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ...
+    def handle_starttag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ...
+    def handle_endtag(self, tag: AnyStr): ...
+    def handle_charref(self, name: AnyStr): ...
+    def handle_entityref(self, name: AnyStr): ...
+    def handle_data(self, data: AnyStr): ...
+    def handle_comment(self, data: AnyStr): ...
+    def handle_decl(self, decl: AnyStr): ...
+    def handle_pi(self, data: AnyStr): ...
+
+    def unknown_decl(self, data: AnyStr): ...
+
+    def unescape(self, s: AnyStr) -> AnyStr: ...
+
+class HTMLParseError(Exception): ...
diff --git a/typeshed/stdlib/2.7/Queue.pyi b/typeshed/stdlib/2.7/Queue.pyi
new file mode 100644
index 0000000..c12ee7a
--- /dev/null
+++ b/typeshed/stdlib/2.7/Queue.pyi
@@ -0,0 +1,29 @@
+# Stubs for Queue (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Empty(Exception): ...
+class Full(Exception): ...
+
+class Queue:
+    maxsize = ... # type: Any
+    mutex = ... # type: Any
+    not_empty = ... # type: Any
+    not_full = ... # type: Any
+    all_tasks_done = ... # type: Any
+    unfinished_tasks = ... # type: Any
+    def __init__(self, maxsize: int = ...) -> None: ...
+    def task_done(self) -> None: ...
+    def join(self) -> None: ...
+    def qsize(self) -> int: ...
+    def empty(self) -> bool: ...
+    def full(self) -> bool: ...
+    def put(self, item: Any, block: bool = ..., timeout: float = ...) -> None: ...
+    def put_nowait(self, item) -> None: ...
+    def get(self, block: bool = ..., timeout: float = ...) -> Any: ...
+    def get_nowait(self) -> Any: ...
+
+class PriorityQueue(Queue): ...
+class LifoQueue(Queue): ...
diff --git a/typeshed/stdlib/2.7/StringIO.pyi b/typeshed/stdlib/2.7/StringIO.pyi
new file mode 100644
index 0000000..22f7a02
--- /dev/null
+++ b/typeshed/stdlib/2.7/StringIO.pyi
@@ -0,0 +1,28 @@
+# Stubs for StringIO (Python 2)
+
+from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List
+
+class StringIO(IO[AnyStr], Generic[AnyStr]):
+    closed = ... # type: bool
+    softspace = ... # type: int
+    def __init__(self, buf: AnyStr = ...) -> None: ...
+    def __iter__(self) -> Iterator[AnyStr]: ...
+    def next(self) -> AnyStr: ...
+    def close(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def seek(self, pos: int, mode: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def read(self, n: int = ...) -> AnyStr: ...
+    def readline(self, length: int = ...) -> AnyStr: ...
+    def readlines(self, sizehint: int = ...) -> List[AnyStr]: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def write(self, s: AnyStr) -> None: ...
+    def writelines(self, iterable: Iterable[AnyStr]) -> None: ...
+    def flush(self) -> None: ...
+    def getvalue(self) -> AnyStr: ...
+    def __enter__(self) -> Any: ...
+    def __exit__(self, type: Any, value: Any, traceback: Any) -> Any: ...
+    def fileno(self) -> int: ...
+    def readable(self) -> bool: ...
+    def seekable(self) -> bool: ...
+    def writable(self) -> bool: ...
diff --git a/typeshed/stdlib/2.7/UserDict.pyi b/typeshed/stdlib/2.7/UserDict.pyi
new file mode 100644
index 0000000..e5cfedc
--- /dev/null
+++ b/typeshed/stdlib/2.7/UserDict.pyi
@@ -0,0 +1,11 @@
+from typing import Dict, Generic, Mapping, TypeVar
+
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+
+class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]):
+    data = ... # type: Mapping[_KT, _VT]
+
+    def __init__(self, initialdata: Mapping[_KT, _VT] = ...) -> None: ...
+
+    # TODO: DictMixin
diff --git a/typeshed/stdlib/2.7/UserList.pyi b/typeshed/stdlib/2.7/UserList.pyi
new file mode 100644
index 0000000..4f31b93
--- /dev/null
+++ b/typeshed/stdlib/2.7/UserList.pyi
@@ -0,0 +1,3 @@
+import collections
+
+class UserList(collections.MutableSequence): ...
diff --git a/typeshed/stdlib/2.7/UserString.pyi b/typeshed/stdlib/2.7/UserString.pyi
new file mode 100644
index 0000000..d2a33c3
--- /dev/null
+++ b/typeshed/stdlib/2.7/UserString.pyi
@@ -0,0 +1,4 @@
+import collections
+
+class UserString(collections.Sequence): ...
+class MutableString(UserString, collections.MutableSequence): ...
\ No newline at end of file
diff --git a/typeshed/stdlib/2.7/__builtin__.pyi b/typeshed/stdlib/2.7/__builtin__.pyi
new file mode 100644
index 0000000..546202b
--- /dev/null
+++ b/typeshed/stdlib/2.7/__builtin__.pyi
@@ -0,0 +1,870 @@
+# Stubs for builtins (Python 2.7)
+
+from typing import (
+    TypeVar, Iterator, Iterable, overload,
+    Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
+    AbstractSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs,
+    SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping,
+    MutableSet
+)
+from abc import abstractmethod, ABCMeta
+
+_T = TypeVar('_T')
+_T_co = TypeVar('_T_co', covariant=True)
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_S = TypeVar('_S')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+
+staticmethod = object()  # Special, only valid as a decorator.
+classmethod = object()  # Special, only valid as a decorator.
+property = object()
+
+class object:
+    __doc__ = ...  # type: str
+    __class__ = ...  # type: type
+
+    def __init__(self) -> None: ...
+    def __new__(cls) -> Any: ...
+    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __eq__(self, o: object) -> bool: ...
+    def __ne__(self, o: object) -> bool: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __hash__(self) -> int: ...
+
+class type:
+    __name__ = ...  # type: str
+    __module__ = ...  # type: str
+    __dict__ = ...  # type: Dict[unicode, Any]
+
+    @overload
+    def __init__(self, o: object) -> None: ...
+    @overload
+    def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ...
+    # TODO: __new__ may have to be special and not a static method.
+    @staticmethod
+    def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+
+class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: SupportsInt) -> None: ...
+    @overload
+    def __init__(self, x: Union[str, unicode, bytearray], base: int = 10) -> None: ...
+    def bit_length(self) -> int: ...
+
+    def __add__(self, x: int) -> int: ...
+    def __sub__(self, x: int) -> int: ...
+    def __mul__(self, x: int) -> int: ...
+    def __floordiv__(self, x: int) -> int: ...
+    def __div__(self, x: int) -> int: ...
+    def __truediv__(self, x: int) -> float: ...
+    def __mod__(self, x: int) -> int: ...
+    def __radd__(self, x: int) -> int: ...
+    def __rsub__(self, x: int) -> int: ...
+    def __rmul__(self, x: int) -> int: ...
+    def __rfloordiv__(self, x: int) -> int: ...
+    def __rdiv__(self, x: int) -> int: ...
+    def __rtruediv__(self, x: int) -> float: ...
+    def __rmod__(self, x: int) -> int: ...
+    def __pow__(self, x: int) -> Any: ...  # Return type can be int or float, depending on x.
+    def __rpow__(self, x: int) -> Any: ...
+    def __and__(self, n: int) -> int: ...
+    def __or__(self, n: int) -> int: ...
+    def __xor__(self, n: int) -> int: ...
+    def __lshift__(self, n: int) -> int: ...
+    def __rshift__(self, n: int) -> int: ...
+    def __rand__(self, n: int) -> int: ...
+    def __ror__(self, n: int) -> int: ...
+    def __rxor__(self, n: int) -> int: ...
+    def __rlshift__(self, n: int) -> int: ...
+    def __rrshift__(self, n: int) -> int: ...
+    def __neg__(self) -> int: ...
+    def __pos__(self) -> int: ...
+    def __invert__(self) -> int: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: int) -> bool: ...
+    def __le__(self, x: int) -> bool: ...
+    def __gt__(self, x: int) -> bool: ...
+    def __ge__(self, x: int) -> bool: ...
+
+    def __str__(self) -> str: ...
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: return self
+    def __abs__(self) -> int: ...
+    def __hash__(self) -> int: ...
+
+class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: SupportsFloat) -> None: ...
+    @overload
+    def __init__(self, x: unicode) -> None: ...
+    @overload
+    def __init__(self, x: bytearray) -> None: ...
+    def as_integer_ratio(self) -> Tuple[int, int]: ...
+    def hex(self) -> str: ...
+    def is_integer(self) -> bool: ...
+    @classmethod
+    def fromhex(cls, s: str) -> float: ...
+
+    def __add__(self, x: float) -> float: ...
+    def __sub__(self, x: float) -> float: ...
+    def __mul__(self, x: float) -> float: ...
+    def __floordiv__(self, x: float) -> float: ...
+    def __div__(self, x: float) -> float: ...
+    def __truediv__(self, x: float) -> float: ...
+    def __mod__(self, x: float) -> float: ...
+    def __pow__(self, x: float) -> float: ...
+    def __radd__(self, x: float) -> float: ...
+    def __rsub__(self, x: float) -> float: ...
+    def __rmul__(self, x: float) -> float: ...
+    def __rfloordiv__(self, x: float) -> float: ...
+    def __rdiv__(self, x: float) -> float: ...
+    def __rtruediv__(self, x: float) -> float: ...
+    def __rmod__(self, x: float) -> float: ...
+    def __rpow__(self, x: float) -> float: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: float) -> bool: ...
+    def __le__(self, x: float) -> bool: ...
+    def __gt__(self, x: float) -> bool: ...
+    def __ge__(self, x: float) -> bool: ...
+    def __neg__(self) -> float: ...
+    def __pos__(self) -> float: ...
+
+    def __str__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: return self
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class complex(SupportsAbs[float]):
+    @overload
+    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    @overload
+    def __init__(self, s: str) -> None: ...
+
+    @property
+    def real(self) -> float: ...
+    @property
+    def imag(self) -> float: ...
+
+    def conjugate(self) -> complex: ...
+
+    def __add__(self, x: complex) -> complex: ...
+    def __sub__(self, x: complex) -> complex: ...
+    def __mul__(self, x: complex) -> complex: ...
+    def __pow__(self, x: complex) -> complex: ...
+    def __div__(self, x: complex) -> complex: ...
+    def __truediv__(self, x: complex) -> complex: ...
+    def __radd__(self, x: complex) -> complex: ...
+    def __rsub__(self, x: complex) -> complex: ...
+    def __rmul__(self, x: complex) -> complex: ...
+    def __rpow__(self, x: complex) -> complex: ...
+    def __rdiv__(self, x: complex) -> complex: ...
+    def __rtruediv__(self, x: complex) -> complex: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __neg__(self) -> complex: ...
+    def __pos__(self) -> complex: ...
+
+    def __str__(self) -> str: ...
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class basestring(metaclass=ABCMeta): ...
+
+class unicode(basestring, Sequence[unicode]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, o: object) -> None: ...
+    @overload
+    def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ...
+    def capitalize(self) -> unicode: ...
+    def center(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def count(self, x: unicode) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
+    def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+                 end: int = ...) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> unicode: ...
+    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def format(self, *args: Any, **kwargs: Any) -> unicode: ...
+    def format_map(self, map: Mapping[unicode, Any]) -> unicode: ...
+    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdecimal(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def isidentifier(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isnumeric(self) -> bool: ...
+    def isprintable(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[unicode]) -> unicode: ...
+    def ljust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def lower(self) -> unicode: ...
+    def lstrip(self, chars: unicode = ...) -> unicode: ...
+    def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ...
+    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def rsplit(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def rstrip(self, chars: unicode = ...) -> unicode: ...
+    def split(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+                   end: int = ...) -> bool: ...
+    def strip(self, chars: unicode = ...) -> unicode: ...
+    def swapcase(self) -> unicode: ...
+    def title(self) -> unicode: ...
+    def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ...
+    def upper(self) -> unicode: ...
+    def zfill(self, width: int) -> unicode: ...
+
+    @overload
+    def __getitem__(self, i: int) -> unicode: ...
+    @overload
+    def __getitem__(self, s: slice) -> unicode: ...
+    def __getslice__(self, start: int, stop: int) -> unicode: ...
+    def __add__(self, s: unicode) -> unicode: ...
+    def __mul__(self, n: int) -> unicode: ...
+    def __mod__(self, x: Any) -> unicode: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: unicode) -> bool: ...
+    def __le__(self, x: unicode) -> bool: ...
+    def __gt__(self, x: unicode) -> bool: ...
+    def __ge__(self, x: unicode) -> bool: ...
+
+    def __len__(self) -> int: ...
+    def __contains__(self, s: object) -> bool: ...
+    def __iter__(self) -> Iterator[unicode]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class str(basestring, Sequence[str]):
+    def __init__(self, object: object) -> None: ...
+    def capitalize(self) -> str: ...
+    def center(self, width: int, fillchar: str = ...) -> str: ...
+    def count(self, x: unicode) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
+    def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def format(self, *args: Any, **kwargs: Any) -> str: ...
+    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ...
+    def ljust(self, width: int, fillchar: str = ...) -> str: ...
+    def lower(self) -> str: ...
+    @overload
+    def lstrip(self, chars: str = ...) -> str: ...
+    @overload
+    def lstrip(self, chars: unicode) -> unicode: ...
+    @overload
+    def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
+    @overload
+    def partition(self, sep: str) -> Tuple[str, str, str]: ...
+    @overload
+    def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ...
+    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: str = ...) -> str: ...
+    @overload
+    def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
+    @overload
+    def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
+    @overload
+    def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    @overload
+    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    @overload
+    def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
+    @overload
+    def rstrip(self, chars: str = ...) -> str: ...
+    @overload
+    def rstrip(self, chars: unicode) -> unicode: ...
+    @overload
+    def split(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    @overload
+    def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
+    def splitlines(self, keepends: bool = ...) -> List[str]: ...
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
+    @overload
+    def strip(self, chars: str = ...) -> str: ...
+    @overload
+    def strip(self, chars: unicode) -> unicode: ...
+    def swapcase(self) -> str: ...
+    def title(self) -> str: ...
+    def translate(self, table: AnyStr, deletechars: AnyStr = None) -> AnyStr: ...
+    def upper(self) -> str: ...
+    def zfill(self, width: int) -> str: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> str: ...
+    @overload
+    def __getitem__(self, s: slice) -> str: ...
+    def __getslice__(self, start: int, stop: int) -> str: ...
+    def __add__(self, s: AnyStr) -> AnyStr: ...
+    def __mul__(self, n: int) -> str: ...
+    def __rmul__(self, n: int) -> str: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: unicode) -> bool: ...
+    def __le__(self, x: unicode) -> bool: ...
+    def __gt__(self, x: unicode) -> bool: ...
+    def __ge__(self, x: unicode) -> bool: ...
+    def __mod__(self, x: Any) -> str: ...
+
+class bytearray(Sequence[int]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: Union[Iterable[int], str]) -> None: ...
+    @overload
+    def __init__(self, x: unicode, encoding: unicode,
+                 errors: unicode = ...) -> None: ...
+    @overload
+    def __init__(self, length: int) -> None: ...
+    def capitalize(self) -> bytearray: ...
+    def center(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def count(self, x: str) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
+    def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[str]) -> bytearray: ...
+    def ljust(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def lower(self) -> bytearray: ...
+    def lstrip(self, chars: str = ...) -> bytearray: ...
+    def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def replace(self, old: str, new: str, count: int = ...) -> bytearray: ...
+    def rfind(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def rstrip(self, chars: str = ...) -> bytearray: ...
+    def split(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
+    def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ...
+    def strip(self, chars: str = ...) -> bytearray: ...
+    def swapcase(self) -> bytearray: ...
+    def title(self) -> bytearray: ...
+    def translate(self, table: str) -> bytearray: ...
+    def upper(self) -> bytearray: ...
+    def zfill(self, width: int) -> bytearray: ...
+    @staticmethod
+    def fromhex(self, x: str) -> bytearray: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> bytearray: ...
+    def __getslice__(self, start: int, stop: int) -> bytearray: ...
+    @overload
+    def __setitem__(self, i: int, x: int) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, x: Union[Sequence[int], str]) -> None: ...
+    def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ...
+    @overload
+    def __delitem__(self, i: int) -> None: ...
+    @overload
+    def __delitem__(self, s: slice) -> None: ...
+    def __delslice__(self, start: int, stop: int) -> None: ...
+    def __add__(self, s: str) -> bytearray: ...
+    def __mul__(self, n: int) -> bytearray: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: str) -> bool: ...
+    def __le__(self, x: str) -> bool: ...
+    def __gt__(self, x: str) -> bool: ...
+    def __ge__(self, x: str) -> bool: ...
+
+class bool(int, SupportsInt, SupportsFloat):
+    def __init__(self, o: object = ...) -> None: ...
+
+class slice:
+    start = 0
+    step = 0
+    stop = 0
+    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+
+class tuple(Sequence[_T_co], Generic[_T_co]):
+    def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, x: object) -> bool: ...
+    @overload
+    def __getitem__(self, x: int) -> _T_co: ...
+    @overload
+    def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ...
+    def __iter__(self) -> Iterator[_T_co]: ...
+    def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __le__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ...
+    def __mul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def count(self, x: Any) -> int: ...
+    def index(self, x: Any) -> int: ...
+
+class function:
+    # TODO name of the class (corresponds to Python 'function' class)
+    __name__ = ...  # type: str
+    __module__ = ...  # type: str
+
+class list(MutableSequence[_T], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def append(self, object: _T) -> None: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def pop(self, index: int = -1) -> _T: ...
+    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def count(self, object: _T) -> int: ...
+    def insert(self, index: int, object: _T) -> None: ...
+    def remove(self, object: _T) -> None: ...
+    def reverse(self) -> None: ...
+    def sort(self, *, key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> _T: ...
+    @overload
+    def __getitem__(self, s: slice) -> List[_T]: ...
+    def __getslice__(self, start: int, stop: int) -> List[_T]: ...
+    @overload
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: ...
+    def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    def __delslice__(self, start: int, stop: int) -> None: ...
+    def __add__(self, x: List[_T]) -> List[_T]: ...
+    def __iadd__(self, x: Iterable[_T]) -> List[_T]: ...
+    def __mul__(self, n: int) -> List[_T]: ...
+    def __rmul__(self, n: int) -> List[_T]: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __gt__(self, x: List[_T]) -> bool: ...
+    def __ge__(self, x: List[_T]) -> bool: ...
+    def __lt__(self, x: List[_T]) -> bool: ...
+    def __le__(self, x: List[_T]) -> bool: ...
+
+class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...  # TODO keyword args
+
+    def has_key(self, k: _KT) -> bool: ...
+    def clear(self) -> None: ...
+    def copy(self) -> Dict[_KT, _VT]: ...
+    def get(self, k: _KT, default: _VT = None) -> _VT: ...
+    def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def update(self, m: Union[Mapping[_KT, _VT],
+                              Iterable[Tuple[_KT, _VT]]]) -> None: ...
+    def keys(self) -> List[_KT]: ...
+    def values(self) -> List[_VT]: ...
+    def items(self) -> List[Tuple[_KT, _VT]]: ...
+    def iterkeys(self) -> Iterator[_KT]: ...
+    def itervalues(self) -> Iterator[_VT]: ...
+    def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
+    def __len__(self) -> int: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __str__(self) -> str: ...
+
+class set(MutableSet[_T], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def add(self, element: _T) -> None: ...
+    def clear(self) -> None: ...
+    def copy(self) -> set[_T]: ...
+    def difference(self, s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def discard(self, element: _T) -> None: ...
+    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def pop(self) -> _T: ...
+    def remove(self, element: _T) -> None: ...
+    def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+    def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
+    def union(self, s: Iterable[_T]) -> set[_T]: ...
+    def update(self, s: Iterable[_T]) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    # TODO more set operations
+
+class frozenset(AbstractSet[_T], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def copy(self) -> frozenset[_T]: ...
+    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+
+class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
+    def next(self) -> Tuple[int, _T]: ...
+    # TODO __getattribute__
+
+class xrange(Sized, Iterable[int], Reversible[int]):
+    @overload
+    def __init__(self, stop: int) -> None: ...
+    @overload
+    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __getitem__(self, i: int) -> int: ...
+    def __reversed__(self) -> Iterator[int]: ...
+
+class module:
+    __name__ = ...  # type: str
+    __file__ = ...  # type: str
+    __dict__ = ...  # type: Dict[unicode, Any]
+
+True = ...  # type: bool
+False = ...  # type: bool
+__debug__ = False
+
+long = int
+bytes = str
+
+NotImplemented = ...  # type: Any
+
+def abs(n: SupportsAbs[_T]) -> _T: ...
+def all(i: Iterable) -> bool: ...
+def any(i: Iterable) -> bool: ...
+def bin(number: int) -> str: ...
+def callable(o: object) -> bool: ...
+def chr(code: int) -> str: ...
+def compile(source: Any, filename: unicode, mode: str, flags: int = 0,
+            dont_inherit: int = 0) -> Any: ...
+def delattr(o: Any, name: unicode) -> None: ...
+def dir(o: object = ...) -> List[str]: ...
+@overload
+def divmod(a: int, b: int) -> Tuple[int, int]: ...
+@overload
+def divmod(a: float, b: float) -> Tuple[float, float]: ...
+def exit(code: int = ...) -> None: ...
+def filter(function: Callable[[_T], Any],
+           iterable: Iterable[_T]) -> List[_T]: ...
+def format(o: object, format_spec: str = '') -> str: ...  # TODO unicode
+def getattr(o: Any, name: unicode, default: Any = None) -> Any: ...
+def hasattr(o: Any, name: unicode) -> bool: ...
+def hash(o: object) -> int: ...
+def hex(i: int) -> str: ...  # TODO __index__
+def id(o: object) -> int: ...
+def input(prompt: unicode = ...) -> Any: ...
+def intern(string: str) -> str: ...
+@overload
+def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
+@overload
+def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
+def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: type) -> bool: ...
+# TODO support this
+#def issubclass(cls: type, classinfo: Sequence[type]) -> bool: ...
+def len(o: Sized) -> int: ...
+@overload
+def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ...
+@overload
+def map(func: Callable[[_T1, _T2], _S],
+        iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
+@overload
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+# TODO memoryview
+@overload
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def next(i: Iterator[_T]) -> _T: ...
+@overload
+def next(i: Iterator[_T], default: _T) -> _T: ...
+def oct(i: int) -> str: ...  # TODO __index__
+@overload
+def open(file: str, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+@overload
+def open(file: unicode, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+@overload
+def open(file: int, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+def ord(c: unicode) -> int: ...
+# This is only available after from __future__ import print_function.
+def print(*values: Any, sep: unicode = u' ', end: unicode = u'\n',
+           file: IO[Any] = ...) -> None: ...
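+# Hedged usage sketch (editor's illustration, not part of the upstream stub): once the
+# future import is in effect, print is an ordinary function taking the keyword
+# arguments declared above, e.g.
+#     from __future__ import print_function
+#     import sys
+#     print("ready", "steady", sep=", ", end="!\n", file=sys.stderr)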
+@overload
+def pow(x: int, y: int) -> Any: ...  # The return type can be int or float, depending on y.
+@overload
+def pow(x: int, y: int, z: int) -> Any: ...
+@overload
+def pow(x: float, y: float) -> float: ...
+@overload
+def pow(x: float, y: float, z: float) -> float: ...
+def quit(code: int = ...) -> None: ...
+def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
+def raw_input(prompt: unicode = ...) -> str: ...
+
+def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], initializer: _T = None) -> _T: ...
+
+def reload(module: Any) -> Any: ...
+@overload
+def reversed(object: Reversible[_T]) -> Iterator[_T]: ...
+@overload
+def reversed(object: Sequence[_T]) -> Iterator[_T]: ...
+def repr(o: object) -> str: ...
+@overload
+def round(number: float) -> int: ...
+@overload
+def round(number: float, ndigits: int) -> float: ...  # Always return a float if given ndigits.
+@overload
+def round(number: SupportsRound[_T]) -> _T: ...
+@overload
+def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
+def setattr(object: Any, name: unicode, value: Any) -> None: ...
+def sorted(iterable: Iterable[_T], *,
+           cmp: Callable[[_T, _T], int] = ...,
+           key: Callable[[_T], Any] = ...,
+           reverse: bool = ...) -> List[_T]: ...
+def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+def unichr(i: int) -> unicode: ...
+def vars(object: Any = ...) -> Dict[str, Any]: ...
+@overload
+def zip(iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ...
+@overload
+def zip(iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+        iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+        iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2,
+                                           _T3, _T4]]: ...  # TODO more than four iterables
+def __import__(name: unicode,
+               globals: Dict[str, Any] = ...,
+               locals: Dict[str, Any] = ...,
+               fromlist: List[str] = ..., level: int = ...) -> Any: ...
+
+def globals() -> Dict[str, Any]: ...
+def locals() -> Dict[str, Any]: ...
+
+# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
+# not exposed anywhere under that name, we make it private here.
+class ellipsis: ...
+Ellipsis = ...  # type: ellipsis
+
+# TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check.
+AnyBuffer = TypeVar('AnyBuffer', str, unicode, bytearray, buffer)
+
+class buffer(Sized):
+    def __init__(self, object: AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
+    def __add__(self, other: AnyBuffer) -> str: ...
+    def __cmp__(self, other: AnyBuffer) -> bool: ...
+    def __getitem__(self, key: Union[int, slice]) -> str: ...
+    def __getslice__(self, i: int, j: int) -> str: ...
+    def __len__(self) -> int: ...
+    def __mul__(self, x: int) -> str: ...
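+# Hedged sketch of the limitation noted above (editor's illustration, not part of the stub):
+#     b = buffer("abcdef", 2, 3)      # a buffer viewing the slice "cde"
+#     "abcdef".startswith(b)          # the call the TODO refers to; it does not type check here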
+
+class BaseException:
+    args = ...  # type: Any
+    message = ...  # type: str
+    def __init__(self, *args: Any) -> None: ...
+    def with_traceback(self, tb: Any) -> BaseException: ...
+class GeneratorExit(BaseException): ...
+class KeyboardInterrupt(BaseException): ...
+class SystemExit(BaseException):
+    code = 0
+class Exception(BaseException): ...
+class StopIteration(Exception): ...
+class StandardError(Exception): ...
+class ArithmeticError(StandardError): ...
+class BufferError(StandardError): ...
+class EnvironmentError(StandardError):
+    errno = 0
+    strerror = ...  # type: str
+    # TODO can this be unicode?
+    filename = ...  # type: str
+class LookupError(StandardError): ...
+class RuntimeError(StandardError): ...
+class ValueError(StandardError): ...
+class AssertionError(StandardError): ...
+class AttributeError(StandardError): ...
+class EOFError(StandardError): ...
+class FloatingPointError(ArithmeticError): ...
+class IOError(EnvironmentError): ...
+class ImportError(StandardError): ...
+class IndexError(LookupError): ...
+class KeyError(LookupError): ...
+class MemoryError(StandardError): ...
+class NameError(StandardError): ...
+class NotImplementedError(RuntimeError): ...
+class OSError(EnvironmentError): ...
+class WindowsError(OSError): ...
+class OverflowError(ArithmeticError): ...
+class ReferenceError(StandardError): ...
+class SyntaxError(StandardError): ...
+class IndentationError(SyntaxError): ...
+class TabError(IndentationError): ...
+class SystemError(StandardError): ...
+class TypeError(StandardError): ...
+class UnboundLocalError(NameError): ...
+class UnicodeError(ValueError): ...
+class UnicodeDecodeError(UnicodeError): ...
+class UnicodeEncodeError(UnicodeError): ...
+class UnicodeTranslateError(UnicodeError): ...
+class ZeroDivisionError(ArithmeticError): ...
+
+class Warning(Exception): ...
+class UserWarning(Warning): ...
+class DeprecationWarning(Warning): ...
+class SyntaxWarning(Warning): ...
+class RuntimeWarning(Warning): ...
+class FutureWarning(Warning): ...
+class PendingDeprecationWarning(Warning): ...
+class ImportWarning(Warning): ...
+class UnicodeWarning(Warning): ...
+class BytesWarning(Warning): ...
+class ResourceWarning(Warning): ...
+
+def eval(s: str) -> Any: ...
+
+def cmp(x: Any, y: Any) -> int: ...
+
+def execfile(filename: str, globals: Dict[str, Any] = None, locals: Dict[str, Any] = None) -> None: ...
+
+class file(BinaryIO):
+    @overload
+    def __init__(self, file: str, mode: str = 'r', buffering: int = ...) -> None: ...
+    @overload
+    def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
+    @overload
+    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def read(self, n: int = ...) -> str: ...
+    def __enter__(self) -> BinaryIO: ...
+    def __exit__(self, typ, exc, tb) -> bool: ...
+    def flush(self) -> None: ...
+    def fileno(self) -> int: ...
+    def isatty(self) -> bool: ...
+    def close(self) -> None: ...
+
+    def readable(self) -> bool: ...
+    def writable(self) -> bool: ...
+    def seekable(self) -> bool: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def write(self, data: str) -> None: ...
+    def writelines(self, data: Iterable[str]) -> None: ...
+    def truncate(self, pos: int = ...) -> int: ...
diff --git a/typeshed/stdlib/2.7/__future__.pyi b/typeshed/stdlib/2.7/__future__.pyi
new file mode 100644
index 0000000..e863874
--- /dev/null
+++ b/typeshed/stdlib/2.7/__future__.pyi
@@ -0,0 +1,9 @@
+class _Feature: ...
+
+absolute_import = None  # type: _Feature
+division = None  # type: _Feature
+generators = None  # type: _Feature
+nested_scopes = None  # type: _Feature
+print_function = None  # type: _Feature
+unicode_literals = None  # type: _Feature
+with_statement = None  # type: _Feature
diff --git a/typeshed/stdlib/2.7/_ast.pyi b/typeshed/stdlib/2.7/_ast.pyi
new file mode 100644
index 0000000..bd8d259
--- /dev/null
+++ b/typeshed/stdlib/2.7/_ast.pyi
@@ -0,0 +1,516 @@
+from typing import Any
+from typing import Tuple as TypingTuple
+
+__version__ = ...  # type: int
+
+PyCF_ONLY_AST = ...  # type: int
+
+class AST(object):
+    _attributes = ...  # type: TypingTuple[str]
+    _fields = ...  # type: TypingTuple[str]
+    def __init__(self, *args, **kwargs) -> None: pass
+
+class alias(AST):
+    pass
+
+class arguments(AST):
+    pass
+
+class boolop(AST):
+    pass
+
+class cmpop(AST):
+    pass
+
+class comprehension(AST):
+    pass
+
+class excepthandler(AST):
+    pass
+
+class expr(AST):
+    pass
+
+class expr_context(AST):
+    pass
+
+class keyword(AST):
+    pass
+
+class mod(AST):
+    pass
+
+class operator(AST):
+    pass
+
+class slice(AST):
+    pass
+
+class stmt(AST):
+    pass
+
+class unaryop(AST):
+    pass
+
+
+class Add(operator):
+    def __init__(self) -> None:
+        pass
+
+class And(boolop):
+    def __init__(self) -> None:
+        pass
+
+class Assert(stmt):
+    test = ...  # type: Any
+    msg = ...  # type: Any
+    def __init__(self, test = ..., msg = ...) -> None:
+        pass
+
+class Assign(stmt):
+    targets = ...  # type: Any
+    value = ...  # type: Any
+    def __init__(self, targets = ..., value = ...) -> None:
+        pass
+
+class Attribute(expr):
+    value = ...  # type: Any
+    attr = ...  # type: Any
+    ctx = ...  # type: Any
+    def __init__(self, value = ..., attr = ..., ctx = ...) -> None:
+        pass
+
+class AugAssign(stmt):
+    target = ...  # type: Any
+    op = ...  # type: Any
+    value = ...  # type: Any
+    def __init__(self, target = ..., op = ..., value = ...) -> None:
+        pass
+
+class AugLoad(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class AugStore(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class BinOp(expr):
+    left = ...  # type: Any
+    op = ...  # type: Any
+    right = ...  # type: Any
+    def __init__(self, left = ..., op = ..., right = ...) -> None:
+        pass
+
+class BitAnd(operator):
+    def __init__(self) -> None:
+        pass
+
+class BitOr(operator):
+    def __init__(self) -> None:
+        pass
+
+class BitXor(operator):
+    def __init__(self) -> None:
+        pass
+
+class BoolOp(expr):
+    op = ...  # type: Any
+    values = ...  # type: Any
+    def __init__(self, op = ..., values = ...) -> None:
+        pass
+
+class Break(stmt):
+    def __init__(self) -> None:
+        pass
+
+class Call(expr):
+    func = ...  # type: Any
+    args = ...  # type: Any
+    keywords = ...  # type: Any
+    starargs = ...  # type: Any
+    kwargs = ...  # type: Any
+    def __init__(self, func = ..., args = ..., keywords = ..., starargs = ..., kwargs = ...) -> None:
+        pass
+
+class ClassDef(stmt):
+    name = ...  # type: Any
+    bases = ...  # type: Any
+    body = ...  # type: Any
+    decorator_list = ...  # type: Any
+    def __init__(self, name = ..., bases = ..., body = ..., decorator_list = ...) -> None:
+        pass
+
+class Compare(expr):
+    left = ...  # type: Any
+    ops = ...  # type: Any
+    comparators = ...  # type: Any
+    def __init__(self, left = ..., ops = ..., comparators = ...) -> None:
+        pass
+
+class Continue(stmt):
+    def __init__(self) -> None:
+        pass
+
+class Del(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class Delete(stmt):
+    targets = ...  # type: Any
+    def __init__(self, targets = ...) -> None:
+        pass
+
+class Dict(expr):
+    keys = ...  # type: Any
+    values = ...  # type: Any
+    def __init__(self, keys = ..., values = ...) -> None:
+        pass
+
+class DictComp(expr):
+    key = ...  # type: Any
+    value = ...  # type: Any
+    generators = ...  # type: Any
+    def __init__(self, key = ..., value = ..., generators = ...) -> None:
+        pass
+
+class Div(operator):
+    def __init__(self) -> None:
+        pass
+
+class Ellipsis(slice):
+    def __init__(self) -> None:
+        pass
+
+class Eq(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class ExceptHandler(excepthandler):
+    type = ...  # type: Any
+    name = ...  # type: Any
+    body = ...  # type: Any
+    def __init__(self, type = ..., name = ..., body = ...) -> None:
+        pass
+
+class Exec(stmt):
+    body = ...  # type: Any
+    globals = ...  # type: Any
+    locals = ...  # type: Any
+    def __init__(self, body = ..., globals = ..., locals = ...) -> None:
+        pass
+
+class Expr(stmt):
+    value = ...  # type: Any
+    def __init__(self, value = ...) -> None:
+        pass
+
+class Expression(mod):
+    body = ...  # type: Any
+    def __init__(self, body = ...) -> None:
+        pass
+
+class ExtSlice(slice):
+    dims = ...  # type: Any
+    def __init__(self, dims = ...) -> None:
+        pass
+
+class FloorDiv(operator):
+    def __init__(self) -> None:
+        pass
+
+class For(stmt):
+    target = ...  # type: Any
+    iter = ...  # type: Any
+    body = ...  # type: Any
+    orelse = ...  # type: Any
+    def __init__(self, target = ..., iter = ..., body = ..., orelse = ...) -> None:
+        pass
+
+class FunctionDef(stmt):
+    name = ...  # type: Any
+    args = ...  # type: Any
+    body = ...  # type: Any
+    decorator_list = ...  # type: Any
+    def __init__(self, name = ..., args = ..., body = ..., decorator_list = ...) -> None:
+        pass
+
+class GeneratorExp(expr):
+    elt = ...  # type: Any
+    generators = ...  # type: Any
+    def __init__(self, elt = ..., generators = ...) -> None:
+        pass
+
+class Global(stmt):
+    names = ...  # type: Any
+    def __init__(self, names = ...) -> None:
+        pass
+
+class Gt(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class GtE(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class If(stmt):
+    test = ...  # type: Any
+    body = ...  # type: Any
+    orelse = ...  # type: Any
+    def __init__(self, test = ..., body = ..., orelse = ...) -> None:
+        pass
+
+class IfExp(expr):
+    test = ...  # type: Any
+    body = ...  # type: Any
+    orelse = ...  # type: Any
+    def __init__(self, test = ..., body = ..., orelse = ...) -> None:
+        pass
+
+class Import(stmt):
+    names = ...  # type: Any
+    def __init__(self, names = ...) -> None:
+        pass
+
+class ImportFrom(stmt):
+    module = ...  # type: Any
+    names = ...  # type: Any
+    level = ...  # type: Any
+    def __init__(self, module = ..., names = ..., level = ...) -> None:
+        pass
+
+class In(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class Index(slice):
+    value = ...  # type: Any
+    def __init__(self, value = ...) -> None:
+        pass
+
+class Interactive(mod):
+    body = ...  # type: Any
+    def __init__(self, body = ...) -> None:
+        pass
+
+class Invert(unaryop):
+    def __init__(self) -> None:
+        pass
+
+class Is(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class IsNot(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class LShift(operator):
+    def __init__(self) -> None:
+        pass
+
+class Lambda(expr):
+    args = ...  # type: Any
+    body = ...  # type: Any
+    def __init__(self, args = ..., body = ...) -> None:
+        pass
+
+class List(expr):
+    elts = ...  # type: Any
+    ctx = ...  # type: Any
+    def __init__(self, elts = ..., ctx = ...) -> None:
+        pass
+
+class ListComp(expr):
+    elt = ...  # type: Any
+    generators = ...  # type: Any
+    def __init__(self, elt = ..., generators = ...) -> None:
+        pass
+
+class Load(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class Lt(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class LtE(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class Mod(operator):
+    def __init__(self) -> None:
+        pass
+
+class Module(mod):
+    body = ...  # type: Any
+    def __init__(self, body = ...) -> None:
+        pass
+
+class Mult(operator):
+    def __init__(self) -> None:
+        pass
+
+class Name(expr):
+    id = ...  # type: Any
+    ctx = ...  # type: Any
+    def __init__(self, id = ..., ctx = ...) -> None:
+        pass
+
+class Not(unaryop):
+    def __init__(self) -> None:
+        pass
+
+class NotEq(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class NotIn(cmpop):
+    def __init__(self) -> None:
+        pass
+
+class Num(expr):
+    n = ...  # type: Any
+    def __init__(self, n = ...) -> None:
+        pass
+
+class Or(boolop):
+    def __init__(self) -> None:
+        pass
+
+class Param(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class Pass(stmt):
+    def __init__(self) -> None:
+        pass
+
+class Pow(operator):
+    def __init__(self) -> None:
+        pass
+
+class Print(stmt):
+    dest = ...  # type: Any
+    values = ...  # type: Any
+    nl = ...  # type: Any
+    def __init__(self, dest = ..., values = ..., nl = ...) -> None:
+        pass
+
+class RShift(operator):
+    def __init__(self) -> None:
+        pass
+
+class Raise(stmt):
+    type = ...  # type: Any
+    inst = ...  # type: Any
+    tback = ...  # type: Any
+    def __init__(self, type = ..., inst = ..., tback = ...) -> None:
+        pass
+
+class Repr(expr):
+    value = ...  # type: Any
+    def __init__(self, value = ...) -> None:
+        pass
+
+class Return(stmt):
+    value = ...  # type: Any
+    def __init__(self, value = ...) -> None:
+        pass
+
+class Set(expr):
+    elts = ...  # type: Any
+    def __init__(self, elts = ...) -> None:
+        pass
+
+class SetComp(expr):
+    elt = ...  # type: Any
+    generators = ...  # type: Any
+    def __init__(self, elt = ..., generators = ...) -> None:
+        pass
+
+class Slice(slice):
+    lower = ...  # type: Any
+    upper = ...  # type: Any
+    step = ...  # type: Any
+    def __init__(self, lower = ..., upper = ..., step = ...) -> None:
+        pass
+
+class Store(expr_context):
+    def __init__(self) -> None:
+        pass
+
+class Str(expr):
+    s = ...  # type: Any
+    def __init__(self, s = ...) -> None:
+        pass
+
+class Sub(operator):
+    def __init__(self) -> None:
+        pass
+
+class Subscript(expr):
+    value = ...  # type: Any
+    slice = ...  # type: Any
+    ctx = ...  # type: Any
+    def __init__(self, value = ..., slice = ..., ctx = ...) -> None:
+        pass
+
+class Suite(mod):
+    body = ...  # type: Any
+    def __init__(self, body = ...) -> None:
+        pass
+
+class TryExcept(stmt):
+    body = ...  # type: Any
+    handlers = ...  # type: Any
+    orelse = ...  # type: Any
+    def __init__(self, body = ..., handlers = ..., orelse = ...) -> None:
+        pass
+
+class TryFinally(stmt):
+    body = ...  # type: Any
+    finalbody = ...  # type: Any
+    def __init__(self, body = ..., finalbody = ...) -> None:
+        pass
+
+class Tuple(expr):
+    elts = ...  # type: Any
+    ctx = ...  # type: Any
+    def __init__(self, elts = ..., ctx = ...) -> None:
+        pass
+
+class UAdd(unaryop):
+    def __init__(self) -> None:
+        pass
+
+class USub(unaryop):
+    def __init__(self) -> None:
+        pass
+
+class UnaryOp(expr):
+    op = ...  # type: Any
+    operand = ...  # type: Any
+    def __init__(self, op = ..., operand = ...) -> None:
+        pass
+
+class While(stmt):
+    test = ...  # type: Any
+    body = ...  # type: Any
+    orelse = ...  # type: Any
+    def __init__(self, test = ..., body = ..., orelse = ...) -> None:
+        pass
+
+class With(stmt):
+    context_expr = ...  # type: Any
+    optional_vars = ...  # type: Any
+    body = ...  # type: Any
+    def __init__(self, context_expr = ..., optional_vars = ..., body = ...) -> None:
+        pass
+
+class Yield(expr):
+    value = ...  # type: Any
+    def __init__(self, value = ...) -> None:
+        pass
diff --git a/typeshed/stdlib/2.7/_codecs.pyi b/typeshed/stdlib/2.7/_codecs.pyi
new file mode 100644
index 0000000..45a18de
--- /dev/null
+++ b/typeshed/stdlib/2.7/_codecs.pyi
@@ -0,0 +1,55 @@
+"""Stub file for the '_codecs' module."""
+
+from typing import Any, AnyStr, Callable, Tuple, Optional
+
+import codecs
+
+# For convenience:
+_Handler = Callable[[Exception], Tuple[unicode, int]]
+
+# Not exposed. In Python 2, this is defined in unicode.c:
+class _EncodingMap(object):
+    def size(self) -> int: ...
+
+def register(search_function: Callable[[str], Any]) -> None: ...
+def register_error(errors: str, handler: _Handler) -> None: ...
+def lookup(a: str) -> codecs.CodecInfo: ...
+def lookup_error(a: str) -> _Handler: ...
+def decode(obj: Any, encoding:str = ..., errors:str = ...) -> Any: ...
+def encode(obj: Any, encoding:str = ..., errors:str = ...) -> Any: ...
+def charmap_build(a: unicode) -> _EncodingMap: ...
+
+def ascii_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def ascii_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def charbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
+def charmap_decode(data: AnyStr, errors: str = ..., mapping: Optional[_EncodingMap] = ...) -> Tuple[unicode, int]: ...
+def charmap_encode(data: AnyStr, errors: str, mapping: Optional[_EncodingMap] = ...) -> Tuple[str, int]: ...
+def escape_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def escape_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def latin_1_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def latin_1_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def raw_unicode_escape_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def raw_unicode_escape_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def readbuffer_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def unicode_escape_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def unicode_escape_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def unicode_internal_decode(data: AnyStr, errors:str = ...) -> Tuple[unicode, int]: ...
+def unicode_internal_encode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def utf_16_be_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_16_be_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_16_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_ex_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_16_le_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_16_le_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_be_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_32_be_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_32_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_ex_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_32_le_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_32_le_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_7_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_7_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_8_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[unicode, int]: ...
+def utf_8_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
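+# Hedged sketch (editor's illustration, not part of the stub): an error handler matching
+# _Handler returns a replacement string and the position at which decoding resumes:
+#     def _qmark(exc):                         # hypothetical handler name
+#         return (u"?", exc.end)
+#     register_error("qmark", _qmark)
+#     decode("abc\xff", "ascii", "qmark")      # -> u"abc?"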
diff --git a/typeshed/stdlib/2.7/_collections.pyi b/typeshed/stdlib/2.7/_collections.pyi
new file mode 100644
index 0000000..156cda0
--- /dev/null
+++ b/typeshed/stdlib/2.7/_collections.pyi
@@ -0,0 +1,41 @@
+"""Stub file for the '_collections' module."""
+
+from typing import Any, Generic, Iterator, TypeVar, Optional, Union
+
+class defaultdict(dict):
+    default_factory = ...  # type: None
+    def __init__(self, default: Any = ..., init: Any = ...) -> None: ...
+    def __missing__(self, key) -> Any:
+        raise KeyError()
+    def __copy__(self) -> "defaultdict": ...
+    def copy(self) -> "defaultdict": ...
+
+_T = TypeVar('_T')
+_T2 = TypeVar('_T2')
+
+class deque(Generic[_T]):
+    maxlen = ...  # type: Optional[int]
+    def __init__(self, iterable: Iterator[_T] = ..., maxlen: int = ...) -> None: ...
+    def append(self, x: _T) -> None: ...
+    def appendleft(self, x: _T) -> None: ...
+    def clear(self) -> None: ...
+    def count(self, x: Any) -> int: ...
+    def extend(self, iterable: Iterator[_T]) -> None: ...
+    def extendleft(self, iterable: Iterator[_T]) -> None: ...
+    def pop(self) -> _T:
+        raise IndexError()
+    def popleft(self) -> _T:
+        raise IndexError()
+    def remove(self, value: _T) -> None:
+        raise IndexError()
+    def reverse(self) -> None: ...
+    def rotate(self, n: int = ...) -> None: ...
+    def __contains__(self, o: Any) -> bool: ...
+    def __copy__(self) -> "deque[_T]": ...
+    def __getitem__(self, i: int) -> _T:
+        raise IndexError()
+    def __iadd__(self, other: "deque[_T2]") -> "deque[Union[_T, _T2]]": ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __len__(self) -> int: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __setitem__(self, i: int, x: _T) -> None: ...
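+# Hedged sketch (editor's illustration, not part of the stub): deque supports bounded
+# length and rotation; the collections module re-exports this class in CPython 2.7:
+#     d = deque([1, 2, 3], maxlen=3)
+#     d.append(4)                     # oldest element is dropped -> 2, 3, 4
+#     d.rotate(1)                     # -> 4, 2, 3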
diff --git a/typeshed/stdlib/2.7/_functools.pyi b/typeshed/stdlib/2.7/_functools.pyi
new file mode 100644
index 0000000..d6245db
--- /dev/null
+++ b/typeshed/stdlib/2.7/_functools.pyi
@@ -0,0 +1,19 @@
+"""Stub file for the '_functools' module."""
+
+from typing import Any, Callable, Dict, Iterator, Optional, TypeVar, Tuple, overload
+
+_T = TypeVar("_T")
+
+@overload
+def reduce(function: Callable[[_T, _T], _T],
+           sequence: Iterator[_T]) -> _T: ...
+@overload
+def reduce(function: Callable[[_T, _T], _T],
+           sequence: Iterator[_T], initial: _T) -> _T: ...
+
+class partial(object):
+    func = ...  # type: Callable[..., Any]
+    args = ...  # type: Tuple[Any, ...]
+    keywords = ...  # type: Dict[str, Any]
+    def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
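+# Hedged sketch (editor's illustration, not part of the stub): partial freezes arguments
+# of an existing callable; the functools module re-exports it in CPython 2.7:
+#     def power(base, exp): return base ** exp      # hypothetical helper
+#     square = partial(power, exp=2)
+#     square(7)                                      # -> 49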
diff --git a/typeshed/stdlib/2.7/_hotshot.pyi b/typeshed/stdlib/2.7/_hotshot.pyi
new file mode 100644
index 0000000..8a9c8d7
--- /dev/null
+++ b/typeshed/stdlib/2.7/_hotshot.pyi
@@ -0,0 +1,34 @@
+"""Stub file for the '_hotshot' module."""
+# This is an autogenerated file. It serves as a starting point
+# for a more precise manual annotation of this module.
+# Feel free to edit the source below, but remove this header when you do.
+
+from typing import Any, List, Tuple, Dict, Generic
+
+def coverage(a: str) -> Any: ...
+
+def logreader(a: str) -> LogReaderType:
+    raise IOError()
+    raise RuntimeError()
+
+def profiler(a: str, *args, **kwargs) -> Any:
+    raise IOError()
+
+def resolution() -> tuple: ...
+
+
+class LogReaderType(object):
+    def close(self) -> None: ...
+    def fileno(self) -> int:
+        raise ValueError()
+
+class ProfilerType(object):
+    def addinfo(self, a: str, b: str) -> None: ...
+    def close(self) -> None: ...
+    def fileno(self) -> int:
+        raise ValueError()
+    def runcall(self, *args, **kwargs) -> Any: ...
+    def runcode(self, a, b, *args, **kwargs) -> Any:
+        raise TypeError()
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
diff --git a/typeshed/stdlib/2.7/_io.pyi b/typeshed/stdlib/2.7/_io.pyi
new file mode 100644
index 0000000..e85da33
--- /dev/null
+++ b/typeshed/stdlib/2.7/_io.pyi
@@ -0,0 +1,107 @@
+from typing import Any, Optional, Iterable, Tuple, List, Union
+
+DEFAULT_BUFFER_SIZE = ...  # type: int
+
+
+class BlockingIOError(IOError):
+    characters_written = ...  # type: int
+
+class UnsupportedOperation(ValueError, IOError): ...
+
+
+class _IOBase(object):
+    closed = ...  # type: bool
+    def __enter__(self) -> "_IOBase": ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+    def __iter__(self) -> "_IOBase": ...
+    def _checkClosed(self) -> None: ...
+    def _checkReadable(self) -> None: ...
+    def _checkSeekable(self) -> None: ...
+    def _checkWritable(self) -> None: ...
+    def close(self) -> None: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def next(self) -> str: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def writelines(self, lines: Iterable[str]) -> None: ...
+
+class _BufferedIOBase(_IOBase):
+    def read1(self, n: int) -> str: ...
+    def read(self, n: int = ...) -> str: ...
+    def readinto(self, buffer: bytearray) -> int: ...
+    def write(self, s: str) -> int: ...
+    def detach(self) -> "_BufferedIOBase": ...
+
+class BufferedRWPair(_BufferedIOBase):
+    def peek(self, n: int = ...) -> str: ...
+
+class BufferedRandom(_BufferedIOBase):
+    name = ...  # type: str
+    raw = ...  # type: _IOBase
+    mode = ...  # type: str
+    def peek(self, n: int = ...) -> str: ...
+
+class BufferedReader(_BufferedIOBase):
+    name = ...  # type: str
+    raw = ...  # type: _IOBase
+    mode = ...  # type: str
+    def peek(self, n: int = ...) -> str: ...
+
+class BufferedWriter(_BufferedIOBase):
+    name = ...  # type: str
+    raw = ...  # type: _IOBase
+    mode = ...  # type: str
+
+class BytesIO(_BufferedIOBase):
+    def __setstate__(self, tuple) -> None: ...
+    def __getstate__(self) -> tuple: ...
+    def getvalue(self) -> str: ...
+
+class _RawIOBase(_IOBase):
+    def readall(self) -> str: ...
+    def read(self, n: int = ...) -> str: ...
+
+class FileIO(_RawIOBase):
+    mode = ...  # type: str
+    closefd = ...  # type: bool
+    def readinto(self, buffer: bytearray) -> int: ...
+    def write(self, pbuf: str) -> int: ...
+
+class IncrementalNewlineDecoder(object):
+    newlines = ...  # type: Union[str, unicode]
+    def decode(self, input, final) -> Any: ...
+    def getstate(self) -> Tuple[Any, int]: ...
+    def setstate(self, state: Tuple[Any, int]) -> None: ...
+    def reset(self) -> None: ...
+
+class _TextIOBase(_IOBase):
+    errors = ...  # type: Optional[str]
+    newlines = ...  # type: Union[str, unicode]
+    encoding = ...  # type: Optional[str]
+    def read(self, n: int = ...) -> str: ...
+    def write(self) -> None:
+        raise UnsupportedOperation
+    def detach(self) -> None:
+        raise UnsupportedOperation
+
+class StringIO(_TextIOBase):
+    line_buffering = ...  # type: bool
+    def getvalue(self) -> str: ...
+    def __setstate__(self, state: tuple) -> None: ...
+    def __getstate__(self) -> tuple: ...
+
+class TextIOWrapper(_TextIOBase):
+    name = ...  # type: str
+    line_buffering = ...  # type: bool
+    buffer = ...  # type: str
+    _CHUNK_SIZE = ...  # type: int
+
+def open(file: Union[int, str], mode: str = ...) -> _IOBase: ...
diff --git a/typeshed/stdlib/2.7/_json.pyi b/typeshed/stdlib/2.7/_json.pyi
new file mode 100644
index 0000000..dbf621d
--- /dev/null
+++ b/typeshed/stdlib/2.7/_json.pyi
@@ -0,0 +1,19 @@
+"""Stub file for the '_json' module."""
+# This is an autogenerated file. It serves as a starting point
+# for a more precise manual annotation of this module.
+# Feel free to edit the source below, but remove this header when you do.
+
+from typing import Any, List, Tuple, Dict, Generic
+
+def encode_basestring_ascii(*args, **kwargs) -> str:
+    raise TypeError()
+
+def scanstring(a, b, *args, **kwargs) -> tuple:
+    raise TypeError()
+
+
+class Encoder(object):
+    pass
+
+class Scanner(object):
+    pass
diff --git a/typeshed/stdlib/2.7/_locale.pyi b/typeshed/stdlib/2.7/_locale.pyi
new file mode 100644
index 0000000..20584fc
--- /dev/null
+++ b/typeshed/stdlib/2.7/_locale.pyi
@@ -0,0 +1,81 @@
+from typing import Optional, Dict, Any
+
+ABDAY_1 = ...  # type: int
+ABDAY_2 = ...  # type: int
+ABDAY_3 = ...  # type: int
+ABDAY_4 = ...  # type: int
+ABDAY_5 = ...  # type: int
+ABDAY_6 = ...  # type: int
+ABDAY_7 = ...  # type: int
+ABMON_1 = ...  # type: int
+ABMON_10 = ...  # type: int
+ABMON_11 = ...  # type: int
+ABMON_12 = ...  # type: int
+ABMON_2 = ...  # type: int
+ABMON_3 = ...  # type: int
+ABMON_4 = ...  # type: int
+ABMON_5 = ...  # type: int
+ABMON_6 = ...  # type: int
+ABMON_7 = ...  # type: int
+ABMON_8 = ...  # type: int
+ABMON_9 = ...  # type: int
+ALT_DIGITS = ...  # type: int
+AM_STR = ...  # type: int
+CHAR_MAX = ...  # type: int
+CODESET = ...  # type: int
+CRNCYSTR = ...  # type: int
+DAY_1 = ...  # type: int
+DAY_2 = ...  # type: int
+DAY_3 = ...  # type: int
+DAY_4 = ...  # type: int
+DAY_5 = ...  # type: int
+DAY_6 = ...  # type: int
+DAY_7 = ...  # type: int
+D_FMT = ...  # type: int
+D_T_FMT = ...  # type: int
+ERA = ...  # type: int
+ERA_D_FMT = ...  # type: int
+ERA_D_T_FMT = ...  # type: int
+ERA_T_FMT = ...  # type: int
+LC_ALL = ...  # type: int
+LC_COLLATE = ...  # type: int
+LC_CTYPE = ...  # type: int
+LC_MESSAGES = ...  # type: int
+LC_MONETARY = ...  # type: int
+LC_NUMERIC = ...  # type: int
+LC_TIME = ...  # type: int
+MON_1 = ...  # type: int
+MON_10 = ...  # type: int
+MON_11 = ...  # type: int
+MON_12 = ...  # type: int
+MON_2 = ...  # type: int
+MON_3 = ...  # type: int
+MON_4 = ...  # type: int
+MON_5 = ...  # type: int
+MON_6 = ...  # type: int
+MON_7 = ...  # type: int
+MON_8 = ...  # type: int
+MON_9 = ...  # type: int
+NOEXPR = ...  # type: int
+PM_STR = ...  # type: int
+RADIXCHAR = ...  # type: int
+THOUSEP = ...  # type: int
+T_FMT = ...  # type: int
+T_FMT_AMPM = ...  # type: int
+YESEXPR = ...  # type: int
+_DATE_FMT = ...  # type: int
+
+class Error(Exception):
+    pass
+
+def bind_textdomain_codeset(domain: Optional[str], codeset: Optional[str]) -> Optional[str]: pass
+def bindtextdomain(domain: Optional[str], dir: Optional[str]) -> str: pass
+def dcgettext(domain: Optional[str], msg: str, category: int) -> str: pass
+def dgettext(domain: Optional[str], msg: str) -> str: pass
+def gettext(msg: str) -> str: pass
+def localeconv() -> Dict[str, Any]: pass
+def nl_langinfo(key: int) -> str: pass
+def setlocale(i: int, s: str) -> str: pass
+def strcoll(left: str, right: str) -> int: pass
+def strxfrm(s: str) -> str: pass
+def textdomain(domain: Optional[str]) -> str: pass
diff --git a/typeshed/stdlib/2.7/_md5.pyi b/typeshed/stdlib/2.7/_md5.pyi
new file mode 100644
index 0000000..862b68f
--- /dev/null
+++ b/typeshed/stdlib/2.7/_md5.pyi
@@ -0,0 +1,13 @@
+blocksize = ...  # type: int
+digest_size = ...  # type: int
+
+class MD5Type(object):
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    def copy(self) -> "MD5Type": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
+
+def new(arg: str = ...) -> MD5Type: ...
diff --git a/typeshed/stdlib/2.7/_random.pyi b/typeshed/stdlib/2.7/_random.pyi
new file mode 100644
index 0000000..060dcd2
--- /dev/null
+++ b/typeshed/stdlib/2.7/_random.pyi
@@ -0,0 +1,13 @@
+from typing import Tuple
+
+# Actually Tuple[(int,) * 625]
+_State = Tuple[int, ...]
+
+class Random(object):
+    def __init__(self, seed: object = ...) -> None: ...
+    def seed(self, x: object = ...) -> None: ...
+    def getstate(self) -> _State: ...
+    def setstate(self, state: _State) -> None: ...
+    def random(self) -> float: ...
+    def getrandbits(self, k: int) -> int: ...
+    def jumpahead(self, i: int) -> None: ...
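+# Hedged sketch (editor's illustration, not part of the stub): the generator state noted
+# above round-trips through getstate()/setstate():
+#     r = Random(42)
+#     s = r.getstate()                # a 625-element tuple of ints
+#     r.setstate(s)                   # restores the exact generator state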
diff --git a/typeshed/stdlib/2.7/_sha.pyi b/typeshed/stdlib/2.7/_sha.pyi
new file mode 100644
index 0000000..eb750e0
--- /dev/null
+++ b/typeshed/stdlib/2.7/_sha.pyi
@@ -0,0 +1,15 @@
+blocksize = ...  # type: int
+block_size = ...  # type: int
+digest_size = ...  # type: int
+
+class sha(object):  # not actually exposed
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    digestsize = ...  # type: int
+    def copy(self) -> "sha": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
+
+def new(arg: str = ...) -> sha: ...
diff --git a/typeshed/stdlib/2.7/_sha256.pyi b/typeshed/stdlib/2.7/_sha256.pyi
new file mode 100644
index 0000000..4cf40c1
--- /dev/null
+++ b/typeshed/stdlib/2.7/_sha256.pyi
@@ -0,0 +1,23 @@
+from typing import Optional
+
+class sha224(object):
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    digestsize = ...  # type: int
+    def __init__(self, init: Optional[str]) -> None: ...
+    def copy(self) -> "sha224": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
+
+class sha256(object):
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    digestsize = ...  # type: int
+    def __init__(self, init: Optional[str]) -> None: ...
+    def copy(self) -> "sha256": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
diff --git a/typeshed/stdlib/2.7/_sha512.pyi b/typeshed/stdlib/2.7/_sha512.pyi
new file mode 100644
index 0000000..f9e4928
--- /dev/null
+++ b/typeshed/stdlib/2.7/_sha512.pyi
@@ -0,0 +1,23 @@
+from typing import Optional
+
+class sha384(object):
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    digestsize = ...  # type: int
+    def __init__(self, init: Optional[str]) -> None: ...
+    def copy(self) -> "sha384": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
+
+class sha512(object):
+    name = ...  # type: str
+    block_size = ...  # type: int
+    digest_size = ...  # type: int
+    digestsize = ...  # type: int
+    def __init__(self, init: Optional[str]) -> None: ...
+    def copy(self) -> "sha512": ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def update(self, arg: str) -> None: ...
diff --git a/typeshed/stdlib/2.7/_socket.pyi b/typeshed/stdlib/2.7/_socket.pyi
new file mode 100644
index 0000000..6f72ba9
--- /dev/null
+++ b/typeshed/stdlib/2.7/_socket.pyi
@@ -0,0 +1,287 @@
+from typing import Tuple, Union, IO, Any, Optional, overload
+
+AF_APPLETALK = ...  # type: int
+AF_ASH = ...  # type: int
+AF_ATMPVC = ...  # type: int
+AF_ATMSVC = ...  # type: int
+AF_AX25 = ...  # type: int
+AF_BLUETOOTH = ...  # type: int
+AF_BRIDGE = ...  # type: int
+AF_DECnet = ...  # type: int
+AF_ECONET = ...  # type: int
+AF_INET = ...  # type: int
+AF_INET6 = ...  # type: int
+AF_IPX = ...  # type: int
+AF_IRDA = ...  # type: int
+AF_KEY = ...  # type: int
+AF_LLC = ...  # type: int
+AF_NETBEUI = ...  # type: int
+AF_NETLINK = ...  # type: int
+AF_NETROM = ...  # type: int
+AF_PACKET = ...  # type: int
+AF_PPPOX = ...  # type: int
+AF_ROSE = ...  # type: int
+AF_ROUTE = ...  # type: int
+AF_SECURITY = ...  # type: int
+AF_SNA = ...  # type: int
+AF_TIPC = ...  # type: int
+AF_UNIX = ...  # type: int
+AF_UNSPEC = ...  # type: int
+AF_WANPIPE = ...  # type: int
+AF_X25 = ...  # type: int
+AI_ADDRCONFIG = ...  # type: int
+AI_ALL = ...  # type: int
+AI_CANONNAME = ...  # type: int
+AI_NUMERICHOST = ...  # type: int
+AI_NUMERICSERV = ...  # type: int
+AI_PASSIVE = ...  # type: int
+AI_V4MAPPED = ...  # type: int
+BDADDR_ANY = ...  # type: str
+BDADDR_LOCAL = ...  # type: str
+BTPROTO_HCI = ...  # type: int
+BTPROTO_L2CAP = ...  # type: int
+BTPROTO_RFCOMM = ...  # type: int
+BTPROTO_SCO = ...  # type: int
+EAI_ADDRFAMILY = ...  # type: int
+EAI_AGAIN = ...  # type: int
+EAI_BADFLAGS = ...  # type: int
+EAI_FAIL = ...  # type: int
+EAI_FAMILY = ...  # type: int
+EAI_MEMORY = ...  # type: int
+EAI_NODATA = ...  # type: int
+EAI_NONAME = ...  # type: int
+EAI_OVERFLOW = ...  # type: int
+EAI_SERVICE = ...  # type: int
+EAI_SOCKTYPE = ...  # type: int
+EAI_SYSTEM = ...  # type: int
+EBADF = ...  # type: int
+EINTR = ...  # type: int
+HCI_DATA_DIR = ...  # type: int
+HCI_FILTER = ...  # type: int
+HCI_TIME_STAMP = ...  # type: int
+INADDR_ALLHOSTS_GROUP = ...  # type: int
+INADDR_ANY = ...  # type: int
+INADDR_BROADCAST = ...  # type: int
+INADDR_LOOPBACK = ...  # type: int
+INADDR_MAX_LOCAL_GROUP = ...  # type: int
+INADDR_NONE = ...  # type: int
+INADDR_UNSPEC_GROUP = ...  # type: int
+IPPORT_RESERVED = ...  # type: int
+IPPORT_USERRESERVED = ...  # type: int
+IPPROTO_AH = ...  # type: int
+IPPROTO_DSTOPTS = ...  # type: int
+IPPROTO_EGP = ...  # type: int
+IPPROTO_ESP = ...  # type: int
+IPPROTO_FRAGMENT = ...  # type: int
+IPPROTO_GRE = ...  # type: int
+IPPROTO_HOPOPTS = ...  # type: int
+IPPROTO_ICMP = ...  # type: int
+IPPROTO_ICMPV6 = ...  # type: int
+IPPROTO_IDP = ...  # type: int
+IPPROTO_IGMP = ...  # type: int
+IPPROTO_IP = ...  # type: int
+IPPROTO_IPIP = ...  # type: int
+IPPROTO_IPV6 = ...  # type: int
+IPPROTO_NONE = ...  # type: int
+IPPROTO_PIM = ...  # type: int
+IPPROTO_PUP = ...  # type: int
+IPPROTO_RAW = ...  # type: int
+IPPROTO_ROUTING = ...  # type: int
+IPPROTO_RSVP = ...  # type: int
+IPPROTO_TCP = ...  # type: int
+IPPROTO_TP = ...  # type: int
+IPPROTO_UDP = ...  # type: int
+IPV6_CHECKSUM = ...  # type: int
+IPV6_DSTOPTS = ...  # type: int
+IPV6_HOPLIMIT = ...  # type: int
+IPV6_HOPOPTS = ...  # type: int
+IPV6_JOIN_GROUP = ...  # type: int
+IPV6_LEAVE_GROUP = ...  # type: int
+IPV6_MULTICAST_HOPS = ...  # type: int
+IPV6_MULTICAST_IF = ...  # type: int
+IPV6_MULTICAST_LOOP = ...  # type: int
+IPV6_NEXTHOP = ...  # type: int
+IPV6_PKTINFO = ...  # type: int
+IPV6_RECVDSTOPTS = ...  # type: int
+IPV6_RECVHOPLIMIT = ...  # type: int
+IPV6_RECVHOPOPTS = ...  # type: int
+IPV6_RECVPKTINFO = ...  # type: int
+IPV6_RECVRTHDR = ...  # type: int
+IPV6_RECVTCLASS = ...  # type: int
+IPV6_RTHDR = ...  # type: int
+IPV6_RTHDRDSTOPTS = ...  # type: int
+IPV6_RTHDR_TYPE_0 = ...  # type: int
+IPV6_TCLASS = ...  # type: int
+IPV6_UNICAST_HOPS = ...  # type: int
+IPV6_V6ONLY = ...  # type: int
+IP_ADD_MEMBERSHIP = ...  # type: int
+IP_DEFAULT_MULTICAST_LOOP = ...  # type: int
+IP_DEFAULT_MULTICAST_TTL = ...  # type: int
+IP_DROP_MEMBERSHIP = ...  # type: int
+IP_HDRINCL = ...  # type: int
+IP_MAX_MEMBERSHIPS = ...  # type: int
+IP_MULTICAST_IF = ...  # type: int
+IP_MULTICAST_LOOP = ...  # type: int
+IP_MULTICAST_TTL = ...  # type: int
+IP_OPTIONS = ...  # type: int
+IP_RECVOPTS = ...  # type: int
+IP_RECVRETOPTS = ...  # type: int
+IP_RETOPTS = ...  # type: int
+IP_TOS = ...  # type: int
+IP_TTL = ...  # type: int
+MSG_CTRUNC = ...  # type: int
+MSG_DONTROUTE = ...  # type: int
+MSG_DONTWAIT = ...  # type: int
+MSG_EOR = ...  # type: int
+MSG_OOB = ...  # type: int
+MSG_PEEK = ...  # type: int
+MSG_TRUNC = ...  # type: int
+MSG_WAITALL = ...  # type: int
+MethodType = ...  # type: type
+NETLINK_DNRTMSG = ...  # type: int
+NETLINK_FIREWALL = ...  # type: int
+NETLINK_IP6_FW = ...  # type: int
+NETLINK_NFLOG = ...  # type: int
+NETLINK_ROUTE = ...  # type: int
+NETLINK_USERSOCK = ...  # type: int
+NETLINK_XFRM = ...  # type: int
+NI_DGRAM = ...  # type: int
+NI_MAXHOST = ...  # type: int
+NI_MAXSERV = ...  # type: int
+NI_NAMEREQD = ...  # type: int
+NI_NOFQDN = ...  # type: int
+NI_NUMERICHOST = ...  # type: int
+NI_NUMERICSERV = ...  # type: int
+PACKET_BROADCAST = ...  # type: int
+PACKET_FASTROUTE = ...  # type: int
+PACKET_HOST = ...  # type: int
+PACKET_LOOPBACK = ...  # type: int
+PACKET_MULTICAST = ...  # type: int
+PACKET_OTHERHOST = ...  # type: int
+PACKET_OUTGOING = ...  # type: int
+PF_PACKET = ...  # type: int
+SHUT_RD = ...  # type: int
+SHUT_RDWR = ...  # type: int
+SHUT_WR = ...  # type: int
+SOCK_DGRAM = ...  # type: int
+SOCK_RAW = ...  # type: int
+SOCK_RDM = ...  # type: int
+SOCK_SEQPACKET = ...  # type: int
+SOCK_STREAM = ...  # type: int
+SOL_HCI = ...  # type: int
+SOL_IP = ...  # type: int
+SOL_SOCKET = ...  # type: int
+SOL_TCP = ...  # type: int
+SOL_TIPC = ...  # type: int
+SOL_UDP = ...  # type: int
+SOMAXCONN = ...  # type: int
+SO_ACCEPTCONN = ...  # type: int
+SO_BROADCAST = ...  # type: int
+SO_DEBUG = ...  # type: int
+SO_DONTROUTE = ...  # type: int
+SO_ERROR = ...  # type: int
+SO_KEEPALIVE = ...  # type: int
+SO_LINGER = ...  # type: int
+SO_OOBINLINE = ...  # type: int
+SO_RCVBUF = ...  # type: int
+SO_RCVLOWAT = ...  # type: int
+SO_RCVTIMEO = ...  # type: int
+SO_REUSEADDR = ...  # type: int
+SO_REUSEPORT = ...  # type: int
+SO_SNDBUF = ...  # type: int
+SO_SNDLOWAT = ...  # type: int
+SO_SNDTIMEO = ...  # type: int
+SO_TYPE = ...  # type: int
+SSL_ERROR_EOF = ...  # type: int
+SSL_ERROR_INVALID_ERROR_CODE = ...  # type: int
+SSL_ERROR_SSL = ...  # type: int
+SSL_ERROR_SYSCALL = ...  # type: int
+SSL_ERROR_WANT_CONNECT = ...  # type: int
+SSL_ERROR_WANT_READ = ...  # type: int
+SSL_ERROR_WANT_WRITE = ...  # type: int
+SSL_ERROR_WANT_X509_LOOKUP = ...  # type: int
+SSL_ERROR_ZERO_RETURN = ...  # type: int
+TCP_CORK = ...  # type: int
+TCP_DEFER_ACCEPT = ...  # type: int
+TCP_INFO = ...  # type: int
+TCP_KEEPCNT = ...  # type: int
+TCP_KEEPIDLE = ...  # type: int
+TCP_KEEPINTVL = ...  # type: int
+TCP_LINGER2 = ...  # type: int
+TCP_MAXSEG = ...  # type: int
+TCP_NODELAY = ...  # type: int
+TCP_QUICKACK = ...  # type: int
+TCP_SYNCNT = ...  # type: int
+TCP_WINDOW_CLAMP = ...  # type: int
+TIPC_ADDR_ID = ...  # type: int
+TIPC_ADDR_NAME = ...  # type: int
+TIPC_ADDR_NAMESEQ = ...  # type: int
+TIPC_CFG_SRV = ...  # type: int
+TIPC_CLUSTER_SCOPE = ...  # type: int
+TIPC_CONN_TIMEOUT = ...  # type: int
+TIPC_CRITICAL_IMPORTANCE = ...  # type: int
+TIPC_DEST_DROPPABLE = ...  # type: int
+TIPC_HIGH_IMPORTANCE = ...  # type: int
+TIPC_IMPORTANCE = ...  # type: int
+TIPC_LOW_IMPORTANCE = ...  # type: int
+TIPC_MEDIUM_IMPORTANCE = ...  # type: int
+TIPC_NODE_SCOPE = ...  # type: int
+TIPC_PUBLISHED = ...  # type: int
+TIPC_SRC_DROPPABLE = ...  # type: int
+TIPC_SUBSCR_TIMEOUT = ...  # type: int
+TIPC_SUB_CANCEL = ...  # type: int
+TIPC_SUB_PORTS = ...  # type: int
+TIPC_SUB_SERVICE = ...  # type: int
+TIPC_TOP_SRV = ...  # type: int
+TIPC_WAIT_FOREVER = ...  # type: int
+TIPC_WITHDRAWN = ...  # type: int
+TIPC_ZONE_SCOPE = ...  # type: int
+
+# PyCapsule
+CAPI = ...  # type: Any
+
+has_ipv6 = ...  # type: bool
+
+class error(IOError): ...
+class gaierror(error): ...
+class timeout(error): ...
+
+class SocketType(object):
+    family = ...  # type: int
+    type = ...  # type: int
+    proto = ...  # type: int
+    timeout = ...  # type: float
+
+    def __init__(self, family: int = ..., type: int = ..., proto: int = ...) -> None: ...
+    def accept(self) -> Tuple['SocketType', tuple]: ...
+    def bind(self, address: tuple) -> None: ...
+    def close(self) -> None: ...
+    def connect(self, address: tuple) -> None:
+        raise gaierror
+        raise timeout
+    def connect_ex(self, address: tuple) -> int: ...
+    def dup(self) -> "SocketType": ...
+    def fileno(self) -> int: ...
+    def getpeername(self) -> tuple: ...
+    def getsockname(self) -> tuple: ...
+    def getsockopt(self, level: int, option: int, buffersize: int = ...) -> str: ...
+    def gettimeout(self) -> float: ...
+    def listen(self, backlog: int) -> None:
+        raise error
+    def makefile(self, mode: str = ..., buffersize: int = ...) -> IO[Any]: ...
+    def recv(self, buffersize: int, flags: int = ...) -> str: ...
+    def recv_into(self, buffer: bytearray, nbytes: int = ..., flags: int = ...) -> int: ...
+    def recvfrom(self, buffersize: int, flags: int = ...) -> tuple:
+        raise error
+    def recvfrom_into(self, buffer: bytearray, nbytes: int = ...,
+                      flags: int = ...) -> int: ...
+    def send(self, data: str, flags: int = ...) -> int: ...
+    def sendall(self, data: str, flags: int = ...) -> None: ...
+    @overload
+    def sendto(self, data: str, address: tuple) -> int: ...
+    @overload
+    def sendto(self, data: str, flags: int, address: tuple) -> int: ...
+    def setblocking(self, flag: bool) -> None: ...
+    def setsockopt(self, level: int, option: int, value: Union[int, str]) -> None: ...
+    def settimeout(self, value: Optional[float]) -> None: ...
+    def shutdown(self, flag: int) -> None: ...
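
For context, a minimal usage sketch of the interface stubbed above, assuming the Python 2.7
socket module (where socket.socket exposes these SocketType methods); the host and port are
placeholders:

    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(5.0)
    try:
        s.connect(("example.com", 80))        # may raise socket.gaierror or socket.timeout
        s.sendall("HEAD / HTTP/1.0\r\n\r\n")  # Python 2: plain str payload
        reply = s.recv(4096)                  # recv() returns str, per the stub above
    finally:
        s.close()
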
diff --git a/typeshed/stdlib/2.7/_sre.pyi b/typeshed/stdlib/2.7/_sre.pyi
new file mode 100644
index 0000000..2f1500f
--- /dev/null
+++ b/typeshed/stdlib/2.7/_sre.pyi
@@ -0,0 +1,53 @@
+"""Stub file for the '_sre' module."""
+
+from typing import Any, Union, Iterable, Optional, Mapping, Sequence, Dict, List, Tuple, overload
+
+CODESIZE = ...  # type: int
+MAGIC = ...  # type: int
+MAXREPEAT = ...  # type: long
+copyright = ...  # type: str
+
+class SRE_Match(object):
+    def start(self, group: int = ...) -> int:
+        raise IndexError()
+    def end(self, group: int = ...) -> int:
+        raise IndexError()
+    def expand(self, s: str) -> Any: ...
+    @overload
+    def group(self) -> str: ...
+    @overload
+    def group(self, group: int = ...) -> Optional[str]: ...
+    def groupdict(self) -> Dict[str, Optional[str]]: ...
+    def groups(self) -> Tuple[Optional[str], ...]: ...
+    def span(self) -> Tuple[int, int]:
+        raise IndexError()
+
+class SRE_Scanner(object):
+    pattern = ...  # type: str
+    def match(self) -> SRE_Match: ...
+    def search(self) -> SRE_Match: ...
+
+class SRE_Pattern(object):
+    pattern = ...  # type: str
+    flags = ...  # type: int 
+    groups = ...  # type: int 
+    groupindex = ...  # type: Mapping[int, int]
+    indexgroup = ...  # type: Sequence[int]
+    def findall(self, source: str, pos: int = ..., endpos: int = ...) -> List[Union[tuple, str]]: ...
+    def finditer(self, source: str, pos: int = ..., endpos: int = ...) -> Iterable[Union[tuple, str]]: ...
+    def match(self, source: str, pos: int = ..., endpos: int = ...) -> SRE_Match: ...
+    def scanner(self, s: str, start: int = ..., end: int = ...) -> SRE_Scanner: ...
+    def search(self, source: str, pos: int = ..., endpos: int = ...) -> SRE_Match: ...
+    def split(self, source: str, maxsplit: int = ...) -> List[Optional[str]]: ...
+    def sub(self, repl: str, string: str, count: int = ...) -> str: ...
+    def subn(self, repl: str, string: str, count: int = ...) -> Tuple[str, int]: ...
+
+def compile(pattern: str, flags: int, code: List[int],
+            groups: int = ...,
+            groupindex: Mapping[int, int] = ...,
+            indexgroup: Sequence[int] = ...) -> SRE_Pattern:
+    raise OverflowError()
+
+def getcodesize() -> int: ...
+
+def getlower(a: int, b: int) -> int: ...
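
These objects are normally reached through the public re module rather than by importing
_sre directly; a short sketch of that path (pattern and subject string are illustrative):

    import re

    pattern = re.compile(r"(\w+)-(\d+)")   # backed by an SRE_Pattern
    m = pattern.match("item-42")           # an SRE_Match, or None on failure
    assert m is not None
    assert m.group(1) == "item" and m.group(2) == "42"
    assert m.span() == (0, 7)
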
diff --git a/typeshed/stdlib/2.7/_struct.pyi b/typeshed/stdlib/2.7/_struct.pyi
new file mode 100644
index 0000000..a9048b7
--- /dev/null
+++ b/typeshed/stdlib/2.7/_struct.pyi
@@ -0,0 +1,22 @@
+"""Stub file for the '_struct' module."""
+
+from typing import Any, AnyStr, Tuple
+
+class error(Exception): ...
+
+class Struct(object):
+    size = ...  # type: int
+    format = ...  # type: str
+
+    def __init__(self, fmt: str) -> None: ...
+    def pack_into(self, buffer: bytearray, offset: int, *args: Any) -> None: ...
+    def pack(self, *args) -> str: ...
+    def unpack(self, s: str) -> Tuple[Any, ...]: ...
+    def unpack_from(self, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ...
+
+def _clearcache() -> None: ...
+def calcsize(fmt: str) -> int: ...
+def pack(fmt: AnyStr, *args: Any) -> str: ...
+def pack_into(fmt: AnyStr, buffer: bytearray, offset: int, *args: Any) -> None: ...
+def unpack(fmt: AnyStr, data: str) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: AnyStr, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ...
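
As a reference point, a small sketch of the packing API these signatures describe, using
the public struct module that wraps _struct (the format string is chosen for illustration):

    import struct

    payload = struct.pack("!HHi", 1, 2, -3)   # two unsigned shorts plus an int, big-endian
    a, b, c = struct.unpack("!HHi", payload)
    assert (a, b, c) == (1, 2, -3)

    s = struct.Struct("!HHi")                 # pre-compiled format for repeated use
    assert s.size == struct.calcsize("!HHi")
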
diff --git a/typeshed/stdlib/2.7/_symtable.pyi b/typeshed/stdlib/2.7/_symtable.pyi
new file mode 100644
index 0000000..b2c9290
--- /dev/null
+++ b/typeshed/stdlib/2.7/_symtable.pyi
@@ -0,0 +1,41 @@
+from typing import List, Dict
+
+CELL = ...  # type: int
+DEF_BOUND = ...  # type: int
+DEF_FREE = ...  # type: int
+DEF_FREE_CLASS = ...  # type: int
+DEF_GLOBAL = ...  # type: int
+DEF_IMPORT = ...  # type: int
+DEF_LOCAL = ...  # type: int
+DEF_PARAM = ...  # type: int
+FREE = ...  # type: int
+GLOBAL_EXPLICIT = ...  # type: int
+GLOBAL_IMPLICIT = ...  # type: int
+LOCAL = ...  # type: int
+OPT_BARE_EXEC = ...  # type: int
+OPT_EXEC = ...  # type: int
+OPT_IMPORT_STAR = ...  # type: int
+SCOPE_MASK = ...  # type: int
+SCOPE_OFF = ...  # type: int
+TYPE_CLASS = ...  # type: int
+TYPE_FUNCTION = ...  # type: int
+TYPE_MODULE = ...  # type: int
+USE = ...  # type: int
+
+class _symtable_entry(object):
+    ...
+
+class symtable(object):
+    children = ...  # type: List[_symtable_entry]
+    id = ...  # type: int
+    lineno = ...  # type: int
+    name = ...  # type: str
+    nested = ...  # type: int
+    optimized = ...  # type: int
+    symbols = ...  # type: Dict[str, int]
+    type = ...  # type: int
+    varnames = ...  # type: List[str]
+
+    def __init__(self, src: str, filename: str, startstr: str) -> None: ...
+
+
diff --git a/typeshed/stdlib/2.7/_warnings.pyi b/typeshed/stdlib/2.7/_warnings.pyi
new file mode 100644
index 0000000..0b24966
--- /dev/null
+++ b/typeshed/stdlib/2.7/_warnings.pyi
@@ -0,0 +1,11 @@
+from typing import Any, List
+
+default_action = ...  # type: str
+filters = ...  # type: List[tuple]
+once_registry = ...  # type: dict
+
+def warn(message: Warning, category: type = ..., stacklevel: int = ...) -> None: ...
+def warn_explicit(message: Warning, category: type,
+                  filename: str, lineno: int,
+                  module: Any = ..., registry: dict = ...,
+                  module_globals: dict = ...) -> None: ...
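
The public warnings module forwards to these hooks; a one-line sketch of the usual call
(the message text and stacklevel are illustrative):

    import warnings

    warnings.warn("this option is deprecated", DeprecationWarning, stacklevel=2)
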
diff --git a/typeshed/stdlib/2.7/_weakref.pyi b/typeshed/stdlib/2.7/_weakref.pyi
new file mode 100644
index 0000000..d2c457b
--- /dev/null
+++ b/typeshed/stdlib/2.7/_weakref.pyi
@@ -0,0 +1,16 @@
+from typing import Any, Callable, List
+
+class CallableProxyType(object):  # "weakcallableproxy"
+  pass
+
+class ProxyType(object):  # "weakproxy"
+  pass
+
+class ReferenceType(object):  # "weakref"
+  pass
+
+ref = ReferenceType
+
+def getweakrefcount(object: Any) -> int: ...
+def getweakrefs(object: Any) -> List[Any]: ...
+def proxy(object: Any, callback: Callable[[Any], Any] = ...) -> Any: ...
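
A brief sketch of how these types surface through the weakref module, assuming CPython's
reference counting so the referent is collected as soon as its last strong reference dies:

    import weakref

    class Node(object):
        pass

    n = Node()
    r = weakref.ref(n)        # a ReferenceType instance
    assert r() is n
    del n
    assert r() is None        # referent gone; the weak reference now returns None
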
diff --git a/typeshed/stdlib/2.7/_weakrefset.pyi b/typeshed/stdlib/2.7/_weakrefset.pyi
new file mode 100644
index 0000000..a3de693
--- /dev/null
+++ b/typeshed/stdlib/2.7/_weakrefset.pyi
@@ -0,0 +1,5 @@
+from typing import Iterator, Any
+
+class WeakSet:
+    def __iter__(self) -> Iterator[Any]: ...
+    def add(self, *args, **kwargs) -> Any: ...
diff --git a/typeshed/stdlib/2.7/abc.pyi b/typeshed/stdlib/2.7/abc.pyi
new file mode 100644
index 0000000..66f7183
--- /dev/null
+++ b/typeshed/stdlib/2.7/abc.pyi
@@ -0,0 +1,39 @@
+from typing import Any, Dict, Set, Union, Tuple
+import _weakrefset
+
+# mypy has special processing for ABCMeta and abstractmethod.
+
+WeakSet = ...  # type: _weakrefset.WeakSet
+_InstanceType = ...  # type: type
+types = ...  # type: module
+
+def abstractmethod(funcobj) -> Any: ...
+
+class ABCMeta(type):
+    # TODO: FrozenSet
+    __abstractmethods__ = ...  # type: Set[Any]
+    __doc__ = ...  # type: str
+    _abc_cache = ...  # type: _weakrefset.WeakSet
+    _abc_invalidation_counter = ...  # type: int
+    _abc_negative_cache = ...  # type: _weakrefset.WeakSet
+    _abc_negative_cache_version = ...  # type: int
+    _abc_registry = ...  # type: _weakrefset.WeakSet
+    def __init__(self, name, bases, namespace: Dict[Any, Any]) -> None: ...
+    def __instancecheck__(cls: "ABCMeta", instance) -> Any: ...
+    def __subclasscheck__(cls: "ABCMeta", subclass) -> Any: ...
+    def _dump_registry(cls: "ABCMeta", *args, **kwargs) -> None: ...
+    # TODO: subclass: Union["ABCMeta", type, Tuple[type, ...]]
+    def register(cls: "ABCMeta", subclass: Any) -> None: ...
+
+class _C:
+    pass
+
+# TODO: The real abc.abstractproperty inherits from "property".
+class abstractproperty(object):
+    def __new__(cls, func): ...
+    __doc__ = ...  # type: str
+    __isabstractmethod__ = ...  # type: bool
+    doc = ...  # type: Any
+    fdel = ...  # type: Any
+    fget = ...  # type: Any
+    fset = ...  # type: Any
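
A short sketch of the pattern these declarations support, using Python 2 metaclass syntax
(the class names are made up for the example):

    from abc import ABCMeta, abstractmethod

    class Serializer(object):
        __metaclass__ = ABCMeta

        @abstractmethod
        def dumps(self, obj):
            raise NotImplementedError

    class JsonSerializer(Serializer):
        def dumps(self, obj):
            import json
            return json.dumps(obj)

    # Instantiating Serializer() raises TypeError; the concrete subclass works.
    assert JsonSerializer().dumps([1, 2]) == "[1, 2]"
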
diff --git a/typeshed/stdlib/2.7/argparse.pyi b/typeshed/stdlib/2.7/argparse.pyi
new file mode 100644
index 0000000..96b213c
--- /dev/null
+++ b/typeshed/stdlib/2.7/argparse.pyi
@@ -0,0 +1,171 @@
+# Stubs for argparse (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Sequence
+
+SUPPRESS = ... # type: Any
+OPTIONAL = ... # type: Any
+ZERO_OR_MORE = ... # type: Any
+ONE_OR_MORE = ... # type: Any
+PARSER = ... # type: Any
+REMAINDER = ... # type: Any
+
+class _AttributeHolder: ...
+
+class HelpFormatter:
+    def __init__(self, prog, indent_increment=..., max_help_position=..., width=...) -> None: ...
+    class _Section:
+        formatter = ... # type: Any
+        parent = ... # type: Any
+        heading = ... # type: Any
+        items = ... # type: Any
+        def __init__(self, formatter, parent, heading=...) -> None: ...
+        def format_help(self): ...
+    def start_section(self, heading): ...
+    def end_section(self): ...
+    def add_text(self, text): ...
+    def add_usage(self, usage, actions, groups, prefix=...): ...
+    def add_argument(self, action): ...
+    def add_arguments(self, actions): ...
+    def format_help(self): ...
+
+class RawDescriptionHelpFormatter(HelpFormatter): ...
+class RawTextHelpFormatter(RawDescriptionHelpFormatter): ...
+class ArgumentDefaultsHelpFormatter(HelpFormatter): ...
+
+class ArgumentError(Exception):
+    argument_name = ... # type: Any
+    message = ... # type: Any
+    def __init__(self, argument, message) -> None: ...
+
+class ArgumentTypeError(Exception): ...
+
+class Action(_AttributeHolder):
+    option_strings = ... # type: Any
+    dest = ... # type: Any
+    nargs = ... # type: Any
+    const = ... # type: Any
+    default = ... # type: Any
+    type = ... # type: Any
+    choices = ... # type: Any
+    required = ... # type: Any
+    help = ... # type: Any
+    metavar = ... # type: Any
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreAction(Action):
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreConstAction(Action):
+    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
+                 metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreTrueAction(_StoreConstAction):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+
+class _StoreFalseAction(_StoreConstAction):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+
+class _AppendAction(Action):
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _AppendConstAction(Action):
+    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
+                 metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _CountAction(Action):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _HelpAction(Action):
+    def __init__(self, option_strings, dest=..., default=..., help=...) -> None: ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _VersionAction(Action):
+    version = ... # type: Any
+    def __init__(self, option_strings, version=..., dest=..., default=..., help=...) -> None: ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _SubParsersAction(Action):
+    class _ChoicesPseudoAction(Action):
+        def __init__(self, name, help) -> None: ...
+    def __init__(self, option_strings, prog, parser_class, dest=..., help=..., metavar=...) -> None: ...
+    def add_parser(self, name, **kwargs): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class FileType:
+    def __init__(self, mode=..., bufsize=...) -> None: ...
+    def __call__(self, string): ...
+
+class Namespace(_AttributeHolder):
+    def __init__(self, **kwargs) -> None: ...
+    __hash__ = ... # type: Any
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def __contains__(self, key): ...
+    def __getattr__(self, name: str) -> Any: ...
+
+class _ActionsContainer:
+    description = ... # type: Any
+    argument_default = ... # type: Any
+    prefix_chars = ... # type: Any
+    conflict_handler = ... # type: Any
+    def __init__(self, description, prefix_chars, argument_default, conflict_handler) -> None: ...
+    def register(self, registry_name, value, object): ...
+    def set_defaults(self, **kwargs): ...
+    def get_default(self, dest): ...
+    def add_argument(self,
+        *args: str,
+        action: str = ...,
+        nargs: str = ...,
+        const: Any = ...,
+        default: Any = ...,
+        type: Any = ...,
+        choices: Any = ..., # TODO: Container?
+        required: bool = ...,
+        help: str = ...,
+        metavar: str = ...,
+        dest: str = ...) -> None: ...
+    def add_argument_group(self, *args, **kwargs): ...
+    def add_mutually_exclusive_group(self, **kwargs): ...
+
+class _ArgumentGroup(_ActionsContainer):
+    title = ... # type: Any
+    def __init__(self, container, title=..., description=..., **kwargs) -> None: ...
+
+class _MutuallyExclusiveGroup(_ArgumentGroup):
+    required = ... # type: Any
+    def __init__(self, container, required=...) -> None: ...
+
+class ArgumentParser(_AttributeHolder, _ActionsContainer):
+    prog = ... # type: Any
+    usage = ... # type: Any
+    epilog = ... # type: Any
+    version = ... # type: Any
+    formatter_class = ... # type: Any
+    fromfile_prefix_chars = ... # type: Any
+    add_help = ... # type: Any
+    def __init__(self, prog=..., usage=..., description=..., epilog=..., version=...,
+                 parents=..., formatter_class=..., prefix_chars=..., fromfile_prefix_chars=...,
+                 argument_default=..., conflict_handler=..., add_help=...): ...
+    def add_subparsers(self, **kwargs): ...
+    def parse_args(self, args: Sequence[str] = ..., namespace=...): ...
+    def parse_known_args(self, args=..., namespace=...): ...
+    def convert_arg_line_to_args(self, arg_line): ...
+    def format_usage(self): ...
+    def format_help(self): ...
+    def format_version(self): ...
+    def print_usage(self, file=...): ...
+    def print_help(self, file=...): ...
+    def print_version(self, file=...): ...
+    def exit(self, status=..., message=...): ...
+    def error(self, message): ...
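
For orientation, a minimal sketch exercising the ArgumentParser surface stubbed above
(the option names are invented for the example):

    import argparse

    parser = argparse.ArgumentParser(prog="demo", description="toy parser")
    parser.add_argument("--verbose", action="store_true", help="enable chatty output")
    parser.add_argument("--count", type=int, default=1, help="number of repetitions")
    args = parser.parse_args(["--verbose", "--count", "3"])
    assert args.verbose is True and args.count == 3
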
diff --git a/typeshed/stdlib/2.7/array.pyi b/typeshed/stdlib/2.7/array.pyi
new file mode 100644
index 0000000..3ff4081
--- /dev/null
+++ b/typeshed/stdlib/2.7/array.pyi
@@ -0,0 +1,56 @@
+"""Stub file for the 'array' module."""
+
+from typing import (Any, Generic, IO, Iterable, Sequence, TypeVar,
+                    Union, overload, Iterator, Tuple, BinaryIO, List)
+
+T = TypeVar('T')
+
+class array(Generic[T]):
+    def __init__(self, typecode: str, init: Iterable[T] = ...) -> None: ...
+    def __add__(self, y: "array[T]") -> "array[T]": ...
+    def __contains__(self, y: Any) -> bool: ...
+    def __copy__(self) -> "array[T]": ...
+    def __deepcopy__(self) -> "array": ...
+    def __delitem__(self, y: Union[slice, int]) -> None: ...
+    def __delslice__(self, i: int, j: int) -> None: ...
+    @overload
+    def __getitem__(self, i: int) -> Any: ...
+    @overload
+    def __getitem__(self, s: slice) -> "array": ...
+    def __iadd__(self, y: "array[T]") -> "array[T]": ...
+    def __imul__(self, y: int) -> "array[T]": ...
+    def __iter__(self) -> Iterator[T]: ...
+    def __len__(self) -> int: ...
+    def __mul__(self, n: int) -> "array[T]": ...
+    def __rmul__(self, n: int) -> "array[T]": ...
+    @overload
+    def __setitem__(self, i: int, y: T) -> None: ...
+    @overload
+    def __setitem__(self, i: slice, y: "array[T]") -> None: ...
+
+    def append(self, x: T) -> None: ...
+    def buffer_info(self) -> Tuple[int, int]: ...
+    def byteswap(self) -> None:
+        raise RuntimeError()
+    def count(self, x: T) -> int: ...
+    def extend(self, x: Sequence[T]) -> None: ...
+    def fromlist(self, list: List[T]) -> None:
+        raise EOFError()
+        raise IOError()
+    def fromfile(self, f: BinaryIO, n: int) -> None: ...
+    def fromstring(self, s: str) -> None: ...
+    def fromunicode(self, u: unicode) -> None: ...
+    def index(self, x: T) -> int: ...
+    def insert(self, i: int, x: T) -> None: ...
+    def pop(self, i: int = ...) -> T: ...
+    def read(self, f: IO[str], n: int) -> None:
+        raise DeprecationWarning()
+    def remove(self, x: T) -> None: ...
+    def reverse(self) -> None: ...
+    def tofile(self, f: BinaryIO) -> None:
+        raise IOError()
+    def tolist(self) -> List[T]: ...
+    def tostring(self) -> str: ...
+    def tounicode(self) -> unicode: ...
+    def write(self, f: IO[str]) -> None:
+        raise DeprecationWarning()
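
A short sketch of the array interface stubbed above (typecode 'i', signed int, chosen for
illustration):

    from array import array

    a = array("i", [1, 2, 3])
    a.append(4)
    a.extend([5, 6])
    assert a.tolist() == [1, 2, 3, 4, 5, 6]
    assert a.count(3) == 1
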
diff --git a/typeshed/stdlib/2.7/ast.pyi b/typeshed/stdlib/2.7/ast.pyi
new file mode 100644
index 0000000..dbaf6af
--- /dev/null
+++ b/typeshed/stdlib/2.7/ast.pyi
@@ -0,0 +1,40 @@
+# Automatically generated by pytype. May contain errors.
+
+from typing import Any, Tuple, Generator
+
+from _ast import (
+        AST, alias, arguments, boolop, cmpop, comprehension, excepthandler,
+        expr, expr_context, keyword, mod, operator, slice, stmt, unaryop, Add,
+        And, Assert, Assign, Attribute, AugAssign, AugLoad, AugStore, BinOp,
+        BitAnd, BitOr, BitXor, BoolOp, Break, Call, ClassDef, Compare, Continue,
+        Del, Delete, Dict, DictComp, Div, Ellipsis, Eq, ExceptHandler, Exec,
+        Expr, Expression, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp,
+        Global, Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive,
+        Invert, Is, IsNot, LShift, Lambda, List, ListComp, Load, Lt, LtE, Mod,
+        Module, Mult, Name, Not, NotEq, NotIn, Num, Or, Param, Pass, Pow, Print,
+        RShift, Raise, Repr, Return, Set, SetComp, Slice, Store, Str, Sub,
+        Subscript, Suite, TryExcept, TryFinally, Tuple, UAdd, USub, UnaryOp,
+        While, With, Yield)
+
+__version__ = ...  # type: int
+PyCF_ONLY_AST = ...  # type: int
+
+def copy_location(new_node, old_node) -> Any: ...
+def dump(node, *args, **kwargs) -> str: ...
+def fix_missing_locations(node) -> Any: ...
+def get_docstring(node, *args, **kwargs) -> Any: ...
+def increment_lineno(node, *args, **kwargs) -> Any: ...
+def iter_child_nodes(node) -> Generator[Any, Any, Any]: ...
+def iter_fields(node) -> Any: ...  # TODO: Generator[Tuple[Any, ...]]: ...
+def literal_eval(node_or_string) -> Any: ...
+def parse(source, filename = ..., mode = ..., *args, **kwargs) -> AST: ...
+def walk(node) -> Any: ...  # TODO: Generator[Any]: ...
+
+class NodeVisitor(object):
+    __doc__ = ...  # type: str
+    def generic_visit(self, node) -> None: ...
+    def visit(self, node) -> Any: ...
+
+class NodeTransformer(NodeVisitor):
+    __doc__ = ...  # type: str
+    def generic_visit(self, node) -> Any: ...
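
A quick sketch of the two entry points most callers use from this module (the source
strings are illustrative):

    import ast

    tree = ast.parse("1 + 2", mode="eval")
    assert isinstance(tree, ast.Expression)
    assert ast.literal_eval("[1, 2, 3]") == [1, 2, 3]
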
diff --git a/typeshed/stdlib/2.7/atexit.pyi b/typeshed/stdlib/2.7/atexit.pyi
new file mode 100644
index 0000000..13d2602
--- /dev/null
+++ b/typeshed/stdlib/2.7/atexit.pyi
@@ -0,0 +1,5 @@
+from typing import TypeVar, Any
+
+_FT = TypeVar('_FT')
+
+def register(func: _FT, *args: Any, **kargs: Any) -> _FT: ...
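
register() returns the function it was given, so it can also be used as a decorator; a
minimal sketch:

    import atexit

    def goodbye():
        print("shutting down")

    assert atexit.register(goodbye) is goodbye
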
diff --git a/typeshed/stdlib/2.7/base64.pyi b/typeshed/stdlib/2.7/base64.pyi
new file mode 100644
index 0000000..d593fa8
--- /dev/null
+++ b/typeshed/stdlib/2.7/base64.pyi
@@ -0,0 +1,25 @@
+# Stubs for base64
+
+# Based on http://docs.python.org/3.2/library/base64.html
+
+from typing import IO
+
+def b64encode(s: str, altchars: str = ...) -> str: ...
+def b64decode(s: str, altchars: str = ...,
+              validate: bool = ...) -> str: ...
+def standard_b64encode(s: str) -> str: ...
+def standard_b64decode(s: str) -> str: ...
+def urlsafe_b64encode(s: str) -> str: ...
+def urlsafe_b64decode(s: str) -> str: ...
+def b32encode(s: str) -> str: ...
+def b32decode(s: str, casefold: bool = ...,
+              map01: str = ...) -> str: ...
+def b16encode(s: str) -> str: ...
+def b16decode(s: str, casefold: bool = ...) -> str: ...
+
+def decode(input: IO[str], output: IO[str]) -> None: ...
+def decodebytes(s: str) -> str: ...
+def decodestring(s: str) -> str: ...
+def encode(input: IO[str], output: IO[str]) -> None: ...
+def encodebytes(s: str) -> str: ...
+def encodestring(s: str) -> str: ...
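
And the round trip these signatures describe, with Python 2 byte strings in and out:

    import base64

    encoded = base64.b64encode("mypy")
    assert encoded == "bXlweQ=="
    assert base64.b64decode(encoded) == "mypy"
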
diff --git a/typeshed/stdlib/2.7/binascii.pyi b/typeshed/stdlib/2.7/binascii.pyi
new file mode 100644
index 0000000..f8b85b6
--- /dev/null
+++ b/typeshed/stdlib/2.7/binascii.pyi
@@ -0,0 +1,21 @@
+"""Stubs for the binascii module."""
+
+def a2b_base64(string: str) -> str: ...
+def a2b_hex(hexstr: str) -> str: ...
+def a2b_hqx(string: str) -> str: ...
+def a2b_qp(string: str, header: bool = ...) -> str: ...
+def a2b_uu(string: str) -> str: ...
+def b2a_base64(data: str) -> str: ...
+def b2a_hex(data: str) -> str: ...
+def b2a_hqx(data: str) -> str: ...
+def b2a_qp(data: str, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> str: ...
+def b2a_uu(data: str) -> str: ...
+def crc32(data: str, crc: int = None) -> int: ...
+def crc_hqx(data: str, oldcrc: int) -> int: ...
+def hexlify(data: str) -> str: ...
+def rlecode_hqx(data: str) -> str: ...
+def rledecode_hqx(data: str) -> str: ...
+def unhexlify(hexstr: str) -> str: ...
+
+class Error(Exception): ...
+class Incomplete(Exception): ...
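
A matching sketch for the hex helpers (the input is chosen for illustration):

    import binascii

    assert binascii.hexlify("ab") == "6162"
    assert binascii.unhexlify("6162") == "ab"
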
diff --git a/typeshed/stdlib/2.7/builtins.pyi b/typeshed/stdlib/2.7/builtins.pyi
new file mode 100644
index 0000000..546202b
--- /dev/null
+++ b/typeshed/stdlib/2.7/builtins.pyi
@@ -0,0 +1,870 @@
+# Stubs for builtins (Python 2.7)
+
+from typing import (
+    TypeVar, Iterator, Iterable, overload,
+    Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
+    AbstractSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs,
+    SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping,
+    MutableSet
+)
+from abc import abstractmethod, ABCMeta
+
+_T = TypeVar('_T')
+_T_co = TypeVar('_T_co', covariant=True)
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_S = TypeVar('_S')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+
+staticmethod = object()  # Special, only valid as a decorator.
+classmethod = object()  # Special, only valid as a decorator.
+property = object()
+
+class object:
+    __doc__ = ...  # type: str
+    __class__ = ...  # type: type
+
+    def __init__(self) -> None: ...
+    def __new__(cls) -> Any: ...
+    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __eq__(self, o: object) -> bool: ...
+    def __ne__(self, o: object) -> bool: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __hash__(self) -> int: ...
+
+class type:
+    __name__ = ...  # type: str
+    __module__ = ...  # type: str
+    __dict__ = ...  # type: Dict[unicode, Any]
+
+    @overload
+    def __init__(self, o: object) -> None: ...
+    @overload
+    def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ...
+    # TODO: __new__ may have to be special and not a static method.
+    @staticmethod
+    def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+
+class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: SupportsInt) -> None: ...
+    @overload
+    def __init__(self, x: Union[str, unicode, bytearray], base: int = 10) -> None: ...
+    def bit_length(self) -> int: ...
+
+    def __add__(self, x: int) -> int: ...
+    def __sub__(self, x: int) -> int: ...
+    def __mul__(self, x: int) -> int: ...
+    def __floordiv__(self, x: int) -> int: ...
+    def __div__(self, x: int) -> int: ...
+    def __truediv__(self, x: int) -> float: ...
+    def __mod__(self, x: int) -> int: ...
+    def __radd__(self, x: int) -> int: ...
+    def __rsub__(self, x: int) -> int: ...
+    def __rmul__(self, x: int) -> int: ...
+    def __rfloordiv__(self, x: int) -> int: ...
+    def __rdiv__(self, x: int) -> int: ...
+    def __rtruediv__(self, x: int) -> float: ...
+    def __rmod__(self, x: int) -> int: ...
+    def __pow__(self, x: int) -> Any: ...  # Return type can be int or float, depending on x.
+    def __rpow__(self, x: int) -> Any: ...
+    def __and__(self, n: int) -> int: ...
+    def __or__(self, n: int) -> int: ...
+    def __xor__(self, n: int) -> int: ...
+    def __lshift__(self, n: int) -> int: ...
+    def __rshift__(self, n: int) -> int: ...
+    def __rand__(self, n: int) -> int: ...
+    def __ror__(self, n: int) -> int: ...
+    def __rxor__(self, n: int) -> int: ...
+    def __rlshift__(self, n: int) -> int: ...
+    def __rrshift__(self, n: int) -> int: ...
+    def __neg__(self) -> int: ...
+    def __pos__(self) -> int: ...
+    def __invert__(self) -> int: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: int) -> bool: ...
+    def __le__(self, x: int) -> bool: ...
+    def __gt__(self, x: int) -> bool: ...
+    def __ge__(self, x: int) -> bool: ...
+
+    def __str__(self) -> str: ...
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: return self
+    def __abs__(self) -> int: ...
+    def __hash__(self) -> int: ...
+
+class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: SupportsFloat) -> None: ...
+    @overload
+    def __init__(self, x: unicode) -> None: ...
+    @overload
+    def __init__(self, x: bytearray) -> None: ...
+    def as_integer_ratio(self) -> Tuple[int, int]: ...
+    def hex(self) -> str: ...
+    def is_integer(self) -> bool: ...
+    @classmethod
+    def fromhex(cls, s: str) -> float: ...
+
+    def __add__(self, x: float) -> float: ...
+    def __sub__(self, x: float) -> float: ...
+    def __mul__(self, x: float) -> float: ...
+    def __floordiv__(self, x: float) -> float: ...
+    def __div__(self, x: float) -> float: ...
+    def __truediv__(self, x: float) -> float: ...
+    def __mod__(self, x: float) -> float: ...
+    def __pow__(self, x: float) -> float: ...
+    def __radd__(self, x: float) -> float: ...
+    def __rsub__(self, x: float) -> float: ...
+    def __rmul__(self, x: float) -> float: ...
+    def __rfloordiv__(self, x: float) -> float: ...
+    def __rdiv__(self, x: float) -> float: ...
+    def __rtruediv__(self, x: float) -> float: ...
+    def __rmod__(self, x: float) -> float: ...
+    def __rpow__(self, x: float) -> float: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: float) -> bool: ...
+    def __le__(self, x: float) -> bool: ...
+    def __gt__(self, x: float) -> bool: ...
+    def __ge__(self, x: float) -> bool: ...
+    def __neg__(self) -> float: ...
+    def __pos__(self) -> float: ...
+
+    def __str__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: return self
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class complex(SupportsAbs[float]):
+    @overload
+    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    @overload
+    def __init__(self, s: str) -> None: ...
+
+    @property
+    def real(self) -> float: ...
+    @property
+    def imag(self) -> float: ...
+
+    def conjugate(self) -> complex: ...
+
+    def __add__(self, x: complex) -> complex: ...
+    def __sub__(self, x: complex) -> complex: ...
+    def __mul__(self, x: complex) -> complex: ...
+    def __pow__(self, x: complex) -> complex: ...
+    def __div__(self, x: complex) -> complex: ...
+    def __truediv__(self, x: complex) -> complex: ...
+    def __radd__(self, x: complex) -> complex: ...
+    def __rsub__(self, x: complex) -> complex: ...
+    def __rmul__(self, x: complex) -> complex: ...
+    def __rpow__(self, x: complex) -> complex: ...
+    def __rdiv__(self, x: complex) -> complex: ...
+    def __rtruediv__(self, x: complex) -> complex: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __neg__(self) -> complex: ...
+    def __pos__(self) -> complex: ...
+
+    def __str__(self) -> str: ...
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class basestring(metaclass=ABCMeta): ...
+
+class unicode(basestring, Sequence[unicode]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, o: object) -> None: ...
+    @overload
+    def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ...
+    def capitalize(self) -> unicode: ...
+    def center(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def count(self, x: unicode) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
+    def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+                 end: int = ...) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> unicode: ...
+    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def format(self, *args: Any, **kwargs: Any) -> unicode: ...
+    def format_map(self, map: Mapping[unicode, Any]) -> unicode: ...
+    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdecimal(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def isidentifier(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isnumeric(self) -> bool: ...
+    def isprintable(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[unicode]) -> unicode: ...
+    def ljust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def lower(self) -> unicode: ...
+    def lstrip(self, chars: unicode = ...) -> unicode: ...
+    def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ...
+    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def rsplit(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def rstrip(self, chars: unicode = ...) -> unicode: ...
+    def split(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+                   end: int = ...) -> bool: ...
+    def strip(self, chars: unicode = ...) -> unicode: ...
+    def swapcase(self) -> unicode: ...
+    def title(self) -> unicode: ...
+    def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ...
+    def upper(self) -> unicode: ...
+    def zfill(self, width: int) -> unicode: ...
+
+    @overload
+    def __getitem__(self, i: int) -> unicode: ...
+    @overload
+    def __getitem__(self, s: slice) -> unicode: ...
+    def __getslice__(self, start: int, stop: int) -> unicode: ...
+    def __add__(self, s: unicode) -> unicode: ...
+    def __mul__(self, n: int) -> unicode: ...
+    def __mod__(self, x: Any) -> unicode: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: unicode) -> bool: ...
+    def __le__(self, x: unicode) -> bool: ...
+    def __gt__(self, x: unicode) -> bool: ...
+    def __ge__(self, x: unicode) -> bool: ...
+
+    def __len__(self) -> int: ...
+    def __contains__(self, s: object) -> bool: ...
+    def __iter__(self) -> Iterator[unicode]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class str(basestring, Sequence[str]):
+    def __init__(self, object: object) -> None: ...
+    def capitalize(self) -> str: ...
+    def center(self, width: int, fillchar: str = ...) -> str: ...
+    def count(self, x: unicode) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
+    def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def format(self, *args: Any, **kwargs: Any) -> str: ...
+    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ...
+    def ljust(self, width: int, fillchar: str = ...) -> str: ...
+    def lower(self) -> str: ...
+    @overload
+    def lstrip(self, chars: str = ...) -> str: ...
+    @overload
+    def lstrip(self, chars: unicode) -> unicode: ...
+    @overload
+    def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
+    @overload
+    def partition(self, sep: str) -> Tuple[str, str, str]: ...
+    @overload
+    def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ...
+    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: str = ...) -> str: ...
+    @overload
+    def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
+    @overload
+    def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
+    @overload
+    def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
+    @overload
+    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    @overload
+    def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
+    @overload
+    def rstrip(self, chars: str = ...) -> str: ...
+    @overload
+    def rstrip(self, chars: unicode) -> unicode: ...
+    @overload
+    def split(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    @overload
+    def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
+    def splitlines(self, keepends: bool = ...) -> List[str]: ...
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
+    @overload
+    def strip(self, chars: str = ...) -> str: ...
+    @overload
+    def strip(self, chars: unicode) -> unicode: ...
+    def swapcase(self) -> str: ...
+    def title(self) -> str: ...
+    def translate(self, table: AnyStr, deletechars: AnyStr = None) -> AnyStr: ...
+    def upper(self) -> str: ...
+    def zfill(self, width: int) -> str: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> str: ...
+    @overload
+    def __getitem__(self, s: slice) -> str: ...
+    def __getslice__(self, start: int, stop: int) -> str: ...
+    def __add__(self, s: AnyStr) -> AnyStr: ...
+    def __mul__(self, n: int) -> str: ...
+    def __rmul__(self, n: int) -> str: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: unicode) -> bool: ...
+    def __le__(self, x: unicode) -> bool: ...
+    def __gt__(self, x: unicode) -> bool: ...
+    def __ge__(self, x: unicode) -> bool: ...
+    def __mod__(self, x: Any) -> str: ...
+
+class bytearray(Sequence[int]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, x: Union[Iterable[int], str]) -> None: ...
+    @overload
+    def __init__(self, x: unicode, encoding: unicode,
+                 errors: unicode = ...) -> None: ...
+    @overload
+    def __init__(self, length: int) -> None: ...
+    def capitalize(self) -> bytearray: ...
+    def center(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def count(self, x: str) -> int: ...
+    def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
+    def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
+    def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[str]) -> bytearray: ...
+    def ljust(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def lower(self) -> bytearray: ...
+    def lstrip(self, chars: str = ...) -> bytearray: ...
+    def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def replace(self, old: str, new: str, count: int = ...) -> bytearray: ...
+    def rfind(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
+    def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def rstrip(self, chars: str = ...) -> bytearray: ...
+    def split(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
+    def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ...
+    def strip(self, chars: str = ...) -> bytearray: ...
+    def swapcase(self) -> bytearray: ...
+    def title(self) -> bytearray: ...
+    def translate(self, table: str) -> bytearray: ...
+    def upper(self) -> bytearray: ...
+    def zfill(self, width: int) -> bytearray: ...
+    @staticmethod
+    def fromhex(x: str) -> bytearray: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> bytearray: ...
+    def __getslice__(self, start: int, stop: int) -> bytearray: ...
+    @overload
+    def __setitem__(self, i: int, x: int) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, x: Union[Sequence[int], str]) -> None: ...
+    def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ...
+    @overload
+    def __delitem__(self, i: int) -> None: ...
+    @overload
+    def __delitem__(self, s: slice) -> None: ...
+    def __delslice__(self, start: int, stop: int) -> None: ...
+    def __add__(self, s: str) -> bytearray: ...
+    def __mul__(self, n: int) -> bytearray: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: str) -> bool: ...
+    def __le__(self, x: str) -> bool: ...
+    def __gt__(self, x: str) -> bool: ...
+    def __ge__(self, x: str) -> bool: ...
+
+class bool(int, SupportsInt, SupportsFloat):
+    def __init__(self, o: object = ...) -> None: ...
+
+class slice:
+    start = 0
+    step = 0
+    stop = 0
+    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+
+class tuple(Sequence[_T_co], Generic[_T_co]):
+    def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, x: object) -> bool: ...
+    @overload
+    def __getitem__(self, x: int) -> _T_co: ...
+    @overload
+    def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ...
+    def __iter__(self) -> Iterator[_T_co]: ...
+    def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __le__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ...
+    def __mul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def count(self, x: Any) -> int: ...
+    def index(self, x: Any) -> int: ...
+
+class function:
+    # TODO name of the class (corresponds to Python 'function' class)
+    __name__ = ...  # type: str
+    __module__ = ...  # type: str
+
+class list(MutableSequence[_T], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def append(self, object: _T) -> None: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def pop(self, index: int = -1) -> _T: ...
+    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def count(self, object: _T) -> int: ...
+    def insert(self, index: int, object: _T) -> None: ...
+    def remove(self, object: _T) -> None: ...
+    def reverse(self) -> None: ...
+    def sort(self, *, key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> _T: ...
+    @overload
+    def __getitem__(self, s: slice) -> List[_T]: ...
+    def __getslice__(self, start: int, stop: int) -> List[_T]: ...
+    @overload
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: ...
+    def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    def __delslice__(self, start: int, stop: int) -> None: ...
+    def __add__(self, x: List[_T]) -> List[_T]: ...
+    def __iadd__(self, x: Iterable[_T]) -> List[_T]: ...
+    def __mul__(self, n: int) -> List[_T]: ...
+    def __rmul__(self, n: int) -> List[_T]: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __gt__(self, x: List[_T]) -> bool: ...
+    def __ge__(self, x: List[_T]) -> bool: ...
+    def __lt__(self, x: List[_T]) -> bool: ...
+    def __le__(self, x: List[_T]) -> bool: ...
+
+class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...  # TODO keyword args
+
+    def has_key(self, k: _KT) -> bool: ...
+    def clear(self) -> None: ...
+    def copy(self) -> Dict[_KT, _VT]: ...
+    def get(self, k: _KT, default: _VT = None) -> _VT: ...
+    def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def update(self, m: Union[Mapping[_KT, _VT],
+                              Iterable[Tuple[_KT, _VT]]]) -> None: ...
+    def keys(self) -> List[_KT]: ...
+    def values(self) -> List[_VT]: ...
+    def items(self) -> List[Tuple[_KT, _VT]]: ...
+    def iterkeys(self) -> Iterator[_KT]: ...
+    def itervalues(self) -> Iterator[_VT]: ...
+    def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
+    def __len__(self) -> int: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __str__(self) -> str: ...
+
+class set(MutableSet[_T], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def add(self, element: _T) -> None: ...
+    def clear(self) -> None: ...
+    def copy(self) -> set[_T]: ...
+    def difference(self, s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def discard(self, element: _T) -> None: ...
+    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def pop(self) -> _T: ...
+    def remove(self, element: _T) -> None: ...
+    def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+    def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
+    def union(self, s: Iterable[_T]) -> set[_T]: ...
+    def update(self, s: Iterable[_T]) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    # TODO more set operations
+
+class frozenset(AbstractSet[_T], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def copy(self) -> frozenset[_T]: ...
+    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+
+class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
+    def next(self) -> Tuple[int, _T]: ...
+    # TODO __getattribute__
+
+class xrange(Sized, Iterable[int], Reversible[int]):
+    @overload
+    def __init__(self, stop: int) -> None: ...
+    @overload
+    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __getitem__(self, i: int) -> int: ...
+    def __reversed__(self) -> Iterator[int]: ...
+
+class module:
+    __name__ = ...  # type: str
+    __file__ = ...  # type: str
+    __dict__ = ...  # type: Dict[unicode, Any]
+
+True = ...  # type: bool
+False = ...  # type: bool
+__debug__ = False
+
+long = int
+bytes = str
+
+NotImplemented = ...  # type: Any
+
+def abs(n: SupportsAbs[_T]) -> _T: ...
+def all(i: Iterable) -> bool: ...
+def any(i: Iterable) -> bool: ...
+def bin(number: int) -> str: ...
+def callable(o: object) -> bool: ...
+def chr(code: int) -> str: ...
+def compile(source: Any, filename: unicode, mode: str, flags: int = 0,
+            dont_inherit: int = 0) -> Any: ...
+def delattr(o: Any, name: unicode) -> None: ...
+def dir(o: object = ...) -> List[str]: ...
+@overload
+def divmod(a: int, b: int) -> Tuple[int, int]: ...
+@overload
+def divmod(a: float, b: float) -> Tuple[float, float]: ...
+def exit(code: int = ...) -> None: ...
+def filter(function: Callable[[_T], Any],
+           iterable: Iterable[_T]) -> List[_T]: ...
+def format(o: object, format_spec: str = '') -> str: ...  # TODO unicode
+def getattr(o: Any, name: unicode, default: Any = None) -> Any: ...
+def hasattr(o: Any, name: unicode) -> bool: ...
+def hash(o: object) -> int: ...
+def hex(i: int) -> str: ...  # TODO __index__
+def id(o: object) -> int: ...
+def input(prompt: unicode = ...) -> Any: ...
+def intern(string: str) -> str: ...
+@overload
+def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
+@overload
+def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
+def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: type) -> bool: ...
+# TODO support this
+#def issubclass(type cld, classinfo: Sequence[type]) -> bool: ...
+def len(o: Sized) -> int: ...
+@overload
+def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ...
+@overload
+def map(func: Callable[[_T1, _T2], _S],
+        iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
+@overload
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+# TODO memoryview
+@overload
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def next(i: Iterator[_T]) -> _T: ...
+@overload
+def next(i: Iterator[_T], default: _T) -> _T: ...
+def oct(i: int) -> str: ...  # TODO __index__
+@overload
+def open(file: str, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+@overload
+def open(file: unicode, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+@overload
+def open(file: int, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
+def ord(c: unicode) -> int: ...
+# This is only available after from __future__ import print_function.
+def print(*values: Any, sep: unicode = u' ', end: unicode = u'\n',
+           file: IO[Any] = ...) -> None: ...
+@overload
+def pow(x: int, y: int) -> Any: ...  # The return type can be int or float, depending on y.
+@overload
+def pow(x: int, y: int, z: int) -> Any: ...
+@overload
+def pow(x: float, y: float) -> float: ...
+@overload
+def pow(x: float, y: float, z: float) -> float: ...
+def quit(code: int = ...) -> None: ...
+def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
+def raw_input(prompt: unicode = ...) -> str: ...
+
+def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], initializer: _T = None) -> _T: ...
+
+def reload(module: Any) -> Any: ...
+@overload
+def reversed(object: Reversible[_T]) -> Iterator[_T]: ...
+@overload
+def reversed(object: Sequence[_T]) -> Iterator[_T]: ...
+def repr(o: object) -> str: ...
+@overload
+def round(number: float) -> int: ...
+@overload
+def round(number: float, ndigits: int) -> float: ...  # Always return a float if given ndigits.
+@overload
+def round(number: SupportsRound[_T]) -> _T: ...
+@overload
+def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
+def setattr(object: Any, name: unicode, value: Any) -> None: ...
+def sorted(iterable: Iterable[_T], *,
+           cmp: Callable[[_T, _T], int] = ...,
+           key: Callable[[_T], Any] = ...,
+           reverse: bool = ...) -> List[_T]: ...
+def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+def unichr(i: int) -> unicode: ...
+def vars(object: Any = ...) -> Dict[str, Any]: ...
+@overload
+def zip(iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ...
+@overload
+def zip(iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+        iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+        iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2,
+                                           _T3, _T4]]: ...  # TODO more than four iterables
+def __import__(name: unicode,
+               globals: Dict[str, Any] = ...,
+               locals: Dict[str, Any] = ...,
+               fromlist: List[str] = ..., level: int = ...) -> Any: ...
+
+def globals() -> Dict[str, Any]: ...
+def locals() -> Dict[str, Any]: ...
+
+# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
+# not exposed anywhere under that name, we make it private here.
+class ellipsis: ...
+Ellipsis = ...  # type: ellipsis
+
+# TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check.
+AnyBuffer = TypeVar('AnyBuffer', str, unicode, bytearray, buffer)
+
+class buffer(Sized):
+    def __init__(self, object: AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
+    def __add__(self, other: AnyBuffer) -> str: ...
+    def __cmp__(self, other: AnyBuffer) -> bool: ...
+    def __getitem__(self, key: Union[int, slice]) -> str: ...
+    def __getslice__(self, i: int, j: int) -> str: ...
+    def __len__(self) -> int: ...
+    def __mul__(self, x: int) -> str: ...
+
+class BaseException:
+    args = ...  # type: Any
+    message = ...  # type: str
+    def __init__(self, *args: Any) -> None: ...
+    def with_traceback(self, tb: Any) -> BaseException: ...
+class GeneratorExit(BaseException): ...
+class KeyboardInterrupt(BaseException): ...
+class SystemExit(BaseException):
+    code = 0
+class Exception(BaseException): ...
+class StopIteration(Exception): ...
+class StandardError(Exception): ...
+class ArithmeticError(StandardError): ...
+class BufferError(StandardError): ...
+class EnvironmentError(StandardError):
+    errno = 0
+    strerror = ...  # type: str
+    # TODO can this be unicode?
+    filename = ...  # type: str
+class LookupError(StandardError): ...
+class RuntimeError(StandardError): ...
+class ValueError(StandardError): ...
+class AssertionError(StandardError): ...
+class AttributeError(StandardError): ...
+class EOFError(StandardError): ...
+class FloatingPointError(ArithmeticError): ...
+class IOError(EnvironmentError): ...
+class ImportError(StandardError): ...
+class IndexError(LookupError): ...
+class KeyError(LookupError): ...
+class MemoryError(StandardError): ...
+class NameError(StandardError): ...
+class NotImplementedError(RuntimeError): ...
+class OSError(EnvironmentError): ...
+class WindowsError(OSError): ...
+class OverflowError(ArithmeticError): ...
+class ReferenceError(StandardError): ...
+class SyntaxError(StandardError): ...
+class IndentationError(SyntaxError): ...
+class TabError(IndentationError): ...
+class SystemError(StandardError): ...
+class TypeError(StandardError): ...
+class UnboundLocalError(NameError): ...
+class UnicodeError(ValueError): ...
+class UnicodeDecodeError(UnicodeError): ...
+class UnicodeEncodeError(UnicodeError): ...
+class UnicodeTranslateError(UnicodeError): ...
+class ZeroDivisionError(ArithmeticError): ...
+
+class Warning(Exception): ...
+class UserWarning(Warning): ...
+class DeprecationWarning(Warning): ...
+class SyntaxWarning(Warning): ...
+class RuntimeWarning(Warning): ...
+class FutureWarning(Warning): ...
+class PendingDeprecationWarning(Warning): ...
+class ImportWarning(Warning): ...
+class UnicodeWarning(Warning): ...
+class BytesWarning(Warning): ...
+class ResourceWarning(Warning): ...
+
+def eval(s: str) -> Any: ...
+
+def cmp(x: Any, y: Any) -> int: ...
+
+def execfile(filename: str, globals: Dict[str, Any] = None, locals: Dict[str, Any] = None) -> None: ...
+
+class file(BinaryIO):
+    @overload
+    def __init__(self, file: str, mode: str = 'r', buffering: int = ...) -> None: ...
+    @overload
+    def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
+    @overload
+    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def read(self, n: int = ...) -> str: ...
+    def __enter__(self) -> BinaryIO: ...
+    def __exit__(self, typ, exc, tb) -> bool: ...
+    def flush(self) -> None: ...
+    def fileno(self) -> int: ...
+    def isatty(self) -> bool: ...
+    def close(self) -> None: ...
+
+    def readable(self) -> bool: ...
+    def writable(self) -> bool: ...
+    def seekable(self) -> bool: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def write(self, data: str) -> None: ...
+    def writelines(self, data: Iterable[str]) -> None: ...
+    def truncate(self, pos: int = ...) -> int: ...
diff --git a/typeshed/stdlib/2.7/cPickle.pyi b/typeshed/stdlib/2.7/cPickle.pyi
new file mode 100644
index 0000000..583cb8b
--- /dev/null
+++ b/typeshed/stdlib/2.7/cPickle.pyi
@@ -0,0 +1,23 @@
+from typing import Any, IO, List
+
+HIGHEST_PROTOCOL = ...  # type: int
+compatible_formats = ...  # type: List[str]
+format_version = ...  # type: str
+
+class Pickler:
+    def __init__(self, file: IO[str], protocol: int = ...) -> None: ...
+
+
+class Unpickler:
+    def __init__(self, file: IO[str]) -> None: ...
+
+def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ...
+def dumps(obj: Any, protocol: int = ...) -> str: ...
+def load(file: IO[str]) -> Any: ...
+def loads(str: str) -> Any: ...
+
+class PickleError(Exception): ...
+class UnpicklingError(PickleError): ...
+class BadPickleGet(UnpicklingError): ...
+class PicklingError(PickleError): ...
+class UnpickleableError(PicklingError): ...
diff --git a/typeshed/stdlib/2.7/cStringIO.pyi b/typeshed/stdlib/2.7/cStringIO.pyi
new file mode 100644
index 0000000..5b4ad25
--- /dev/null
+++ b/typeshed/stdlib/2.7/cStringIO.pyi
@@ -0,0 +1,50 @@
+# Stubs for cStringIO (Python 2.7)
+# See https://docs.python.org/2/library/stringio.html
+
+from typing import overload, IO, List, Iterable, Iterator, Optional, Union
+from types import TracebackType
+
+# TODO the typing.IO[] generics should be split into input and output.
+
+class InputType(IO[str], Iterator[str]):
+    def getvalue(self) -> str: ...
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, size: int = ...) -> str: ...
+    def readline(self, size: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> Optional[int]: ...
+    def __iter__(self) -> 'InputType': ...
+    def next(self) -> str: ...
+    def reset(self) -> None: ...
+
+class OutputType(IO[str], Iterator[str]):
+    @property
+    def softspace(self) -> int: ...
+    def getvalue(self) -> str: ...
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, size: int = ...) -> str: ...
+    def readline(self, size: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> Optional[int]: ...
+    def __iter__(self) -> 'OutputType': ...
+    def next(self) -> str: ...
+    def reset(self) -> None: ...
+    def write(self, b: Union[str, unicode]) -> None: ...
+    def writelines(self, lines: Iterable[Union[str, unicode]]) -> None: ...
+
+@overload
+def StringIO() -> OutputType: ...
+@overload
+def StringIO(s: str) -> InputType: ...
diff --git a/typeshed/stdlib/2.7/codecs.pyi b/typeshed/stdlib/2.7/codecs.pyi
new file mode 100644
index 0000000..bd576ad
--- /dev/null
+++ b/typeshed/stdlib/2.7/codecs.pyi
@@ -0,0 +1,194 @@
+# Better codecs stubs hand-written by o11c.
+# https://docs.python.org/2/library/codecs.html
+from typing import (
+        BinaryIO,
+        Callable,
+        Iterable,
+        Iterator,
+        List,
+        Tuple,
+        Union,
+)
+
+from abc import abstractmethod
+
+
+# TODO: this only satisfies the most common interface, where
+# str is the raw form and unicode is the cooked form.
+# In the long run, both should become template parameters maybe?
+# There *are* str->str and unicode->unicode encodings in the standard library.
+# And unlike python 3, they are in fairly widespread use.
+
+_decoded = unicode
+_encoded = str
+
+# TODO: It is not possible to specify these signatures correctly, because
+# they have an optional positional or keyword argument for errors=.
+_encode_type = Callable[[_decoded], _encoded] # signature of Codec().encode
+_decode_type = Callable[[_encoded], _decoded] # signature of Codec().decode
+_stream_reader_type = Callable[[BinaryIO], 'StreamReader'] # signature of StreamReader __init__
+_stream_writer_type = Callable[[BinaryIO], 'StreamWriter'] # signature of StreamWriter __init__
+_incremental_encoder_type = Callable[[], 'IncrementalEncoder'] # signature of IncrementalEncoder __init__
+_incremental_decode_type = Callable[[], 'IncrementalDecoder'] # signature of IncrementalDecoder __init__
+
+
+def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded:
+    ...
+def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
+    ...
+
+def lookup(encoding: str) -> 'CodecInfo':
+    ...
+class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
+    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decode_type = ..., name: str = ...) -> None:
+        self.encode = encode
+        self.decode = decode
+        self.streamreader = streamreader
+        self.streamwriter = streamwriter
+        self.incrementalencoder = incrementalencoder
+        self.incrementaldecoder = incrementaldecoder
+        self.name = name
+
+def getencoder(encoding: str) -> _encode_type:
+    ...
+def getdecoder(encoding: str) -> _decode_type:
+    ...
+def getincrementalencoder(encoding: str) -> _incremental_encoder_type:
+    ...
+def getincrementaldecoder(encoding: str) -> _incremental_decode_type:
+    ...
+def getreader(encoding: str) -> _stream_reader_type:
+    ...
+def getwriter(encoding: str) -> _stream_writer_type:
+    ...
+
+def register(search_function: Callable[[str], CodecInfo]) -> None:
+    ...
+
+def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter:
+    ...
+
+def EncodedFile(file: BinaryIO, data_encoding: str, file_encoding: str = ..., errors = ...) -> 'StreamRecoder':
+    ...
+
+def iterencode(iterator: Iterable[_decoded], encoding: str, errors: str = ...) -> Iterator[_encoded]:
+    ...
+def iterdecode(iterator: Iterable[_encoded], encoding: str, errors: str = ...) -> Iterator[_decoded]:
+    ...
+
+BOM = b''
+BOM_BE = b''
+BOM_LE = b''
+BOM_UTF8 = b''
+BOM_UTF16 = b''
+BOM_UTF16_BE = b''
+BOM_UTF16_LE = b''
+BOM_UTF32 = b''
+BOM_UTF32_BE = b''
+BOM_UTF32_LE = b''
+
+# It is expected that different actions be taken depending on which of the
+# three subclasses of `UnicodeError` is actually ...ed. However, the Union
+# is still needed for at least one of the cases.
+def register_error(name: str, error_handler: Callable[[UnicodeError], Tuple[Union[str, bytes], int]]) -> None:
+    ...
+def lookup_error(name: str) -> Callable[[UnicodeError], Tuple[Union[str, bytes], int]]:
+    ...
+
+def strict_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def replace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def ignore_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def backslashreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+
+class Codec:
+    # These are sort of @abstractmethod but sort of not.
+    # The StreamReader and StreamWriter subclasses only implement one.
+    def encode(self, input: _decoded, errors: str = ...) -> Tuple[_encoded, int]:
+        ...
+    def decode(self, input: _encoded, errors: str = ...) -> Tuple[_decoded, int]:
+        ...
+
+class IncrementalEncoder:
+    def __init__(self, errors: str = ...) -> None:
+        self.errors = errors
+    @abstractmethod
+    def encode(self, object: _decoded, final: bool = ...) -> _encoded:
+        ...
+    def reset(self) -> None:
+        ...
+    # documentation says int but str is needed for the subclass.
+    def getstate(self) -> Union[int, _decoded]:
+        ...
+    def setstate(self, state: Union[int, _decoded]) -> None:
+        ...
+
+class IncrementalDecoder:
+    def __init__(self, errors: str = ...) -> None:
+        self.errors = errors
+    @abstractmethod
+    def decode(self, object: _encoded, final: bool = ...) -> _decoded:
+        ...
+    def reset(self) -> None:
+        ...
+    def getstate(self) -> Tuple[_encoded, int]:
+        ...
+    def setstate(self, state: Tuple[_encoded, int]) -> None:
+        ...
+
+# These are not documented but used in encodings/*.py implementations.
+class BufferedIncrementalEncoder(IncrementalEncoder):
+    def __init__(self, errors: str = ...) -> None:
+        IncrementalEncoder.__init__(self, errors)
+        self.buffer = ''
+    @abstractmethod
+    def _buffer_encode(self, input: _decoded, errors: str, final: bool) -> _encoded:
+        ...
+    def encode(self, input: _decoded, final: bool = ...) -> _encoded:
+        ...
+class BufferedIncrementalDecoder(IncrementalDecoder):
+    def __init__(self, errors: str = ...) -> None:
+        IncrementalDecoder.__init__(self, errors)
+        self.buffer = b''
+    @abstractmethod
+    def _buffer_decode(self, input: _encoded, errors: str, final: bool) -> Tuple[_decoded, int]:
+        ...
+    def decode(self, object: _encoded, final: bool = ...) -> _decoded:
+        ...
+
+# TODO: it is not possible to specify the requirement that all other
+# attributes and methods are passed-through from the stream.
+class StreamWriter(Codec):
+    def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+        self.errors = errors
+    def write(self, obj: _decoded) -> None:
+        ...
+    def writelines(self, list: List[str]) -> None:
+        ...
+    def reset(self) -> None:
+        ...
+
+class StreamReader(Codec):
+    def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+        self.errors = errors
+    def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _decoded:
+        ...
+    def readline(self, size: int = ..., keepends: bool = ...) -> _decoded:
+        ...
+    def readlines(self, sizehint: int = ..., keepends: bool = ...) -> List[_decoded]:
+        ...
+    def reset(self) -> None:
+        ...
+
+class StreamReaderWriter:
+    def __init__(self, stream: BinaryIO, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
+        ...
+
+class StreamRecoder(BinaryIO):
+    def __init__(self, stream: BinaryIO, encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
+        ...
diff --git a/typeshed/stdlib/2.7/collections.pyi b/typeshed/stdlib/2.7/collections.pyi
new file mode 100644
index 0000000..69f1367
--- /dev/null
+++ b/typeshed/stdlib/2.7/collections.pyi
@@ -0,0 +1,91 @@
+# Stubs for collections
+
+# Based on http://docs.python.org/2.7/library/collections.html
+
+# TODO more abstract base classes (interfaces in mypy)
+
+# NOTE: These are incomplete!
+
+from typing import (
+    Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator, Sized,
+    Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence
+)
+import typing
+
+_T = TypeVar('_T')
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+
+# namedtuple is special-cased in the type checker; the initializer is ignored.
+namedtuple = object()
+
+class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T] = ...,
+                 maxlen: int = ...) -> None: ...
+    @property
+    def maxlen(self) -> Optional[int]: ...
+    def append(self, x: _T) -> None: ...
+    def appendleft(self, x: _T) -> None: ...
+    def clear(self) -> None: ...
+    def count(self, x: _T) -> int: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def extendleft(self, iterable: Iterable[_T]) -> None: ...
+    def pop(self) -> _T: ...
+    def popleft(self) -> _T: ...
+    def remove(self, value: _T) -> None: ...
+    def reverse(self) -> None: ...
+    def rotate(self, n: int) -> None: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+    def __getitem__(self, i: int) -> _T: ...
+    def __setitem__(self, i: int, x: _T) -> None: ...
+    def __contains__(self, o: _T) -> bool: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+
+class Counter(Dict[_T, int], Generic[_T]):
+    # TODO: __init__ keyword arguments
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, Mapping: Mapping[_T, int]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def elements(self) -> Iterator[_T]: ...
+    def most_common(self, n: int = ...) -> List[_T]: ...
+    @overload
+    def subtract(self, mapping: Mapping[_T, int]) -> None: ...
+    @overload
+    def subtract(self, iterable: Iterable[_T]) -> None: ...
+    # The Iterable[Tuple[...]] argument type is not actually desirable
+    # (the tuples will be added as keys, breaking type safety) but
+    # it's included so that the signature is compatible with
+    # Dict.update. Not sure if we should use '# type: ignore' instead
+    # and omit the type from the union.
+    def update(self, m: Union[Mapping[_T, int],
+                              Iterable[Tuple[_T, int]],
+                              Iterable[_T]]) -> None: ...
+
+class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
+    def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
+    def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
+
+class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
+    default_factory = ...  # type: Callable[[], _VT]
+    # TODO: __init__ keyword args
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT],
+                 map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT],
+                 iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    def __missing__(self, key: _KT) -> _VT: ...
diff --git a/typeshed/stdlib/2.7/compileall.pyi b/typeshed/stdlib/2.7/compileall.pyi
new file mode 100644
index 0000000..103d622
--- /dev/null
+++ b/typeshed/stdlib/2.7/compileall.pyi
@@ -0,0 +1,7 @@
+# Stubs for compileall (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def compile_dir(dir, maxlevels=..., ddir=..., force=..., rx=..., quiet=...): ...
+def compile_file(fullname, ddir=..., force=..., rx=..., quiet=...): ...
+def compile_path(skip_curdir=..., maxlevels=..., force=..., quiet=...): ...
diff --git a/typeshed/stdlib/2.7/contextlib.pyi b/typeshed/stdlib/2.7/contextlib.pyi
new file mode 100644
index 0000000..af9f0ab
--- /dev/null
+++ b/typeshed/stdlib/2.7/contextlib.pyi
@@ -0,0 +1,15 @@
+# Stubs for contextlib
+
+# NOTE: These are incomplete!
+
+from typing import Any, TypeVar, Generic
+
+# TODO more precise type?
+def contextmanager(func: Any) -> Any: ...
+
+_T = TypeVar('_T')
+
+class closing(Generic[_T]):
+    def __init__(self, thing: _T) -> None: ...
+    def __enter__(self) -> _T: ...
+    def __exit__(self, *exc_info) -> None: ...
diff --git a/typeshed/stdlib/2.7/copy.pyi b/typeshed/stdlib/2.7/copy.pyi
new file mode 100644
index 0000000..237f420
--- /dev/null
+++ b/typeshed/stdlib/2.7/copy.pyi
@@ -0,0 +1,10 @@
+# Stubs for copy
+
+# NOTE: These are incomplete!
+
+from typing import TypeVar
+
+_T = TypeVar('_T')
+
+def deepcopy(x: _T) -> _T: ...
+def copy(x: _T) -> _T: ...
diff --git a/typeshed/stdlib/2.7/csv.pyi b/typeshed/stdlib/2.7/csv.pyi
new file mode 100644
index 0000000..ce33010
--- /dev/null
+++ b/typeshed/stdlib/2.7/csv.pyi
@@ -0,0 +1,93 @@
+# Stubs for csv (Python 2.7)
+#
+# NOTE: Based on a dynamically typed stub automatically generated by stubgen.
+
+from abc import ABCMeta, abstractmethod
+from typing import Any, Dict, Iterable, List, Sequence, Union
+
+# Public interface of _csv.reader
+class Reader(Iterable[List[str]], metaclass=ABCMeta):
+    dialect = ...  # type: Dialect
+    line_num = ...  # type: int
+
+    @abstractmethod
+    def next(self) -> List[str]: ...
+
+_Row = Sequence[Union[str, int]]
+
+# Public interface of _csv.writer
+class Writer(metaclass=ABCMeta):
+    dialect = ...  # type: Dialect
+
+    @abstractmethod
+    def writerow(self, row: _Row) -> None: ...
+
+    @abstractmethod
+    def writerows(self, rows: Iterable[_Row]) -> None: ...
+
+QUOTE_ALL = ...  # type: int
+QUOTE_MINIMAL = ...  # type: int
+QUOTE_NONE = ...  # type: int
+QUOTE_NONNUMERIC = ...  # type: int
+
+class Error(Exception): ...
+
+_Dialect = Union[str, Dialect]
+
+def writer(csvfile: Any, dialect: _Dialect = ..., **fmtparams) -> Writer: ...
+def reader(csvfile: Iterable[str], dialect: _Dialect = ..., **fmtparams) -> Reader: ...
+def register_dialect(name, dialect=..., **fmtparams): ...
+def unregister_dialect(name): ...
+def get_dialect(name: str) -> Dialect: ...
+def list_dialects(): ...
+def field_size_limit(new_limit: int = ...) -> int: ...
+
+class Dialect:
+    delimiter = ...  # type: str
+    quotechar = ...  # type: str
+    escapechar = ...  # type: str
+    doublequote = ...  # type: bool
+    skipinitialspace = ...  # type: bool
+    lineterminator = ...  # type: str
+    quoting = ...  # type: int
+    def __init__(self) -> None: ...
+
+class excel(Dialect):
+    pass
+
+class excel_tab(excel):
+    pass
+
+class unix_dialect(Dialect):
+    pass
+
+class DictReader(Iterable):
+    restkey = ...  # type: Any
+    restval = ...  # type: Any
+    reader = ...  # type: Any
+    dialect = ...  # type: _Dialect
+    line_num = ...  # type: int
+    fieldnames = ...  # type: Sequence[Any]
+    def __init__(self, f: Iterable[str], fieldnames: Sequence[Any] = ..., restkey=...,
+                 restval=..., dialect: _Dialect = ..., *args, **kwds) -> None: ...
+    def __iter__(self): ...
+    def __next__(self): ...
+
+_DictRow = Dict[Any, Union[str, int]]
+
+class DictWriter:
+    fieldnames = ...  # type: Any
+    restval = ...  # type: Any
+    extrasaction = ...  # type: Any
+    writer = ...  # type: Any
+    def __init__(self, f: Any, fieldnames: Sequence[Any], restval=..., extrasaction: str = ...,
+                 dialect: _Dialect = ..., *args, **kwds) -> None: ...
+    def writeheader(self) -> None: ...
+    def writerow(self, rowdict: _DictRow) -> None: ...
+    def writerows(self, rowdicts: Iterable[_DictRow]) -> None: ...
+
+class Sniffer:
+    preferred = ...  # type: Any
+    def __init__(self) -> None: ...
+    def sniff(self, sample: str, delimiters: str = ...) -> Dialect: ...
+    def has_header(self, sample: str) -> bool: ...
diff --git a/typeshed/stdlib/2.7/datetime.pyi b/typeshed/stdlib/2.7/datetime.pyi
new file mode 100644
index 0000000..44699f3
--- /dev/null
+++ b/typeshed/stdlib/2.7/datetime.pyi
@@ -0,0 +1,221 @@
+# Stubs for datetime
+
+# NOTE: These are incomplete!
+
+from time import struct_time
+from typing import Optional, SupportsAbs, Tuple, Union, overload
+
+MINYEAR = 0
+MAXYEAR = 0
+
+class tzinfo(object):
+    def tzname(self, dt: Optional[datetime]) -> str: ...
+    def utcoffset(self, dt: Optional[datetime]) -> int: ...
+    def dst(self, dt: Optional[datetime]) -> int: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+
+class timezone(tzinfo):
+    utc = ...  # type: tzinfo
+    min = ...  # type: tzinfo
+    max = ...  # type: tzinfo
+
+    def __init__(self, offset: timedelta, name: str = ...) -> None: ...
+    def __hash__(self) -> int: ...
+
+_tzinfo = tzinfo
+_timezone = timezone
+
+class date(object):
+    min = ...  # type: date
+    max = ...  # type: date
+    resolution = ...  # type: timedelta
+
+    def __init__(self, year: int, month: int = ..., day: int = ...) -> None: ...
+
+    @classmethod
+    def fromtimestamp(cls, t: float) -> date: ...
+    @classmethod
+    def today(cls) -> date: ...
+    @classmethod
+    def fromordinal(cls, n: int) -> date: ...
+
+    @property
+    def year(self) -> int: ...
+    @property
+    def month(self) -> int: ...
+    @property
+    def day(self) -> int: ...
+
+    def ctime(self) -> str: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: Union[str, unicode]) -> str: ...
+    def isoformat(self) -> str: ...
+    def timetuple(self) -> struct_time: ...
+    def toordinal(self) -> int: ...
+    def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ...
+    def __le__(self, other: date) -> bool: ...
+    def __lt__(self, other: date) -> bool: ...
+    def __ge__(self, other: date) -> bool: ...
+    def __gt__(self, other: date) -> bool: ...
+    def __add__(self, other: timedelta) -> date: ...
+    @overload
+    def __sub__(self, other: timedelta) -> date: ...
+    @overload
+    def __sub__(self, other: date) -> timedelta: ...
+    def __hash__(self) -> int: ...
+    def weekday(self) -> int: ...
+    def isoweekday(self) -> int: ...
+    def isocalendar(self) -> Tuple[int, int, int]: ...
+
+class time:
+    min = ...  # type: time
+    max = ...  # type: time
+    resolution = ...  # type: timedelta
+
+    def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ...,
+                 tzinfo: tzinfo = ...) -> None: ...
+
+    @property
+    def hour(self) -> int: ...
+    @property
+    def minute(self) -> int: ...
+    @property
+    def second(self) -> int: ...
+    @property
+    def microsecond(self) -> int: ...
+    @property
+    def tzinfo(self) -> _tzinfo: ...
+
+    def __le__(self, other: time) -> bool: ...
+    def __lt__(self, other: time) -> bool: ...
+    def __ge__(self, other: time) -> bool: ...
+    def __gt__(self, other: time) -> bool: ...
+    def __hash__(self) -> int: ...
+    def isoformat(self) -> str: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: str) -> str: ...
+    def utcoffset(self) -> Optional[int]: ...
+    def tzname(self) -> Optional[str]: ...
+    def dst(self) -> Optional[int]: ...
+    def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
+                microsecond: int = ..., tzinfo: Union[_tzinfo, bool] = ...) -> time: ...
+
+_date = date
+_time = time
+
+class timedelta(SupportsAbs[timedelta]):
+    min = ...  # type: timedelta
+    max = ...  # type: timedelta
+    resolution = ...  # type: timedelta
+
+    def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ...,
+                 milliseconds: float = ..., minutes: float = ..., hours: float = ...,
+                 weeks: float = ...) -> None: ...
+
+    @property
+    def days(self) -> int: ...
+    @property
+    def seconds(self) -> int: ...
+    @property
+    def microseconds(self) -> int: ...
+
+    def total_seconds(self) -> float: ...
+    def __add__(self, other: timedelta) -> timedelta: ...
+    def __radd__(self, other: timedelta) -> timedelta: ...
+    def __sub__(self, other: timedelta) -> timedelta: ...
+    def __rsub__(self, other: timedelta) -> timedelta: ...
+    def __neg__(self) -> timedelta: ...
+    def __pos__(self) -> timedelta: ...
+    def __abs__(self) -> timedelta: ...
+    def __mul__(self, other: float) -> timedelta: ...
+    def __rmul__(self, other: float) -> timedelta: ...
+    @overload
+    def __floordiv__(self, other: timedelta) -> int: ...
+    @overload
+    def __floordiv__(self, other: int) -> timedelta: ...
+    @overload
+    def __truediv__(self, other: timedelta) -> float: ...
+    @overload
+    def __truediv__(self, other: float) -> timedelta: ...
+    def __mod__(self, other: timedelta) -> timedelta: ...
+    def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ...
+    def __le__(self, other: timedelta) -> bool: ...
+    def __lt__(self, other: timedelta) -> bool: ...
+    def __ge__(self, other: timedelta) -> bool: ...
+    def __gt__(self, other: timedelta) -> bool: ...
+    def __hash__(self) -> int: ...
+
+class datetime(object):
+    # TODO: is actually subclass of date, but __le__, __lt__, __ge__, __gt__ don't work with date.
+    min = ...  # type: datetime
+    max = ...  # type: datetime
+    resolution = ...  # type: timedelta
+
+    def __init__(self, year: int, month: int = ..., day: int = ..., hour: int = ...,
+                 minute: int = ..., second: int = ..., microsecond: int = ...,
+                 tzinfo: tzinfo = ...) -> None: ...
+
+    @property
+    def year(self) -> int: ...
+    @property
+    def month(self) -> int: ...
+    @property
+    def day(self) -> int: ...
+    @property
+    def hour(self) -> int: ...
+    @property
+    def minute(self) -> int: ...
+    @property
+    def second(self) -> int: ...
+    @property
+    def microsecond(self) -> int: ...
+    @property
+    def tzinfo(self) -> _tzinfo: ...
+
+    @classmethod
+    def fromtimestamp(cls, t: float, tz: timezone = ...) -> datetime: ...
+    @classmethod
+    def utcfromtimestamp(cls, t: float) -> datetime: ...
+    @classmethod
+    def today(cls) -> datetime: ...
+    @classmethod
+    def fromordinal(cls, n: int) -> datetime: ...
+    @classmethod
+    def now(cls, tz: timezone = ...) -> datetime: ...
+    @classmethod
+    def utcnow(cls) -> datetime: ...
+    @classmethod
+    def combine(cls, date: date, time: time) -> datetime: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: str) -> str: ...
+    def toordinal(self) -> int: ...
+    def timetuple(self) -> struct_time: ...
+    def timestamp(self) -> float: ...
+    def utctimetuple(self) -> struct_time: ...
+    def date(self) -> _date: ...
+    def time(self) -> _time: ...
+    def timetz(self) -> _time: ...
+    def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ...,
+                minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo:
+                Union[_tzinfo, bool] = ...) -> datetime: ...
+    def astimezone(self, tz: timezone = ...) -> datetime: ...
+    def ctime(self) -> str: ...
+    def isoformat(self, sep: str = ...) -> str: ...
+    @classmethod
+    def strptime(cls, date_string: str, format: str) -> datetime: ...
+    def utcoffset(self) -> Optional[int]: ...
+    def tzname(self) -> Optional[str]: ...
+    def dst(self) -> Optional[int]: ...
+    def __le__(self, other: datetime) -> bool: ...
+    def __lt__(self, other: datetime) -> bool: ...
+    def __ge__(self, other: datetime) -> bool: ...
+    def __gt__(self, other: datetime) -> bool: ...
+    def __add__(self, other: timedelta) -> datetime: ...
+    @overload
+    def __sub__(self, other: datetime) -> timedelta: ...
+    @overload
+    def __sub__(self, other: timedelta) -> datetime: ...
+    def __hash__(self) -> int: ...
+    def weekday(self) -> int: ...
+    def isoweekday(self) -> int: ...
+    def isocalendar(self) -> Tuple[int, int, int]: ...
diff --git a/typeshed/stdlib/2.7/difflib.pyi b/typeshed/stdlib/2.7/difflib.pyi
new file mode 100644
index 0000000..eaf2b5d
--- /dev/null
+++ b/typeshed/stdlib/2.7/difflib.pyi
@@ -0,0 +1,63 @@
+# Stubs for difflib
+
+# Based on https://docs.python.org/2.7/library/difflib.html
+
+# TODO: Support unicode?
+
+from typing import (
+    TypeVar, Callable, Iterable, List, NamedTuple, Sequence, Tuple, Generic
+)
+
+_T = TypeVar('_T')
+
+class SequenceMatcher(Generic[_T]):
+    def __init__(self, isjunk: Callable[[_T], bool] = ...,
+                 a: Sequence[_T] = ..., b: Sequence[_T] = ...,
+                 autojunk: bool = ...) -> None: ...
+    def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ...
+    def set_seq1(self, a: Sequence[_T]) -> None: ...
+    def set_seq2(self, b: Sequence[_T]) -> None: ...
+    def find_longest_match(self, alo: int, ahi: int, blo: int,
+                           bhi: int) -> Tuple[int, int, int]: ...
+    def get_matching_blocks(self) -> List[Tuple[int, int, int]]: ...
+    def get_opcodes(self) -> List[Tuple[str, int, int, int, int]]: ...
+    def get_grouped_opcodes(self, n: int = ...
+                            ) -> Iterable[Tuple[str, int, int, int, int]]: ...
+    def ratio(self) -> float: ...
+    def quick_ratio(self) -> float: ...
+    def real_quick_ratio(self) -> float: ...
+
+def get_close_matches(word: Sequence[_T], possibilities: List[Sequence[_T]],
+                      n: int = ..., cutoff: float = ...) -> List[Sequence[_T]]: ...
+
+class Differ:
+    def __init__(self, linejunk: Callable[[str], bool] = ...,
+                 charjunk: Callable[[str], bool] = ...) -> None: ...
+    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterable[str]: ...
+
+def IS_LINE_JUNK(str) -> bool: ...
+def IS_CHARACTER_JUNK(str) -> bool: ...
+def unified_diff(a: Sequence[str], b: Sequence[str], fromfile: str = ...,
+                 tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
+                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+def context_diff(a: Sequence[str], b: Sequence[str], fromfile: str=...,
+                 tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
+                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+def ndiff(a: Sequence[str], b: Sequence[str],
+          linejunk: Callable[[str], bool] = ...,
+          charjunk: Callable[[str], bool] = ...
+          ) -> Iterable[str]: ...
+
+class HtmlDiff(object):
+    def __init__(self, tabsize: int = ..., wrapcolumn: int = ...,
+                 linejunk: Callable[[str], bool] = ...,
+                 charjunk: Callable[[str], bool] = ...
+                 ) -> None: ...
+    def make_file(self, fromlines: Sequence[str], tolines: Sequence[str],
+                  fromdesc: str = ..., todesc: str = ..., context: bool = ...,
+                  numlines: int = ...) -> str: ...
+    def make_table(self, fromlines: Sequence[str], tolines: Sequence[str],
+                   fromdesc: str = ..., todesc: str = ..., context: bool = ...,
+                   numlines: int = ...) -> str: ...
+
+def restore(delta: Iterable[str], which: int) -> Iterable[int]: ...
diff --git a/typeshed/stdlib/2.7/distutils/__init__.pyi b/typeshed/stdlib/2.7/distutils/__init__.pyi
new file mode 100644
index 0000000..d4853fe
--- /dev/null
+++ b/typeshed/stdlib/2.7/distutils/__init__.pyi
@@ -0,0 +1,7 @@
+# Stubs for distutils (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+__revision__ = ... # type: Any
diff --git a/typeshed/stdlib/2.7/distutils/version.pyi b/typeshed/stdlib/2.7/distutils/version.pyi
new file mode 100644
index 0000000..a850144
--- /dev/null
+++ b/typeshed/stdlib/2.7/distutils/version.pyi
@@ -0,0 +1,23 @@
+# Stubs for distutils.version (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Version:
+    def __init__(self, vstring=...) -> None: ...
+
+class StrictVersion(Version):
+    version_re = ... # type: Any
+    version = ... # type: Any
+    prerelease = ... # type: Any
+    def parse(self, vstring): ...
+    def __cmp__(self, other): ...
+
+class LooseVersion(Version):
+    component_re = ... # type: Any
+    def __init__(self, vstring=...) -> None: ...
+    vstring = ... # type: Any
+    version = ... # type: Any
+    def parse(self, vstring): ...
+    def __cmp__(self, other): ...
diff --git a/typeshed/stdlib/2.7/doctest.pyi b/typeshed/stdlib/2.7/doctest.pyi
new file mode 100644
index 0000000..ab88328
--- /dev/null
+++ b/typeshed/stdlib/2.7/doctest.pyi
@@ -0,0 +1,9 @@
+# Stubs for doctest
+
+# NOTE: These are incomplete!
+
+from typing import Any, Tuple
+
+# TODO arguments missing
+def testmod(m: Any = ..., name: str = ..., globs: Any = ...,
+            verbose: bool = ...) -> Tuple[int, int]: ...
diff --git a/typeshed/stdlib/2.7/email/MIMEText.pyi b/typeshed/stdlib/2.7/email/MIMEText.pyi
new file mode 100644
index 0000000..a15eb1d
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/MIMEText.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.MIMEText (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEText(MIMENonMultipart):
+    def __init__(self, _text, _subtype=..., _charset=...) -> None: ...
diff --git a/typeshed/stdlib/2.7/email/__init__.pyi b/typeshed/stdlib/2.7/email/__init__.pyi
new file mode 100644
index 0000000..384d956
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/__init__.pyi
@@ -0,0 +1,6 @@
+from typing import IO, Any, AnyStr
+
+def message_from_string(s: AnyStr, *args, **kwargs): ...
+def message_from_bytes(s: str, *args, **kwargs): ...
+def message_from_file(fp: IO[AnyStr], *args, **kwargs): ...
+def message_from_binary_file(fp: IO[str], *args, **kwargs): ...
diff --git a/typeshed/stdlib/2.7/email/mime/__init__.pyi b/typeshed/stdlib/2.7/email/mime/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/2.7/email/mime/base.pyi b/typeshed/stdlib/2.7/email/mime/base.pyi
new file mode 100644
index 0000000..82554da
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/mime/base.pyi
@@ -0,0 +1,10 @@
+# Stubs for email.mime.base (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# import message
+
+# TODO
+#class MIMEBase(message.Message):
+class MIMEBase:
+    def __init__(self, _maintype, _subtype, **_params) -> None: ...
diff --git a/typeshed/stdlib/2.7/email/mime/multipart.pyi b/typeshed/stdlib/2.7/email/mime/multipart.pyi
new file mode 100644
index 0000000..2bc5a9e
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/mime/multipart.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.multipart (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.base import MIMEBase
+
+class MIMEMultipart(MIMEBase):
+    def __init__(self, _subtype=..., boundary=..., _subparts=..., **_params) -> None: ...
diff --git a/typeshed/stdlib/2.7/email/mime/nonmultipart.pyi b/typeshed/stdlib/2.7/email/mime/nonmultipart.pyi
new file mode 100644
index 0000000..b0894ab
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/mime/nonmultipart.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.nonmultipart (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.base import MIMEBase
+
+class MIMENonMultipart(MIMEBase):
+    def attach(self, payload): ...
diff --git a/typeshed/stdlib/2.7/email/mime/text.pyi b/typeshed/stdlib/2.7/email/mime/text.pyi
new file mode 100644
index 0000000..b6ec4c8
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/mime/text.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.text (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEText(MIMENonMultipart):
+    def __init__(self, _text, _subtype=..., _charset=...) -> None: ...
diff --git a/typeshed/stdlib/2.7/encodings/__init__.pyi b/typeshed/stdlib/2.7/encodings/__init__.pyi
new file mode 100644
index 0000000..2ae6c0a
--- /dev/null
+++ b/typeshed/stdlib/2.7/encodings/__init__.pyi
@@ -0,0 +1,6 @@
+import codecs
+
+import typing
+
+def search_function(encoding: str) -> codecs.CodecInfo:
+    ...
diff --git a/typeshed/stdlib/2.7/encodings/utf_8.pyi b/typeshed/stdlib/2.7/encodings/utf_8.pyi
new file mode 100644
index 0000000..3be496a
--- /dev/null
+++ b/typeshed/stdlib/2.7/encodings/utf_8.pyi
@@ -0,0 +1,14 @@
+import codecs
+
+class IncrementalEncoder(codecs.IncrementalEncoder):
+    pass
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+    pass
+class StreamWriter(codecs.StreamWriter):
+    pass
+class StreamReader(codecs.StreamReader):
+    pass
+
+def getregentry() -> codecs.CodecInfo: pass
+def encode(input: str, errors: str = ...) -> bytes: pass
+def decode(input: bytes, errors: str = ...) -> str: pass
diff --git a/typeshed/stdlib/2.7/errno.pyi b/typeshed/stdlib/2.7/errno.pyi
new file mode 100644
index 0000000..c60170d
--- /dev/null
+++ b/typeshed/stdlib/2.7/errno.pyi
@@ -0,0 +1,129 @@
+"""Stubs for the 'errno' module."""
+
+from typing import Dict
+
+errorcode = ...  # type: Dict[int, str]
+
+E2BIG = ...  # type: int
+EACCES = ...  # type: int
+EADDRINUSE = ...  # type: int
+EADDRNOTAVAIL = ...  # type: int
+EADV = ...  # type: int
+EAFNOSUPPORT = ...  # type: int
+EAGAIN = ...  # type: int
+EALREADY = ...  # type: int
+EBADE = ...  # type: int
+EBADF = ...  # type: int
+EBADFD = ...  # type: int
+EBADMSG = ...  # type: int
+EBADR = ...  # type: int
+EBADRQC = ...  # type: int
+EBADSLT = ...  # type: int
+EBFONT = ...  # type: int
+EBUSY = ...  # type: int
+ECHILD = ...  # type: int
+ECHRNG = ...  # type: int
+ECOMM = ...  # type: int
+ECONNABORTED = ...  # type: int
+ECONNREFUSED = ...  # type: int
+ECONNRESET = ...  # type: int
+EDEADLK = ...  # type: int
+EDEADLOCK = ...  # type: int
+EDESTADDRREQ = ...  # type: int
+EDOM = ...  # type: int
+EDOTDOT = ...  # type: int
+EDQUOT = ...  # type: int
+EEXIST = ...  # type: int
+EFAULT = ...  # type: int
+EFBIG = ...  # type: int
+EHOSTDOWN = ...  # type: int
+EHOSTUNREACH = ...  # type: int
+EIDRM = ...  # type: int
+EILSEQ = ...  # type: int
+EINPROGRESS = ...  # type: int
+EINTR = ...  # type: int
+EINVAL = ...  # type: int
+EIO = ...  # type: int
+EISCONN = ...  # type: int
+EISDIR = ...  # type: int
+EISNAM = ...  # type: int
+EL2HLT = ...  # type: int
+EL2NSYNC = ...  # type: int
+EL3HLT = ...  # type: int
+EL3RST = ...  # type: int
+ELIBACC = ...  # type: int
+ELIBBAD = ...  # type: int
+ELIBEXEC = ...  # type: int
+ELIBMAX = ...  # type: int
+ELIBSCN = ...  # type: int
+ELNRNG = ...  # type: int
+ELOOP = ...  # type: int
+EMFILE = ...  # type: int
+EMLINK = ...  # type: int
+EMSGSIZE = ...  # type: int
+EMULTIHOP = ...  # type: int
+ENAMETOOLONG = ...  # type: int
+ENAVAIL = ...  # type: int
+ENETDOWN = ...  # type: int
+ENETRESET = ...  # type: int
+ENETUNREACH = ...  # type: int
+ENFILE = ...  # type: int
+ENOANO = ...  # type: int
+ENOBUFS = ...  # type: int
+ENOCSI = ...  # type: int
+ENODATA = ...  # type: int
+ENODEV = ...  # type: int
+ENOENT = ...  # type: int
+ENOEXEC = ...  # type: int
+ENOLCK = ...  # type: int
+ENOLINK = ...  # type: int
+ENOMEM = ...  # type: int
+ENOMSG = ...  # type: int
+ENONET = ...  # type: int
+ENOPKG = ...  # type: int
+ENOPROTOOPT = ...  # type: int
+ENOSPC = ...  # type: int
+ENOSR = ...  # type: int
+ENOSTR = ...  # type: int
+ENOSYS = ...  # type: int
+ENOTBLK = ...  # type: int
+ENOTCONN = ...  # type: int
+ENOTDIR = ...  # type: int
+ENOTEMPTY = ...  # type: int
+ENOTNAM = ...  # type: int
+ENOTSOCK = ...  # type: int
+ENOTSUP = ...  # type: int
+ENOTTY = ...  # type: int
+ENOTUNIQ = ...  # type: int
+ENXIO = ...  # type: int
+EOPNOTSUPP = ...  # type: int
+EOVERFLOW = ...  # type: int
+EPERM = ...  # type: int
+EPFNOSUPPORT = ...  # type: int
+EPIPE = ...  # type: int
+EPROTO = ...  # type: int
+EPROTONOSUPPORT = ...  # type: int
+EPROTOTYPE = ...  # type: int
+ERANGE = ...  # type: int
+EREMCHG = ...  # type: int
+EREMOTE = ...  # type: int
+EREMOTEIO = ...  # type: int
+ERESTART = ...  # type: int
+EROFS = ...  # type: int
+ESHUTDOWN = ...  # type: int
+ESOCKTNOSUPPORT = ...  # type: int
+ESPIPE = ...  # type: int
+ESRCH = ...  # type: int
+ESRMNT = ...  # type: int
+ESTALE = ...  # type: int
+ESTRPIPE = ...  # type: int
+ETIME = ...  # type: int
+ETIMEDOUT = ...  # type: int
+ETOOMANYREFS = ...  # type: int
+ETXTBSY = ...  # type: int
+EUCLEAN = ...  # type: int
+EUNATCH = ...  # type: int
+EUSERS = ...  # type: int
+EWOULDBLOCK = ...  # type: int
+EXDEV = ...  # type: int
+EXFULL = ...  # type: int
diff --git a/typeshed/stdlib/2.7/exceptions.pyi b/typeshed/stdlib/2.7/exceptions.pyi
new file mode 100644
index 0000000..1f29e90
--- /dev/null
+++ b/typeshed/stdlib/2.7/exceptions.pyi
@@ -0,0 +1,80 @@
+from typing import Any, List, Optional
+
+class StandardError(Exception): ...
+class ArithmeticError(StandardError): ...
+class AssertionError(StandardError): ...
+class AttributeError(StandardError): ...
+class BaseException(object):
+    args = ...  # type: List[Any]
+    message = ...  # type: str
+    def __getslice__(self, start, end) -> Any: ...
+    def __getitem__(self, start, end) -> Any: ...
+    def __unicode__(self) -> unicode: ...
+class BufferError(StandardError): ...
+class BytesWarning(Warning): ...
+class DeprecationWarning(Warning): ...
+class EOFError(StandardError): ...
+class EnvironmentError(StandardError):
+    errno = ...  # type: int
+    strerror = ...  # type: str
+    filename = ...  # type: str
+class Exception(BaseException): ...
+class FloatingPointError(ArithmeticError): ...
+class FutureWarning(Warning): ...
+class GeneratorExit(BaseException): ...
+class IOError(EnvironmentError): ...
+class ImportError(StandardError): ...
+class ImportWarning(Warning): ...
+class IndentationError(SyntaxError): ...
+class IndexError(LookupError): ...
+class KeyError(LookupError): ...
+class KeyboardInterrupt(BaseException): ...
+class LookupError(StandardError): ...
+class MemoryError(StandardError): ...
+class NameError(StandardError): ...
+class NotImplementedError(RuntimeError): ...
+class OSError(EnvironmentError): ...
+class OverflowError(ArithmeticError): ...
+class PendingDeprecationWarning(Warning): ...
+class ReferenceError(StandardError): ...
+class RuntimeError(StandardError): ...
+class RuntimeWarning(Warning): ...
+class StopIteration(Exception): ...
+class SyntaxError(StandardError):
+    text = ...  # type: str
+    print_file_and_line = ...  # type: Optional[str]
+    filename = ...  # type: str
+    lineno = ...  # type: int
+    offset = ...  # type: int
+    msg = ...  # type: str
+class SyntaxWarning(Warning): ...
+class SystemError(StandardError): ...
+class SystemExit(BaseException):
+    code = ...  # type: int
+class TabError(IndentationError): ...
+class TypeError(StandardError): ...
+class UnboundLocalError(NameError): ...
+class UnicodeError(ValueError): ...
+class UnicodeDecodeError(UnicodeError):
+    start = ...  # type: int
+    reason = ...  # type: str
+    object = ...  # type: str
+    end = ...  # type: int
+    encoding = ...  # type: str
+class UnicodeEncodeError(UnicodeError):
+    start = ...  # type: int
+    reason = ...  # type: str
+    object = ...  # type: unicode
+    end = ...  # type: int
+    encoding = ...  # type: str
+class UnicodeTranslateError(UnicodeError):
+    start = ...  # type: int
+    reason = ...  # type: str
+    object = ...  # type: Any
+    end = ...  # type: int
+    encoding = ...  # type: str
+class UnicodeWarning(Warning): ...
+class UserWarning(Warning): ...
+class ValueError(StandardError): ...
+class Warning(Exception): ...
+class ZeroDivisionError(ArithmeticError): ...
diff --git a/typeshed/stdlib/2.7/fcntl.pyi b/typeshed/stdlib/2.7/fcntl.pyi
new file mode 100644
index 0000000..5a1a536
--- /dev/null
+++ b/typeshed/stdlib/2.7/fcntl.pyi
@@ -0,0 +1,85 @@
+from typing import Union
+import io
+
+FASYNC = ...  # type: int
+FD_CLOEXEC = ...  # type: int
+
+DN_ACCESS = ...  # type: int
+DN_ATTRIB = ...  # type: int
+DN_CREATE = ...  # type: int
+DN_DELETE = ...  # type: int
+DN_MODIFY = ...  # type: int
+DN_MULTISHOT = ...  # type: int
+DN_RENAME = ...  # type: int
+F_DUPFD = ...  # type: int
+F_EXLCK = ...  # type: int
+F_GETFD = ...  # type: int
+F_GETFL = ...  # type: int
+F_GETLEASE = ...  # type: int
+F_GETLK = ...  # type: int
+F_GETLK64 = ...  # type: int
+F_GETOWN = ...  # type: int
+F_GETSIG = ...  # type: int
+F_NOTIFY = ...  # type: int
+F_RDLCK = ...  # type: int
+F_SETFD = ...  # type: int
+F_SETFL = ...  # type: int
+F_SETLEASE = ...  # type: int
+F_SETLK = ...  # type: int
+F_SETLK64 = ...  # type: int
+F_SETLKW = ...  # type: int
+F_SETLKW64 = ...  # type: int
+F_SETOWN = ...  # type: int
+F_SETSIG = ...  # type: int
+F_SHLCK = ...  # type: int
+F_UNLCK = ...  # type: int
+F_WRLCK = ...  # type: int
+I_ATMARK = ...  # type: int
+I_CANPUT = ...  # type: int
+I_CKBAND = ...  # type: int
+I_FDINSERT = ...  # type: int
+I_FIND = ...  # type: int
+I_FLUSH = ...  # type: int
+I_FLUSHBAND = ...  # type: int
+I_GETBAND = ...  # type: int
+I_GETCLTIME = ...  # type: int
+I_GETSIG = ...  # type: int
+I_GRDOPT = ...  # type: int
+I_GWROPT = ...  # type: int
+I_LINK = ...  # type: int
+I_LIST = ...  # type: int
+I_LOOK = ...  # type: int
+I_NREAD = ...  # type: int
+I_PEEK = ...  # type: int
+I_PLINK = ...  # type: int
+I_POP = ...  # type: int
+I_PUNLINK = ...  # type: int
+I_PUSH = ...  # type: int
+I_RECVFD = ...  # type: int
+I_SENDFD = ...  # type: int
+I_SETCLTIME = ...  # type: int
+I_SETSIG = ...  # type: int
+I_SRDOPT = ...  # type: int
+I_STR = ...  # type: int
+I_SWROPT = ...  # type: int
+I_UNLINK = ...  # type: int
+LOCK_EX = ...  # type: int
+LOCK_MAND = ...  # type: int
+LOCK_NB = ...  # type: int
+LOCK_READ = ...  # type: int
+LOCK_RW = ...  # type: int
+LOCK_SH = ...  # type: int
+LOCK_UN = ...  # type: int
+LOCK_WRITE = ...  # type: int
+
+_ANYFILE = Union[int, io.IOBase]
+
+def fcntl(fd: _ANYFILE, op: int, arg: Union[int, str] = ...) -> Union[int, str]: ...
+
+# TODO: arg: int or read-only buffer interface or read-write buffer interface
+def ioctl(fd: _ANYFILE, op: int, arg: Union[int, str] = ...,
+          mutate_flag: bool = ...) -> Union[int, str]: ...
+
+def flock(fd: _ANYFILE, op: int) -> None: ...
+def lockf(fd: _ANYFILE, op: int, length: int = ..., start: int = ...,
+          whence: int = ...) -> Union[int, str]: ...
diff --git a/typeshed/stdlib/2.7/fnmatch.pyi b/typeshed/stdlib/2.7/fnmatch.pyi
new file mode 100644
index 0000000..23b5978
--- /dev/null
+++ b/typeshed/stdlib/2.7/fnmatch.pyi
@@ -0,0 +1,6 @@
+from typing import Iterable
+
+def fnmatch(filename: str, pattern: str) -> bool: ...
+def fnmatchcase(filename: str, pattern: str) -> bool: ...
+def filter(names: Iterable[str], pattern: str) -> Iterable[str]: ...
+def translate(pattern: str) -> str: ...
diff --git a/typeshed/stdlib/2.7/functools.pyi b/typeshed/stdlib/2.7/functools.pyi
new file mode 100644
index 0000000..f4ad487
--- /dev/null
+++ b/typeshed/stdlib/2.7/functools.pyi
@@ -0,0 +1,29 @@
+# Stubs for functools (Python 2.7)
+
+# NOTE: These are incomplete!
+
+from abc import ABCMeta, abstractmethod
+from typing import Any, Callable, Generic, Dict, Iterator, Optional, Sequence, Tuple, TypeVar
+from collections import namedtuple
+
+_AnyCallable = Callable[..., Any]
+
+_T = TypeVar("_T")
+def reduce(function: Callable[[_T], _T],
+           sequence: Iterator[_T], initial: Optional[_T] = ...) -> _T: ...
+
+WRAPPER_ASSIGNMENTS = ... # type: Sequence[str]
+WRAPPER_UPDATES = ... # type: Sequence[str]
+
+def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ...,
+                   updated: Sequence[str] = ...) -> None: ...
+def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ...
+def total_ordering(cls: type) -> type: ...
+def cmp_to_key(mycmp: Callable[[_T, _T], bool]) -> Callable[[_T], Any]: ...
+
+class partial(Generic[_T]):
+    func = ...  # Callable[..., _T]
+    args = ...  # type: Tuple[Any, ...]
+    keywords = ...  # type: Dict[str, Any]
+    def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> _T: ...
diff --git a/typeshed/stdlib/2.7/gc.pyi b/typeshed/stdlib/2.7/gc.pyi
new file mode 100644
index 0000000..f5728d9
--- /dev/null
+++ b/typeshed/stdlib/2.7/gc.pyi
@@ -0,0 +1,27 @@
+"""Stubs for the 'gc' module."""
+
+from typing import List, Any, Tuple
+
+def enable() -> None: ...
+def disable() -> None: ...
+def isenabled() -> bool: ...
+def collect(generation: int = ...) -> int: ...
+def set_debug(flags: int) -> None: ...
+def get_debug() -> int: ...
+def get_objects() -> List[Any]: ...
+def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ...
+def get_count() -> Tuple[int, int, int]: ...
+def get_threshold() -> Tuple[int, int, int]: ...
+def get_referrers(*objs: Any) -> List[Any]: ...
+def get_referents(*objs: Any) -> List[Any]: ...
+def is_tracked(obj: Any) -> bool: ...
+
+garbage = ... # type: List[Any]
+
+DEBUG_STATS = ... # type: Any
+DEBUG_COLLECTABLE = ... # type: Any
+DEBUG_UNCOLLECTABLE = ... # type: Any
+DEBUG_INSTANCES = ... # type: Any
+DEBUG_OBJECTS = ... # type: Any
+DEBUG_SAVEALL = ... # type: Any
+DEBUG_LEAK = ... # type: Any
diff --git a/typeshed/stdlib/2.7/getpass.pyi b/typeshed/stdlib/2.7/getpass.pyi
new file mode 100644
index 0000000..011fc8e
--- /dev/null
+++ b/typeshed/stdlib/2.7/getpass.pyi
@@ -0,0 +1,8 @@
+# Stubs for getpass (Python 2)
+
+from typing import Any, IO
+
+class GetPassWarning(UserWarning): ...
+
+def getpass(prompt: str = ..., stream: IO[Any] = ...) -> str: ...
+def getuser() -> str: ...
diff --git a/typeshed/stdlib/2.7/gettext.pyi b/typeshed/stdlib/2.7/gettext.pyi
new file mode 100644
index 0000000..16a9651
--- /dev/null
+++ b/typeshed/stdlib/2.7/gettext.pyi
@@ -0,0 +1,40 @@
+# TODO(MichalPokorny): better types
+
+from typing import Any, IO, List, Optional, Union
+
+def bindtextdomain(domain: str, localedir: str = ...) -> str: ...
+def bind_textdomain_codeset(domain: str, codeset: str = ...) -> str: ...
+def textdomain(domain: str = ...) -> str: ...
+def gettext(message: str) -> str: ...
+def lgettext(message: str) -> str: ...
+def dgettext(domain: str, message: str) -> str: ...
+def ldgettext(domain: str, message: str) -> str: ...
+def ngettext(singular: str, plural: str, n: int) -> str: ...
+def lngettext(singular: str, plural: str, n: int) -> str: ...
+def dngettext(domain: str, singular: str, plural: str, n: int) -> str: ...
+def ldngettext(domain: str, singular: str, plural: str, n: int) -> str: ...
+
+class Translations(object):
+    def _parse(self, fp: IO[str]) -> None: ...
+    def add_fallback(self, fallback: Any) -> None: ...
+    def gettext(self, message: str) -> str: ...
+    def lgettext(self, message: str) -> str: ...
+    def ugettext(self, message: str) -> unicode: ...
+    def ngettext(self, singular: str, plural: str, n: int) -> str: ...
+    def lngettext(self, singular: str, plural: str, n: int) -> str: ...
+    def ungettext(self, singular: str, plural: str, n: int) -> str: ...
+    def info(self) -> Any: ...
+    def charset(self) -> Any: ...
+    def output_charset(self) -> Any: ...
+    def set_output_charset(self, charset: Any) -> None: ...
+    def install(self, unicode: bool = ..., names: Any = ...) -> None: ...
+
+# TODO: NullTranslations, GNUTranslations
+
+def find(domain: str, localedir: str = ..., languages: List[str] = ...,
+         all: Any = ...) -> Optional[Union[str, List[str]]]: ...
+
+def translation(domain: str, localedir: str = ..., languages: List[str] = ...,
+                class_: Any = ..., fallback: Any = ..., codeset: Any = ...) -> Translations: ...
+def install(domain: str, localedir: str = ..., unicode: Any = ..., codeset: Any = ...,
+            names: Any = ...) -> None: ...
diff --git a/typeshed/stdlib/2.7/glob.pyi b/typeshed/stdlib/2.7/glob.pyi
new file mode 100644
index 0000000..9b70e5c
--- /dev/null
+++ b/typeshed/stdlib/2.7/glob.pyi
@@ -0,0 +1,4 @@
+from typing import List, Iterator, AnyStr
+
+def glob(pathname: AnyStr) -> List[AnyStr]: ...
+def iglob(pathname: AnyStr) -> Iterator[AnyStr]: ...
diff --git a/typeshed/stdlib/2.7/grp.pyi b/typeshed/stdlib/2.7/grp.pyi
new file mode 100644
index 0000000..6a1f758
--- /dev/null
+++ b/typeshed/stdlib/2.7/grp.pyi
@@ -0,0 +1,11 @@
+from typing import Optional, List
+
+class struct_group(object):
+    gr_name = ...  # type: Optional[str]
+    gr_passwd = ...  # type: Optional[str]
+    gr_gid = ...  # type: int
+    gr_mem = ...  # type: List[str]
+
+def getgrall() -> List[struct_group]: ...
+def getgrgid(id: int) -> struct_group: ...
+def getgrnam(name: str) -> struct_group: ...
diff --git a/typeshed/stdlib/2.7/gzip.pyi b/typeshed/stdlib/2.7/gzip.pyi
new file mode 100644
index 0000000..56caeb8
--- /dev/null
+++ b/typeshed/stdlib/2.7/gzip.pyi
@@ -0,0 +1,41 @@
+# Stubs for gzip (Python 2)
+#
+# NOTE: Based on a dynamically typed stub automatically generated by stubgen.
+
+from typing import Any, IO
+import io
+
+class GzipFile(io.BufferedIOBase):
+    myfileobj = ... # type: Any
+    max_read_chunk = ... # type: Any
+    mode = ... # type: Any
+    extrabuf = ... # type: Any
+    extrasize = ... # type: Any
+    extrastart = ... # type: Any
+    name = ... # type: Any
+    min_readsize = ... # type: Any
+    compress = ... # type: Any
+    fileobj = ... # type: Any
+    offset = ... # type: Any
+    mtime = ... # type: Any
+    def __init__(self, filename: str = ..., mode: str = ..., compresslevel: int = ...,
+                 fileobj: IO[str] = ..., mtime: float = ...) -> None: ...
+    @property
+    def filename(self): ...
+    size = ... # type: Any
+    crc = ... # type: Any
+    def write(self, data): ...
+    def read(self, size=...): ...
+    @property
+    def closed(self): ...
+    def close(self): ...
+    def flush(self, zlib_mode=...): ...
+    def fileno(self): ...
+    def rewind(self): ...
+    def readable(self): ...
+    def writable(self): ...
+    def seekable(self): ...
+    def seek(self, offset, whence=...): ...
+    def readline(self, size=...): ...
+
+def open(filename: str, mode: str = ..., compresslevel: int = ...) -> GzipFile: ...
diff --git a/typeshed/stdlib/2.7/hashlib.pyi b/typeshed/stdlib/2.7/hashlib.pyi
new file mode 100644
index 0000000..dd7edd3
--- /dev/null
+++ b/typeshed/stdlib/2.7/hashlib.pyi
@@ -0,0 +1,27 @@
+# Stubs for hashlib (Python 2)
+
+from typing import Tuple
+
+class _hash(object):
+    # This is not actually in the module namespace.
+    digest_size = 0
+    block_size = 0
+    def update(self, arg: str) -> None: ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def copy(self) -> _hash: ...
+
+def new(algo: str = ...) -> _hash: ...
+
+def md5(s: str = ...) -> _hash: ...
+def sha1(s: str = ...) -> _hash: ...
+def sha224(s: str = ...) -> _hash: ...
+def sha256(s: str = ...) -> _hash: ...
+def sha384(s: str = ...) -> _hash: ...
+def sha512(s: str = ...) -> _hash: ...
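+
+# Illustrative note (not part of the stub): h = sha256('abc'); h.update('def') makes
+# h.hexdigest() equal sha256('abcdef').hexdigest(), a 64-character hex string.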
+
+algorithms = ...  # type: Tuple[str, ...]
+algorithms_guaranteed = ...  # type: Tuple[str, ...]
+algorithms_available = ...  # type: Tuple[str, ...]
+
+def pbkdf2_hmac(name: str, password: str, salt: str, rounds: int, dklen: int = ...) -> str: ...
diff --git a/typeshed/stdlib/2.7/hmac.pyi b/typeshed/stdlib/2.7/hmac.pyi
new file mode 100644
index 0000000..eafa030
--- /dev/null
+++ b/typeshed/stdlib/2.7/hmac.pyi
@@ -0,0 +1,11 @@
+# Stubs for hmac (Python 2)
+
+from typing import Any
+
+class HMAC:
+    def update(self, msg: str) -> None: ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def copy(self) -> HMAC: ...
+
+def new(key: str, msg: str = ..., digestmod: Any = ...) -> HMAC: ...
diff --git a/typeshed/stdlib/2.7/htmlentitydefs.pyi b/typeshed/stdlib/2.7/htmlentitydefs.pyi
new file mode 100644
index 0000000..1b9bddd
--- /dev/null
+++ b/typeshed/stdlib/2.7/htmlentitydefs.pyi
@@ -0,0 +1,9 @@
+# Stubs for htmlentitydefs (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Mapping
+
+name2codepoint = ... # type: Mapping[str, int]
+codepoint2name = ... # type: Mapping[int, str]
+entitydefs = ... # type: Mapping[str, str]
diff --git a/typeshed/stdlib/2.7/httplib.pyi b/typeshed/stdlib/2.7/httplib.pyi
new file mode 100644
index 0000000..a8a87d8
--- /dev/null
+++ b/typeshed/stdlib/2.7/httplib.pyi
@@ -0,0 +1,124 @@
+# Stubs for httplib (Python 2)
+#
+# Generated by stubgen and manually massaged a bit.
+# Needs lots more work!
+
+from typing import Any, Dict
+import mimetools
+
+responses = ... # type: Dict[int, str]
+
+class HTTPMessage(mimetools.Message):
+    def addheader(self, key: str, value: str) -> None: ...
+    def addcontinue(self, key: str, more: str) -> None: ...
+    dict = ... # type: Dict[str, str]
+    unixfrom = ... # type: str
+    headers = ... # type: Any
+    status = ... # type: str
+    seekable = ... # type: bool
+    def readheaders(self) -> None: ...
+
+class HTTPResponse:
+    fp = ... # type: Any
+    debuglevel = ... # type: Any
+    strict = ... # type: Any
+    msg = ... # type: Any
+    version = ... # type: Any
+    status = ... # type: Any
+    reason = ... # type: Any
+    chunked = ... # type: Any
+    chunk_left = ... # type: Any
+    length = ... # type: Any
+    will_close = ... # type: Any
+    def __init__(self, sock, debuglevel=0, strict=0, method=None, buffering: bool = ...) -> None: ...
+    def begin(self): ...
+    def close(self): ...
+    def isclosed(self): ...
+    def read(self, amt=None): ...
+    def fileno(self): ...
+    def getheader(self, name, default=None): ...
+    def getheaders(self): ...
+
+class HTTPConnection:
+    response_class = ... # type: Any
+    default_port = ... # type: Any
+    auto_open = ... # type: Any
+    debuglevel = ... # type: Any
+    strict = ... # type: Any
+    timeout = ... # type: Any
+    source_address = ... # type: Any
+    sock = ... # type: Any
+    def __init__(self, host, port=None, strict=None, timeout=..., source_address=None) -> None: ...
+    def set_tunnel(self, host, port=None, headers=None): ...
+    def set_debuglevel(self, level): ...
+    def connect(self): ...
+    def close(self): ...
+    def send(self, data): ...
+    def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): ...
+    def putheader(self, header, *values): ...
+    def endheaders(self, message_body=None): ...
+    def request(self, method, url, body=None, headers=...): ...
+    def getresponse(self, buffering: bool = ...): ...
+
+class HTTP:
+    debuglevel = ... # type: Any
+    def __init__(self, host='', port=None, strict=None) -> None: ...
+    def connect(self, host=None, port=None): ...
+    def getfile(self): ...
+    file = ... # type: Any
+    headers = ... # type: Any
+    def getreply(self, buffering: bool = ...): ...
+    def close(self): ...
+
+class HTTPSConnection(HTTPConnection):
+    default_port = ... # type: Any
+    key_file = ... # type: Any
+    cert_file = ... # type: Any
+    def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=..., source_address=None, context=None) -> None: ...
+    sock = ... # type: Any
+    def connect(self): ...
+
+class HTTPS(HTTP):
+    key_file = ... # type: Any
+    cert_file = ... # type: Any
+    def __init__(self, host='', port=None, key_file=None, cert_file=None, strict=None, context=None) -> None: ...
+
+class HTTPException(Exception): ...
+class NotConnected(HTTPException): ...
+class InvalidURL(HTTPException): ...
+
+class UnknownProtocol(HTTPException):
+    args = ... # type: Any
+    version = ... # type: Any
+    def __init__(self, version) -> None: ...
+
+class UnknownTransferEncoding(HTTPException): ...
+class UnimplementedFileMode(HTTPException): ...
+
+class IncompleteRead(HTTPException):
+    args = ... # type: Any
+    partial = ... # type: Any
+    expected = ... # type: Any
+    def __init__(self, partial, expected=None) -> None: ...
+
+class ImproperConnectionState(HTTPException): ...
+class CannotSendRequest(ImproperConnectionState): ...
+class CannotSendHeader(ImproperConnectionState): ...
+class ResponseNotReady(ImproperConnectionState): ...
+
+class BadStatusLine(HTTPException):
+    args = ... # type: Any
+    line = ... # type: Any
+    def __init__(self, line) -> None: ...
+
+class LineTooLong(HTTPException):
+    def __init__(self, line_type) -> None: ...
+
+error = ... # type: Any
+
+class LineAndFileWrapper:
+    def __init__(self, line, file) -> None: ...
+    def __getattr__(self, attr): ...
+    def read(self, amt=None): ...
+    def readline(self): ...
+    def readlines(self, size=None): ...
diff --git a/typeshed/stdlib/2.7/imp.pyi b/typeshed/stdlib/2.7/imp.pyi
new file mode 100644
index 0000000..d8a9ae1
--- /dev/null
+++ b/typeshed/stdlib/2.7/imp.pyi
@@ -0,0 +1,35 @@
+"""Stubs for the 'imp' module."""
+
+from typing import List, Optional, Tuple, Iterable, IO, Any
+import types
+
+C_BUILTIN = ...  # type: int
+C_EXTENSION = ...  # type: int
+IMP_HOOK = ...  # type: int
+PKG_DIRECTORY = ...  # type: int
+PY_CODERESOURCE = ...  # type: int
+PY_COMPILED = ...  # type: int
+PY_FROZEN = ...  # type: int
+PY_RESOURCE = ...  # type: int
+PY_SOURCE = ...  # type: int
+SEARCH_ERROR = ...  # type: int
+
+def acquire_lock() -> None: ...
+def find_module(name: str, path: Iterable[str] = ...) -> Optional[Tuple[str, str, Tuple[str, str, int]]]: ...
+def get_magic() -> str: ...
+def get_suffixes() -> List[Tuple[str, str, int]]: ...
+def init_builtin(name: str) -> types.ModuleType: ...
+def init_frozen(name: str) -> types.ModuleType: ...
+def is_builtin(name: str) -> int: ...
+def is_frozen(name: str) -> bool: ...
+def load_compiled(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ...
+def load_dynamic(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ...
+def load_module(name: str, file: str, pathname: str, description: Tuple[str, str, int]) -> types.ModuleType: ...
+def load_source(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ...
+def lock_held() -> bool: ...
+def new_module(name: str) -> types.ModuleType: ...
+def release_lock() -> None: ...
+
+class NullImporter:
+    def __init__(self, path_string: str) -> None: ...
+    def find_module(self, fullname: str, path: str = ...) -> None: ...
diff --git a/typeshed/stdlib/2.7/importlib.pyi b/typeshed/stdlib/2.7/importlib.pyi
new file mode 100644
index 0000000..afa073d
--- /dev/null
+++ b/typeshed/stdlib/2.7/importlib.pyi
@@ -0,0 +1,3 @@
+import types
+
+def import_module(name: str, package: str = ...) -> types.ModuleType: ...
diff --git a/typeshed/stdlib/2.7/inspect.pyi b/typeshed/stdlib/2.7/inspect.pyi
new file mode 100644
index 0000000..1ea9fa4
--- /dev/null
+++ b/typeshed/stdlib/2.7/inspect.pyi
@@ -0,0 +1,19 @@
+# TODO incomplete
+from typing import Any, List, Tuple, NamedTuple
+
+def isgenerator(object: Any) -> bool: ...
+
+class _Frame:
+    ...
+_FrameRecord = Tuple[_Frame, str, int, str, List[str], int]
+
+def currentframe() -> _FrameRecord: ...
+def stack(context: int = ...) -> List[_FrameRecord]: ...
+
+ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
+                                 ('varargs', str),
+                                 ('keywords', str),
+                                 ('defaults', tuple),
+                                 ])
+
+def getargspec(func: object) -> ArgSpec: ...
diff --git a/typeshed/stdlib/2.7/io.pyi b/typeshed/stdlib/2.7/io.pyi
new file mode 100644
index 0000000..68a1b2b
--- /dev/null
+++ b/typeshed/stdlib/2.7/io.pyi
@@ -0,0 +1,101 @@
+# Stubs for io
+
+# Based on https://docs.python.org/2/library/io.html
+
+# Only a subset of functionality is included.
+
+DEFAULT_BUFFER_SIZE = 0
+
+from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any, Union
+
+def open(file: Union[str, unicode, int],
+         mode: unicode = ..., buffering: int = ..., encoding: unicode = ...,
+         errors: unicode = ..., newline: unicode = ...,
+         closefd: bool = ...) -> IO[Any]: ...
+
+class IOBase:
+    # TODO
+    ...
+
+class BytesIO(BinaryIO):
+    def __init__(self, initial_bytes: str = ...) -> None: ...
+    # TODO getbuffer
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> str: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def write(self, s: str) -> None: ...
+    def writelines(self, lines: Iterable[str]) -> None: ...
+    def getvalue(self) -> str: ...
+    def read1(self) -> str: ...
+
+    def __iter__(self) -> Iterator[str]: ...
+    def __enter__(self) -> 'BytesIO': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class StringIO(TextIO):
+    def __init__(self, initial_value: unicode = ...,
+                 newline: unicode = ...) -> None: ...
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> unicode: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> unicode: ...
+    def readlines(self, hint: int = ...) -> List[unicode]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def write(self, s: unicode) -> None: ...
+    def writelines(self, lines: Iterable[unicode]) -> None: ...
+    def getvalue(self) -> unicode: ...
+
+    def __iter__(self) -> Iterator[unicode]: ...
+    def __enter__(self) -> 'StringIO': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class TextIOWrapper(TextIO):
+    # write_through is undocumented but used by subprocess
+    def __init__(self, buffer: IO[str], encoding: unicode = ...,
+                 errors: unicode = ..., newline: unicode = ...,
+                 line_buffering: bool = ...,
+                 write_through: bool = ...) -> None: ...
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> unicode: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> unicode: ...
+    def readlines(self, hint: int = ...) -> List[unicode]: ...
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def write(self, s: unicode) -> None: ...
+    def writelines(self, lines: Iterable[unicode]) -> None: ...
+
+    def __iter__(self) -> Iterator[unicode]: ...
+    def __enter__(self) -> 'TextIOWrapper': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class BufferedIOBase(IOBase): ...
diff --git a/typeshed/stdlib/2.7/itertools.pyi b/typeshed/stdlib/2.7/itertools.pyi
new file mode 100644
index 0000000..fe2fa9c
--- /dev/null
+++ b/typeshed/stdlib/2.7/itertools.pyi
@@ -0,0 +1,81 @@
+# Stubs for itertools
+
+# Based on https://docs.python.org/2/library/itertools.html
+
+from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
+                    Union, Sequence)
+
+_T = TypeVar('_T')
+_S = TypeVar('_S')
+
+def count(start: int = ...,
+          step: int = ...) -> Iterator[int]: ... # more general types?
+def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ...
+
+def repeat(object: _T, times: int = ...) -> Iterator[_T]: ...
+
+def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ...
+def chain(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+# TODO chain.from_iterable
+def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ...
+def dropwhile(predicate: Callable[[_T], Any],
+              iterable: Iterable[_T]) -> Iterator[_T]: ...
+def ifilter(predicate: Callable[[_T], Any],
+            iterable: Iterable[_T]) -> Iterator[_T]: ...
+def ifilterfalse(predicate: Callable[[_T], Any],
+                 iterable: Iterable[_T]) -> Iterator[_T]: ...
+
+@overload
+def groupby(iterable: Iterable[_T]) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
+@overload
+def groupby(iterable: Iterable[_T],
+            key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ...
+
+@overload
+def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
+@overload
+def islice(iterable: Iterable[_T], start: int, stop: int,
+           step: int = ...) -> Iterator[_T]: ...
+
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+
+@overload
+def imap(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterable[_S]: ...
+@overload
+def imap(func: Callable[[_T1, _T2], _S],
+        iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> Iterable[_S]: ...  # TODO more than two iterables
+
+def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
+def takewhile(predicate: Callable[[_T], Any],
+              iterable: Iterable[_T]) -> Iterator[_T]: ...
+def tee(iterable: Iterable[Any], n: int = ...) -> Iterator[Any]: ...
+
+@overload
+def izip(iter1: Iterable[_T1]) -> Iterable[Tuple[_T1]]: ...
+@overload
+def izip(iter1: Iterable[_T1],
+         iter2: Iterable[_T2]) -> Iterable[Tuple[_T1, _T2]]: ...
+@overload
+def izip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+         iter3: Iterable[_T3]) -> Iterable[Tuple[_T1, _T2, _T3]]: ...
+@overload
+def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+         iter4: Iterable[_T4]) -> Iterable[Tuple[_T1, _T2,
+                                           _T3, _T4]]: ...  # TODO more than four iterables
+def izip_longest(*p: Iterable[Any],
+                 fillvalue: Any = ...) -> Iterator[Any]: ...
+
+# TODO: Return type should be Iterator[Tuple[..]], but unknown tuple shape.
+#       Iterator[Sequence[_T]] loses this type information.
+def product(*p: Iterable[_T], repeat: int = ...) -> Iterator[Sequence[_T]]: ...
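+# e.g. product('ab', repeat=2) yields ('a', 'a'), ('a', 'b'), ('b', 'a'), ('b', 'b');
+# each element is really a tuple, whose length Sequence[_T] cannot express (illustrative note).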
+
+def permutations(iterable: Iterable[_T],
+                 r: int = ...) -> Iterator[Sequence[_T]]: ...
+def combinations(iterable: Iterable[_T],
+                 r: int) -> Iterable[Sequence[_T]]: ...
+def combinations_with_replacement(iterable: Iterable[_T],
+                                  r: int) -> Iterable[Sequence[_T]]: ...
diff --git a/typeshed/stdlib/2.7/json.pyi b/typeshed/stdlib/2.7/json.pyi
new file mode 100644
index 0000000..f319371
--- /dev/null
+++ b/typeshed/stdlib/2.7/json.pyi
@@ -0,0 +1,54 @@
+from typing import Any, IO, Optional, Tuple, Callable, Dict, List
+
+class JSONDecodeError(object):
+    def dumps(self, obj: Any) -> str: ...
+    def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
+    def loads(self, s: str) -> Any: ...
+    def load(self, fp: IO[str]) -> Any: ...
+
+def dumps(obj: Any,
+    skipkeys: bool = ...,
+    ensure_ascii: bool = ...,
+    check_circular: bool = ...,
+    allow_nan: bool = ...,
+    cls: Any = ...,
+    indent: Optional[int] = ...,
+    separators: Optional[Tuple[str, str]] = ...,
+    encoding: str = ...,
+    default: Optional[Callable[[Any], Any]] = ...,
+    sort_keys: bool = ...,
+    **kwds: Any) -> str: ...
+
+def dump(obj: Any,
+    fp: IO[str],
+    skipkeys: bool = ...,
+    ensure_ascii: bool = ...,
+    check_circular: bool = ...,
+    allow_nan: bool = ...,
+    cls: Any = ...,
+    indent: Optional[int] = ...,
+    separators: Optional[Tuple[str, str]] = ...,
+    encoding: str = ...,
+    default: Optional[Callable[[Any], Any]] = ...,
+    sort_keys: bool = ...,
+    **kwds: Any) -> None: ...
+
+def loads(s: str,
+    encoding: Any = ...,
+    cls: Any = ...,
+    object_hook: Callable[[Dict], Any] = ...,
+    parse_float: Optional[Callable[[str], Any]] = ...,
+    parse_int: Optional[Callable[[str], Any]] = ...,
+    parse_constant: Optional[Callable[[str], Any]] = ...,
+    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+    **kwds: Any) -> Any: ...
+
+def load(fp: IO[str],
+    encoding: Optional[str] = ...,
+    cls: Any = ...,
+    object_hook: Callable[[Dict], Any] = ...,
+    parse_float: Optional[Callable[[str], Any]] = ...,
+    parse_int: Optional[Callable[[str], Any]] = ...,
+    parse_constant: Optional[Callable[[str], Any]] = ...,
+    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+    **kwds: Any) -> Any: ...
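+
+# Illustrative round trip (not part of the stub): loads(dumps({'a': 1}, sort_keys=True))
+# returns {'a': 1}; object_pairs_hook=collections.OrderedDict preserves key order on load.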
diff --git a/typeshed/stdlib/2.7/logging/__init__.pyi b/typeshed/stdlib/2.7/logging/__init__.pyi
new file mode 100644
index 0000000..e4489d1
--- /dev/null
+++ b/typeshed/stdlib/2.7/logging/__init__.pyi
@@ -0,0 +1,239 @@
+# Stubs for logging (Python 2.7)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Dict, Optional, Sequence, Tuple, overload
+
+CRITICAL = 0
+FATAL = 0
+ERROR = 0
+WARNING = 0
+WARN = 0
+INFO = 0
+DEBUG = 0
+NOTSET = 0
+
+def getLevelName(level: int) -> str: ...
+def addLevelName(level: int, levelName: str) -> None: ...
+
+class LogRecord:
+    name = ...  # type: str
+    msg = ...  # type: str
+    args = ...  # type: Sequence[Any]
+    levelname = ...  # type: str
+    levelno = ...  # type: int
+    pathname = ...  # type: str
+    filename = ...  # type: str
+    module = ...  # type: str
+    exc_info = ...  # type: Tuple[Any, Any, Any]
+    exc_text = ...  # type: str
+    lineno = ...  # type: int
+    funcName = ...  # type: Optional[str]
+    created = ...  # type: float
+    msecs = ...  # type: float
+    relativeCreated = ...  # type: float
+    thread = ...  # type: Any
+    threadName = ...  # type: Any
+    processName = ...  # type: Any
+    process = ...  # type: Any
+    def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: str,
+                 args: Sequence[Any], exc_info: Tuple[Any, Any, Any], func: str = ...) -> None: ...
+    def getMessage(self) -> str: ...
+
+def makeLogRecord(dict: Dict[str, Any]) -> LogRecord: ...
+
+class PercentStyle:
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def __init__(self, fmt) -> None: ...
+    def usesTime(self): ...
+    def format(self, record): ...
+
+class StrFormatStyle(PercentStyle):
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def format(self, record): ...
+
+class StringTemplateStyle(PercentStyle):
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def __init__(self, fmt) -> None: ...
+    def usesTime(self): ...
+    def format(self, record): ...
+
+BASIC_FORMAT = ...  # type: Any
+
+class Formatter:
+    converter = ...  # type: Any
+    datefmt = ...  # type: Any
+    def __init__(self, fmt: str = ..., datefmt: str = ...) -> None: ...
+    default_time_format = ...  # type: Any
+    default_msec_format = ...  # type: Any
+    def formatTime(self, record, datefmt=...): ...
+    def formatException(self, ei): ...
+    def usesTime(self): ...
+    def formatMessage(self, record): ...
+    def formatStack(self, stack_info): ...
+    def format(self, record: LogRecord) -> str: ...
+
+class BufferingFormatter:
+    linefmt = ...  # type: Any
+    def __init__(self, linefmt=...) -> None: ...
+    def formatHeader(self, records): ...
+    def formatFooter(self, records): ...
+    def format(self, records): ...
+
+class Filter:
+    name = ...  # type: Any
+    nlen = ...  # type: Any
+    def __init__(self, name: str = ...) -> None: ...
+    def filter(self, record: LogRecord) -> int: ...
+
+class Filterer:
+    filters = ...  # type: Any
+    def __init__(self) -> None: ...
+    def addFilter(self, filter: Filter) -> None: ...
+    def removeFilter(self, filter: Filter) -> None: ...
+    def filter(self, record: LogRecord) -> int: ...
+
+class Handler(Filterer):
+    level = ...  # type: Any
+    formatter = ...  # type: Any
+    def __init__(self, level: int = ...) -> None: ...
+    def get_name(self): ...
+    def set_name(self, name): ...
+    name = ...  # type: Any
+    lock = ...  # type: Any
+    def createLock(self): ...
+    def acquire(self): ...
+    def release(self): ...
+    def setLevel(self, level: int) -> None: ...
+    def format(self, record: LogRecord) -> str: ...
+    def emit(self, record: LogRecord) -> None: ...
+    def handle(self, record: LogRecord) -> Any: ... # Return value undocumented
+    def setFormatter(self, fmt: Formatter) -> None: ...
+    def flush(self) -> None: ...
+    def close(self) -> None: ...
+    def handleError(self, record: LogRecord) -> None: ...
+
+class StreamHandler(Handler):
+    terminator = ...  # type: Any
+    stream = ...  # type: Any
+    def __init__(self, stream=...) -> None: ...
+    def flush(self): ...
+    def emit(self, record): ...
+
+class FileHandler(StreamHandler):
+    baseFilename = ...  # type: Any
+    mode = ...  # type: Any
+    encoding = ...  # type: Any
+    delay = ...  # type: Any
+    stream = ...  # type: Any
+    def __init__(self, filename: str, mode: str = ..., encoding: str = ..., delay: int = ...) -> None: ...
+    def close(self): ...
+    def emit(self, record): ...
+
+class _StderrHandler(StreamHandler):
+    def __init__(self, level=...) -> None: ...
+
+lastResort = ...  # type: Any
+
+class PlaceHolder:
+    loggerMap = ...  # type: Any
+    def __init__(self, alogger) -> None: ...
+    def append(self, alogger): ...
+
+def setLoggerClass(klass): ...
+def getLoggerClass(): ...
+
+class Manager:
+    root = ...  # type: Any
+    disable = ...  # type: Any
+    emittedNoHandlerWarning = ...  # type: Any
+    loggerDict = ...  # type: Any
+    loggerClass = ...  # type: Any
+    logRecordFactory = ...  # type: Any
+    def __init__(self, rootnode) -> None: ...
+    def getLogger(self, name): ...
+    def setLoggerClass(self, klass): ...
+    def setLogRecordFactory(self, factory): ...
+
+class Logger(Filterer):
+    name = ...  # type: Any
+    level = ...  # type: Any
+    parent = ...  # type: Any
+    propagate = False
+    handlers = ...  # type: Any
+    disabled = ...  # type: Any
+    def __init__(self, name: str, level: int = ...) -> None: ...
+    def setLevel(self, level: int) -> None: ...
+    def debug(self, msg: str, *args, **kwargs) -> None: ...
+    def info(self, msg: str, *args, **kwargs) -> None: ...
+    def warning(self, msg: str, *args, **kwargs) -> None: ...
+    def warn(self, msg: str, *args, **kwargs) -> None: ...
+    def error(self, msg: str, *args, **kwargs) -> None: ...
+    def exception(self, msg: str, *args, **kwargs) -> None: ...
+    def critical(self, msg: str, *args, **kwargs) -> None: ...
+    fatal = ...  # type: Any
+    def log(self, level: int, msg: str, *args, **kwargs) -> None: ...
+    def findCaller(self) -> Tuple[str, int, str]: ...
+    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=..., extra=...,
+                   sinfo=...): ...
+    def handle(self, record): ...
+    def addHandler(self, hdlr: Handler) -> None: ...
+    def removeHandler(self, hdlr: Handler) -> None: ...
+    def hasHandlers(self): ...
+    def callHandlers(self, record): ...
+    def getEffectiveLevel(self) -> int: ...
+    def isEnabledFor(self, level: int) -> bool: ...
+    def getChild(self, suffix: str) -> Logger: ...
+
+class RootLogger(Logger):
+    def __init__(self, level) -> None: ...
+
+class LoggerAdapter:
+    logger = ...  # type: Any
+    extra = ...  # type: Any
+    def __init__(self, logger, extra) -> None: ...
+    def process(self, msg, kwargs): ...
+    def debug(self, msg: str, *args, **kwargs) -> None: ...
+    def info(self, msg: str, *args, **kwargs) -> None: ...
+    def warning(self, msg: str, *args, **kwargs) -> None: ...
+    def warn(self, msg: str, *args, **kwargs) -> None: ...
+    def error(self, msg: str, *args, **kwargs) -> None: ...
+    def exception(self, msg: str, *args, **kwargs) -> None: ...
+    def critical(self, msg: str, *args, **kwargs) -> None: ...
+    def log(self, level: int, msg: str, *args, **kwargs) -> None: ...
+    def isEnabledFor(self, level: int) -> bool: ...
+    def setLevel(self, level: int) -> None: ...
+    def getEffectiveLevel(self) -> int: ...
+    def hasHandlers(self): ...
+
+def basicConfig(**kwargs) -> None: ...
+def getLogger(name: str = ...) -> Logger: ...
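+# Typical use (illustrative only): basicConfig(level=INFO); getLogger('app').info('started')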
+def critical(msg: str, *args, **kwargs) -> None: ...
+
+fatal = ...  # type: Any
+
+def error(msg: str, *args, **kwargs) -> None: ...
+@overload
+def exception(msg: str, *args, **kwargs) -> None: ...
+@overload
+def exception(exception: Exception, *args, **kwargs) -> None: ...
+def warning(msg: str, *args, **kwargs) -> None: ...
+def warn(msg: str, *args, **kwargs) -> None: ...
+def info(msg: str, *args, **kwargs) -> None: ...
+def debug(msg: str, *args, **kwargs) -> None: ...
+def log(level: int, msg: str, *args, **kwargs) -> None: ...
+def disable(level: int) -> None: ...
+
+class NullHandler(Handler):
+    def handle(self, record): ...
+    def emit(self, record): ...
+    lock = ...  # type: Any
+    def createLock(self): ...
+
+def captureWarnings(capture: bool) -> None: ...
diff --git a/typeshed/stdlib/2.7/logging/handlers.pyi b/typeshed/stdlib/2.7/logging/handlers.pyi
new file mode 100644
index 0000000..584d759
--- /dev/null
+++ b/typeshed/stdlib/2.7/logging/handlers.pyi
@@ -0,0 +1,200 @@
+# Stubs for logging.handlers (Python 2.7)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import logging
+
+threading = ...  # type: Any
+DEFAULT_TCP_LOGGING_PORT = ...  # type: Any
+DEFAULT_UDP_LOGGING_PORT = ...  # type: Any
+DEFAULT_HTTP_LOGGING_PORT = ...  # type: Any
+DEFAULT_SOAP_LOGGING_PORT = ...  # type: Any
+SYSLOG_UDP_PORT = ...  # type: Any
+SYSLOG_TCP_PORT = ...  # type: Any
+
+class BaseRotatingHandler(logging.FileHandler):
+    mode = ...  # type: Any
+    encoding = ...  # type: Any
+    namer = ...  # type: Any
+    rotator = ...  # type: Any
+    def __init__(self, filename, mode, encoding=..., delay=...) -> None: ...
+    def emit(self, record): ...
+    def rotation_filename(self, default_name): ...
+    def rotate(self, source, dest): ...
+
+class RotatingFileHandler(BaseRotatingHandler):
+    maxBytes = ...  # type: Any
+    backupCount = ...  # type: Any
+    def __init__(self, filename: str, mode: str = ..., maxBytes: int = ..., backupCount: int = ...,
+                 encoding: str = ..., delay: int = ...) -> None: ...
+    stream = ...  # type: Any
+    def doRollover(self): ...
+    def shouldRollover(self, record): ...
+
+class TimedRotatingFileHandler(BaseRotatingHandler):
+    when = ...  # type: Any
+    backupCount = ...  # type: Any
+    utc = ...  # type: Any
+    atTime = ...  # type: Any
+    interval = ...  # type: Any
+    suffix = ...  # type: Any
+    extMatch = ...  # type: Any
+    dayOfWeek = ...  # type: Any
+    rolloverAt = ...  # type: Any
+    def __init__(self, filename, when=..., interval=..., backupCount=..., encoding=..., delay=...,
+                 utc=..., atTime=...): ...
+    def computeRollover(self, currentTime): ...
+    def shouldRollover(self, record): ...
+    def getFilesToDelete(self): ...
+    stream = ...  # type: Any
+    def doRollover(self): ...
+
+class WatchedFileHandler(logging.FileHandler):
+    def __init__(self, filename: str, mode: str = ..., encoding: str = ..., delay: int = ...) -> None: ...
+    stream = ...  # type: Any
+    def emit(self, record): ...
+
+class SocketHandler(logging.Handler):
+    host = ...  # type: Any
+    port = ...  # type: Any
+    address = ...  # type: Any
+    sock = ...  # type: Any
+    closeOnError = ...  # type: Any
+    retryTime = ...  # type: Any
+    retryStart = ...  # type: Any
+    retryMax = ...  # type: Any
+    retryFactor = ...  # type: Any
+    def __init__(self, host, port) -> None: ...
+    def makeSocket(self, timeout=...): ...
+    retryPeriod = ...  # type: Any
+    def createSocket(self): ...
+    def send(self, s): ...
+    def makePickle(self, record): ...
+    def handleError(self, record): ...
+    def emit(self, record): ...
+    def close(self): ...
+
+class DatagramHandler(SocketHandler):
+    closeOnError = ...  # type: Any
+    def __init__(self, host, port) -> None: ...
+    def makeSocket(self, timeout=...): ... # TODO: Actually does not have the timeout argument.
+    def send(self, s): ...
+
+class SysLogHandler(logging.Handler):
+    LOG_EMERG = ...  # type: Any
+    LOG_ALERT = ...  # type: Any
+    LOG_CRIT = ...  # type: Any
+    LOG_ERR = ...  # type: Any
+    LOG_WARNING = ...  # type: Any
+    LOG_NOTICE = ...  # type: Any
+    LOG_INFO = ...  # type: Any
+    LOG_DEBUG = ...  # type: Any
+    LOG_KERN = ...  # type: Any
+    LOG_USER = ...  # type: Any
+    LOG_MAIL = ...  # type: Any
+    LOG_DAEMON = ...  # type: Any
+    LOG_AUTH = ...  # type: Any
+    LOG_SYSLOG = ...  # type: Any
+    LOG_LPR = ...  # type: Any
+    LOG_NEWS = ...  # type: Any
+    LOG_UUCP = ...  # type: Any
+    LOG_CRON = ...  # type: Any
+    LOG_AUTHPRIV = ...  # type: Any
+    LOG_FTP = ...  # type: Any
+    LOG_LOCAL0 = ...  # type: Any
+    LOG_LOCAL1 = ...  # type: Any
+    LOG_LOCAL2 = ...  # type: Any
+    LOG_LOCAL3 = ...  # type: Any
+    LOG_LOCAL4 = ...  # type: Any
+    LOG_LOCAL5 = ...  # type: Any
+    LOG_LOCAL6 = ...  # type: Any
+    LOG_LOCAL7 = ...  # type: Any
+    priority_names = ...  # type: Any
+    facility_names = ...  # type: Any
+    priority_map = ...  # type: Any
+    address = ...  # type: Any
+    facility = ...  # type: Any
+    socktype = ...  # type: Any
+    unixsocket = ...  # type: Any
+    socket = ...  # type: Any
+    formatter = ...  # type: Any
+    def __init__(self, address=..., facility=..., socktype=...) -> None: ...
+    def encodePriority(self, facility, priority): ...
+    def close(self): ...
+    def mapPriority(self, levelName): ...
+    ident = ...  # type: Any
+    append_nul = ...  # type: Any
+    def emit(self, record): ...
+
+class SMTPHandler(logging.Handler):
+    username = ...  # type: Any
+    fromaddr = ...  # type: Any
+    toaddrs = ...  # type: Any
+    subject = ...  # type: Any
+    secure = ...  # type: Any
+    timeout = ...  # type: Any
+    def __init__(self, mailhost, fromaddr, toaddrs, subject, credentials=..., secure=...,
+                 timeout=...): ...
+    def getSubject(self, record): ...
+    def emit(self, record): ...
+
+class NTEventLogHandler(logging.Handler):
+    appname = ...  # type: Any
+    dllname = ...  # type: Any
+    logtype = ...  # type: Any
+    deftype = ...  # type: Any
+    typemap = ...  # type: Any
+    def __init__(self, appname, dllname=..., logtype=...) -> None: ...
+    def getMessageID(self, record): ...
+    def getEventCategory(self, record): ...
+    def getEventType(self, record): ...
+    def emit(self, record): ...
+    def close(self): ...
+
+class HTTPHandler(logging.Handler):
+    host = ...  # type: Any
+    url = ...  # type: Any
+    method = ...  # type: Any
+    secure = ...  # type: Any
+    credentials = ...  # type: Any
+    def __init__(self, host, url, method=..., secure=..., credentials=...) -> None: ...
+    def mapLogRecord(self, record): ...
+    def emit(self, record): ...
+
+class BufferingHandler(logging.Handler):
+    capacity = ...  # type: Any
+    buffer = ...  # type: Any
+    def __init__(self, capacity: int) -> None: ...
+    def shouldFlush(self, record): ...
+    def emit(self, record): ...
+    def flush(self): ...
+    def close(self): ...
+
+class MemoryHandler(BufferingHandler):
+    flushLevel = ...  # type: Any
+    target = ...  # type: Any
+    def __init__(self, capacity, flushLevel=..., target=...) -> None: ...
+    def shouldFlush(self, record): ...
+    def setTarget(self, target): ...
+    buffer = ...  # type: Any
+    def flush(self): ...
+    def close(self): ...
+
+class QueueHandler(logging.Handler):
+    queue = ...  # type: Any
+    def __init__(self, queue) -> None: ...
+    def enqueue(self, record): ...
+    def prepare(self, record): ...
+    def emit(self, record): ...
+
+class QueueListener:
+    queue = ...  # type: Any
+    handlers = ...  # type: Any
+    def __init__(self, queue, *handlers) -> None: ...
+    def dequeue(self, block): ...
+    def start(self): ...
+    def prepare(self, record): ...
+    def handle(self, record): ...
+    def enqueue_sentinel(self): ...
+    def stop(self): ...
diff --git a/typeshed/stdlib/2.7/markupbase.pyi b/typeshed/stdlib/2.7/markupbase.pyi
new file mode 100644
index 0000000..129b49b
--- /dev/null
+++ b/typeshed/stdlib/2.7/markupbase.pyi
@@ -0,0 +1,9 @@
+from typing import Tuple
+
+class ParserBase(object):
+    def __init__(self) -> None: ...
+    def error(self, message: str) -> None: ...
+    def reset(self) -> None: ...
+    def getpos(self) -> Tuple[int, int]: ...
+
+    def unknown_decl(self, data: str) -> None: ...
diff --git a/typeshed/stdlib/2.7/md5.pyi b/typeshed/stdlib/2.7/md5.pyi
new file mode 100644
index 0000000..3488466
--- /dev/null
+++ b/typeshed/stdlib/2.7/md5.pyi
@@ -0,0 +1,11 @@
+# Stubs for Python 2.7 md5 stdlib module
+
+class md5(object):
+    def update(self, arg: str) -> None: ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def copy(self) -> md5: ...
+
+def new(string: str = ...) -> md5: ...
+blocksize = 0
+digest_size = 0
diff --git a/typeshed/stdlib/2.7/mimetools.pyi b/typeshed/stdlib/2.7/mimetools.pyi
new file mode 100644
index 0000000..62aa50b
--- /dev/null
+++ b/typeshed/stdlib/2.7/mimetools.pyi
@@ -0,0 +1,31 @@
+# Stubs for mimetools (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import rfc822
+
+class Message(rfc822.Message):
+    encodingheader = ... # type: Any
+    typeheader = ... # type: Any
+    def __init__(self, fp, seekable=1): ...
+    plisttext = ... # type: Any
+    type = ... # type: Any
+    maintype = ... # type: Any
+    subtype = ... # type: Any
+    def parsetype(self): ...
+    plist = ... # type: Any
+    def parseplist(self): ...
+    def getplist(self): ...
+    def getparam(self, name): ...
+    def getparamnames(self): ...
+    def getencoding(self): ...
+    def gettype(self): ...
+    def getmaintype(self): ...
+    def getsubtype(self): ...
+
+def choose_boundary(): ...
+def decode(input, output, encoding): ...
+def encode(input, output, encoding): ...
+def copyliteral(input, output): ...
+def copybinary(input, output): ...
diff --git a/typeshed/stdlib/2.7/numbers.pyi b/typeshed/stdlib/2.7/numbers.pyi
new file mode 100644
index 0000000..f55611a
--- /dev/null
+++ b/typeshed/stdlib/2.7/numbers.pyi
@@ -0,0 +1,77 @@
+# Stubs for numbers (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Number:
+    __metaclass__ = ... # type: Any
+    __hash__ = ... # type: Any
+
+class Complex(Number):
+    def __complex__(self): ...
+    def __nonzero__(self): ...
+    def real(self): ...
+    def imag(self): ...
+    def __add__(self, other): ...
+    def __radd__(self, other): ...
+    def __neg__(self): ...
+    def __pos__(self): ...
+    def __sub__(self, other): ...
+    def __rsub__(self, other): ...
+    def __mul__(self, other): ...
+    def __rmul__(self, other): ...
+    def __div__(self, other): ...
+    def __rdiv__(self, other): ...
+    def __truediv__(self, other): ...
+    def __rtruediv__(self, other): ...
+    def __pow__(self, exponent): ...
+    def __rpow__(self, base): ...
+    def __abs__(self): ...
+    def conjugate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class Real(Complex):
+    def __float__(self): ...
+    def __trunc__(self): ...
+    def __divmod__(self, other): ...
+    def __rdivmod__(self, other): ...
+    def __floordiv__(self, other): ...
+    def __rfloordiv__(self, other): ...
+    def __mod__(self, other): ...
+    def __rmod__(self, other): ...
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+    def __complex__(self): ...
+    @property
+    def real(self): ...
+    @property
+    def imag(self): ...
+    def conjugate(self): ...
+
+class Rational(Real):
+    def numerator(self): ...
+    def denominator(self): ...
+    def __float__(self): ...
+
+class Integral(Rational):
+    def __long__(self): ...
+    def __index__(self): ...
+    def __pow__(self, exponent, modulus=...): ...
+    def __lshift__(self, other): ...
+    def __rlshift__(self, other): ...
+    def __rshift__(self, other): ...
+    def __rrshift__(self, other): ...
+    def __and__(self, other): ...
+    def __rand__(self, other): ...
+    def __xor__(self, other): ...
+    def __rxor__(self, other): ...
+    def __or__(self, other): ...
+    def __ror__(self, other): ...
+    def __invert__(self): ...
+    def __float__(self): ...
+    @property
+    def numerator(self): ...
+    @property
+    def denominator(self): ...
diff --git a/typeshed/stdlib/2.7/os/__init__.pyi b/typeshed/stdlib/2.7/os/__init__.pyi
new file mode 100644
index 0000000..8a71f72
--- /dev/null
+++ b/typeshed/stdlib/2.7/os/__init__.pyi
@@ -0,0 +1,264 @@
+# created from https://docs.python.org/2/library/os.html
+
+from typing import List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, MutableMapping, Iterator
+import os.path as path
+
+error = OSError
+name = ... # type: str
+environ = ... # type: MutableMapping[str, str]
+
+def chdir(path: unicode) -> None: ...
+def fchdir(fd: int) -> None: ...
+def getcwd() -> str: ...
+def ctermid() -> str: ...
+def getegid() -> int: ...
+def geteuid() -> int: ...
+def getgid() -> int: ...
+def getgroups() -> List[int]: ...
+def initgroups(username: str, gid: int) -> None: ...
+def getlogin() -> str: ...
+def getpgid(pid: int) -> int: ...
+def getpgrp() -> int: ...
+def getpid() -> int: ...
+def getppid() -> int: ...
+def getresuid() -> Tuple[int, int, int]: ...
+def getresgid() -> Tuple[int, int, int]: ...
+def getuid() -> int: ...
+def getenv(varname: str, value: str = ...) -> str: ...
+def putenv(varname: str, value: str) -> None: ...
+def setegid(egid: int) -> None: ...
+def seteuid(euid: int) -> None: ...
+def setgid(gid: int) -> None: ...
+def setgroups(groups: Sequence[int]) -> None: ...
+
+# TODO(MichalPokorny)
+def setpgrp(*args) -> None: ...
+
+def setpgid(pid: int, pgrp: int) -> None: ...
+def setregid(rgid: int, egid: int) -> None: ...
+def setresgid(rgid: int, egid: int, sgid: int) -> None: ...
+def setresuid(ruid: int, euid: int, suid: int) -> None: ...
+def setreuid(ruid: int, euid: int) -> None: ...
+def getsid(pid: int) -> int: ...
+def setsid() -> None: ...
+def setuid(pid: int) -> None: ...
+
+def strerror(code: int) -> str:
+    raise ValueError()
+
+def umask(mask: int) -> int: ...
+def uname() -> Tuple[str, str, str, str, str]: ...
+def unsetenv(varname: str) -> None: ...
+
+# TODO(MichalPokorny)
+def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ...
+def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ...
+def tmpfile() -> IO[Any]: ...
+
+def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
+def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ...
+def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
+
+def close(fd: int) -> None: ...
+def closerange(fd_low: int, fd_high: int) -> None: ...
+def dup(fd: int) -> int: ...
+def dup2(fd: int, fd2: int) -> None: ...
+def fchmod(fd: int, mode: int) -> None: ...
+def fchown(fd: int, uid: int, gid: int) -> None: ...
+def fdatasync(fd: int) -> None: ...
+def fpathconf(fd: int, name: str) -> None: ...
+
+# TODO(prvak)
+def fstat(fd: int) -> Any: ...
+def fstatvfs(fd: int) -> Any: ...
+def fsync(fd: int) -> None: ...
+def ftruncate(fd: int, length: int) -> None: ...
+def isatty(fd: int) -> bool: ...
+
+def lseek(fd: int, pos: int, how: int) -> int: ...
+SEEK_SET = 0
+SEEK_CUR = 1
+SEEK_END = 2
+
+# TODO(prvak): maybe file should be unicode? (same with all other paths...)
+def open(file: unicode, flags: int, mode: int = ...) -> int: ...
+def openpty() -> Tuple[int, int]: ...
+def pipe() -> Tuple[int, int]: ...
+def read(fd: int, n: int) -> str: ...
+def tcgetpgrp(fd: int) -> int: ...
+def tcsetpgrp(fd: int, pg: int) -> None: ...
+def ttyname(fd: int) -> str: ...
+def write(fd: int, str: str) -> int: ...
+
+# TODO: O_*
+
+def access(path: unicode, mode: int) -> bool: ...
+F_OK = 0
+R_OK = 0
+W_OK = 0
+X_OK = 0
+
+def getcwdu() -> unicode: ...
+def chflags(path: unicode, flags: int) -> None: ...
+def chroot(path: unicode) -> None: ...
+def chmod(path: unicode, mode: int) -> None: ...
+def chown(path: unicode, uid: int, gid: int) -> None: ...
+def lchflags(path: unicode, flags: int) -> None: ...
+def lchmod(path: unicode, mode: int) -> None: ...
+def lchown(path: unicode, uid: int, gid: int) -> None: ...
+def link(source: unicode, link_name: unicode) -> None: ...
+def listdir(path: AnyStr) -> List[AnyStr]: ...
+
+# TODO(MichalPokorny)
+def lstat(path: unicode) -> Any: ...
+
+def mkfifo(path: unicode, mode: int = ...) -> None: ...
+def mknod(filename: unicode, mode: int = ..., device: int = ...) -> None: ...
+def major(device: int) -> int: ...
+def minor(device: int) -> int: ...
+def makedev(major: int, minor: int) -> int: ...
+def mkdir(path: unicode, mode: int = ...) -> None: ...
+def makedirs(path: unicode, mode: int = ...) -> None: ...
+def pathconf(path: unicode, name: str) -> str: ...
+
+pathconf_names = ... # type: Mapping[str, int]
+
+def readlink(path: AnyStr) -> AnyStr: ...
+def remove(path: unicode) -> None: ...
+def removedirs(path: unicode) -> None:
+    raise OSError()
+def rename(src: unicode, dst: unicode) -> None: ...
+def renames(old: unicode, new: unicode) -> None: ...
+def rmdir(path: unicode) -> None: ...
+
+# TODO(MichalPokorny)
+def stat(path: unicode) -> Any: ...
+
+# TODO: stat_float_times, statvfs, tempnam, tmpnam, TMP_MAX
+def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
+         followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
+                                                    List[AnyStr]]]: ...
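+# e.g. for dirpath, dirnames, filenames in walk('/tmp'): ...  yields one tuple per directory;
+# with topdown=True, pruning dirnames in place limits the traversal (illustrative note).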
+
+def symlink(source: unicode, link_name: unicode) -> None: ...
+def unlink(path: unicode) -> None: ...
+def utime(path: unicode, times: Optional[Tuple[int, int]]) -> None: ...
+
+def abort() -> None: ...
+
+EX_OK = 0        # Unix only
+EX_USAGE = 0     # Unix only
+EX_DATAERR = 0   # Unix only
+EX_NOINPUT = 0   # Unix only
+EX_NOUSER = 0    # Unix only
+EX_NOHOST = 0    # Unix only
+EX_UNAVAILABLE = 0  # Unix only
+EX_SOFTWARE = 0  # Unix only
+EX_OSERR = 0     # Unix only
+EX_OSFILE = 0    # Unix only
+EX_CANTCREAT = 0 # Unix only
+EX_IOERR = 0     # Unix only
+EX_TEMPFAIL = 0  # Unix only
+EX_PROTOCOL = 0  # Unix only
+EX_NOPERM = 0    # Unix only
+EX_CONFIG = 0    # Unix only
+
+def execl(file: AnyStr, *args) -> None: ...
+def execle(file: AnyStr, *args) -> None: ...
+def execlp(file: AnyStr, *args) -> None: ...
+def execlpe(file: AnyStr, *args) -> None: ...
+def execvp(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
+def execvpe(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
+def execv(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
+def execve(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
+
+def _exit(n: int) -> None: ...
+
+def fork() -> int:
+    raise OSError()
+
+def forkpty() -> Tuple[int, int]:
+    raise OSError()
+
+def kill(pid: int, sig: int) -> None: ...
+def killpg(pgid: int, sig: int) -> None: ...
+def nice(increment: int) -> int: ...
+
+# TODO: plock, popen*, spawn*, P_*
+
+def startfile(path: unicode, operation: str) -> None: ...
+def system(command: unicode) -> int: ...
+def times() -> Tuple[float, float, float, float, float]: ...
+def wait() -> Tuple[int, int]: ... # Unix only
+def waitpid(pid: int, options: int) -> Tuple[int, int]:
+    raise OSError()
+# TODO: wait3, wait4, W...
+def confstr(name: Union[str, int]) -> Optional[str]: ...
+confstr_names = ... # type: Mapping[str, int]
+
+def getloadavg() -> Tuple[float, float, float]:
+    raise OSError()
+
+def sysconf(name: Union[str, int]) -> int: ...
+sysconf_names = ... # type: Mapping[str, int]
+
+curdir = ... # type: str
+pardir = ... # type: str
+sep = ... # type: str
+altsep = ... # type: str
+extsep = ... # type: str
+pathsep = ... # type: str
+defpath = ... # type: str
+linesep = ... # type: str
+devnull = ... # type: str
+
+def urandom(n: int) -> str: ...
+
+# More constants, copied from stdlib/3/os/__init__.pyi
+
+O_RDONLY = 0
+O_WRONLY = 0
+O_RDWR = 0
+O_APPEND = 0
+O_CREAT = 0
+O_EXCL = 0
+O_TRUNC = 0
+O_DSYNC = 0    # Unix only
+O_RSYNC = 0    # Unix only
+O_SYNC = 0     # Unix only
+O_NDELAY = 0   # Unix only
+O_NONBLOCK = 0 # Unix only
+O_NOCTTY = 0   # Unix only
+O_SHLOCK = 0   # Unix only
+O_EXLOCK = 0   # Unix only
+O_BINARY = 0     # Windows only
+O_NOINHERIT = 0  # Windows only
+O_SHORT_LIVED = 0  # Windows only
+O_TEMPORARY = 0  # Windows only
+O_RANDOM = 0     # Windows only
+O_SEQUENTIAL = 0 # Windows only
+O_TEXT = 0       # Windows only
+O_ASYNC = 0      # Gnu extension if in C library
+O_DIRECT = 0     # Gnu extension if in C library
+O_DIRECTORY = 0  # Gnu extension if in C library
+O_NOFOLLOW = 0   # Gnu extension if in C library
+O_NOATIME = 0    # Gnu extension if in C library
+
+F_OK = 0
+R_OK = 0
+W_OK = 0
+X_OK = 0
+
+P_NOWAIT = 0
+P_NOWAITO = 0
+P_WAIT = 0
+#P_DETACH = 0  # Windows only
+#P_OVERLAY = 0  # Windows only
+
+# wait()/waitpid() options
+WNOHANG = 0  # Unix only
+#WCONTINUED = 0  # some Unix systems
+#WUNTRACED = 0  # Unix only
+
+P_ALL = 0
+WEXITED = 0
+WNOWAIT = 0
diff --git a/typeshed/stdlib/2.7/os/path.pyi b/typeshed/stdlib/2.7/os/path.pyi
new file mode 100644
index 0000000..99b29e5
--- /dev/null
+++ b/typeshed/stdlib/2.7/os/path.pyi
@@ -0,0 +1,65 @@
+# Stubs for os.path
+# Ron Murawski <ron@horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.path.html
+# adapted for 2.7 by Michal Pokorny
+
+from typing import overload, List, Any, Tuple, BinaryIO, TextIO, TypeVar, Callable, AnyStr
+
+# ----- os.path variables -----
+supports_unicode_filenames = False
+# aliases (also in os)
+curdir = ...  # type: str
+pardir = ...  # type: str
+sep = ...  # type: str
+altsep = ...  # type: str
+extsep = ...  # type: str
+pathsep = ...  # type: str
+defpath = ...  # type: str
+devnull = ...  # type: str
+
+# ----- os.path function stubs -----
+def abspath(path: AnyStr) -> AnyStr: ...
+def basename(path: AnyStr) -> AnyStr: ...
+
+def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+def dirname(path: AnyStr) -> AnyStr: ...
+def exists(path: unicode) -> bool: ...
+def lexists(path: unicode) -> bool: ...
+def expanduser(path: AnyStr) -> AnyStr: ...
+def expandvars(path: AnyStr) -> AnyStr: ...
+
+# These return float if os.stat_float_times() == True,
+# but int is a subclass of float.
+def getatime(path: unicode) -> float: ...
+def getmtime(path: unicode) -> float: ...
+def getctime(path: unicode) -> float: ...
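+# e.g. getmtime('/etc/hosts') typically returns a float such as 1462450761.0; an int result
+# (float times disabled) still satisfies the declared float return type (illustrative note).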
+
+def getsize(path: unicode) -> int: ...
+def isabs(path: unicode) -> bool: ...
+def isfile(path: unicode) -> bool: ...
+def isdir(path: unicode) -> bool: ...
+def islink(path: unicode) -> bool: ...
+def ismount(path: unicode) -> bool: ...
+
+def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ...
+
+def normcase(path: AnyStr) -> AnyStr: ...
+def normpath(path: AnyStr) -> AnyStr: ...
+def realpath(path: AnyStr) -> AnyStr: ...
+def relpath(path: AnyStr, start: AnyStr = ...) -> AnyStr: ...
+
+def samefile(path1: unicode, path2: unicode) -> bool: ...
+def sameopenfile(fp1: int, fp2: int) -> bool: ...
+# TODO
+#def samestat(stat1: stat_result,
+#             stat2: stat_result) -> bool: ...  # Unix only
+
+def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+
+def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...  # Windows only, deprecated
+
+_T = TypeVar('_T')
+def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ...
diff --git a/typeshed/stdlib/2.7/pickle.pyi b/typeshed/stdlib/2.7/pickle.pyi
new file mode 100644
index 0000000..1e47e02
--- /dev/null
+++ b/typeshed/stdlib/2.7/pickle.pyi
@@ -0,0 +1,8 @@
+# Stubs for pickle (Python 2)
+
+from typing import Any, IO
+
+def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ...
+def dumps(obj: Any, protocol: int = ...) -> str: ...
+def load(file: IO[str]) -> Any: ...
+def loads(str: str) -> Any: ...
diff --git a/typeshed/stdlib/2.7/pipes.pyi b/typeshed/stdlib/2.7/pipes.pyi
new file mode 100644
index 0000000..6cfb94e
--- /dev/null
+++ b/typeshed/stdlib/2.7/pipes.pyi
@@ -0,0 +1,13 @@
+from typing import Any, IO
+
+class Template:
+    def __init__(self) -> None: ...
+    def reset(self) -> None: ...
+    def clone(self) -> Template: ...
+    def debug(self, flag: bool) -> None: ...
+    def append(self, cmd: str, kind: str) -> None: ...
+    def prepend(self, cmd: str, kind: str) -> None: ...
+    def open(self, file: str, mode: str) -> IO[Any]: ...
+    def copy(self, infile: str, outfile: str) -> None: ...
+
+def quote(s: str) -> str: ...
diff --git a/typeshed/stdlib/2.7/platform.pyi b/typeshed/stdlib/2.7/platform.pyi
new file mode 100644
index 0000000..83c61fa
--- /dev/null
+++ b/typeshed/stdlib/2.7/platform.pyi
@@ -0,0 +1,45 @@
+# Stubs for platform (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+__copyright__ = ... # type: Any
+DEV_NULL = ... # type: Any
+
+def libc_ver(executable=..., lib='', version='', chunksize=2048): ...
+def linux_distribution(distname='', version='', id='', supported_dists=..., full_distribution_name=1): ...
+def dist(distname='', version='', id='', supported_dists=...): ...
+
+class _popen:
+    tmpfile = ... # type: Any
+    pipe = ... # type: Any
+    bufsize = ... # type: Any
+    mode = ... # type: Any
+    def __init__(self, cmd, mode='', bufsize=None): ...
+    def read(self): ...
+    def readlines(self): ...
+    def close(self, remove=..., error=...): ...
+    __del__ = ... # type: Any
+
+def popen(cmd, mode='', bufsize=None): ...
+def win32_ver(release='', version='', csd='', ptype=''): ...
+def mac_ver(release='', versioninfo=..., machine=''): ...
+def java_ver(release='', vendor='', vminfo=..., osinfo=...): ...
+def system_alias(system, release, version): ...
+def architecture(executable=..., bits='', linkage=''): ...
+def uname(): ...
+def system(): ...
+def node(): ...
+def release(): ...
+def version(): ...
+def machine(): ...
+def processor(): ...
+def python_implementation(): ...
+def python_version(): ...
+def python_version_tuple(): ...
+def python_branch(): ...
+def python_revision(): ...
+def python_build(): ...
+def python_compiler(): ...
+def platform(aliased=0, terse=0): ...
diff --git a/typeshed/stdlib/2.7/posix.pyi b/typeshed/stdlib/2.7/posix.pyi
new file mode 100644
index 0000000..dc8ae5d
--- /dev/null
+++ b/typeshed/stdlib/2.7/posix.pyi
@@ -0,0 +1,206 @@
+from typing import Dict, List, Mapping, Tuple, Union, Sequence, IO, Optional, TypeVar
+
+error = OSError
+
+confstr_names = ...  # type: Dict[str, int]
+environ = ...  # type: Dict[str, str]
+pathconf_names = ...  # type: Dict[str, int]
+sysconf_names = ...  # type: Dict[str, int]
+
+EX_CANTCREAT = ...  # type: int
+EX_CONFIG = ...  # type: int
+EX_DATAERR = ...  # type: int
+EX_IOERR = ...  # type: int
+EX_NOHOST = ...  # type: int
+EX_NOINPUT = ...  # type: int
+EX_NOPERM = ...  # type: int
+EX_NOUSER = ...  # type: int
+EX_OK = ...  # type: int
+EX_OSERR = ...  # type: int
+EX_OSFILE = ...  # type: int
+EX_PROTOCOL = ...  # type: int
+EX_SOFTWARE = ...  # type: int
+EX_TEMPFAIL = ...  # type: int
+EX_UNAVAILABLE = ...  # type: int
+EX_USAGE = ...  # type: int
+F_OK = ...  # type: int
+NGROUPS_MAX = ...  # type: int
+O_APPEND = ...  # type: int
+O_ASYNC = ...  # type: int
+O_CREAT = ...  # type: int
+O_DIRECT = ...  # type: int
+O_DIRECTORY = ...  # type: int
+O_DSYNC = ...  # type: int
+O_EXCL = ...  # type: int
+O_LARGEFILE = ...  # type: int
+O_NDELAY = ...  # type: int
+O_NOATIME = ...  # type: int
+O_NOCTTY = ...  # type: int
+O_NOFOLLOW = ...  # type: int
+O_NONBLOCK = ...  # type: int
+O_RDONLY = ...  # type: int
+O_RDWR = ...  # type: int
+O_RSYNC = ...  # type: int
+O_SYNC = ...  # type: int
+O_TRUNC = ...  # type: int
+O_WRONLY = ...  # type: int
+R_OK = ...  # type: int
+TMP_MAX = ...  # type: int
+WCONTINUED = ...  # type: int
+WNOHANG = ...  # type: int
+WUNTRACED = ...  # type: int
+W_OK = ...  # type: int
+X_OK = ...  # type: int
+
+def WCOREDUMP(status: int) -> bool: ...
+def WEXITSTATUS(status: int) -> int: ...
+def WIFCONTINUED(status: int) -> bool: ...
+def WIFEXITED(status: int) -> bool: ...
+def WIFSIGNALED(status: int) -> bool: ...
+def WIFSTOPPED(status: int) -> bool: ...
+def WSTOPSIG(status: int) -> int: ...
+def WTERMSIG(status: int) -> int: ...
+
+class stat_result(object):
+    n_fields = ...  # type: int
+    n_sequence_fields = ...  # type: int
+    n_unnamed_fields = ...  # type: int
+    st_mode = ...  # type: int
+    st_ino = ...  # type: int
+    st_dev = ...  # type: int
+    st_nlink = ...  # type: int
+    st_uid = ...  # type: int
+    st_gid = ...  # type: int
+    st_size = ...  # type: int
+    st_atime = ...  # type: int
+    st_mtime = ...  # type: int
+    st_ctime = ...  # type: int
+
+class statvfs_result(object):
+    n_fields = ...  # type: int
+    n_sequence_fields = ...  # type: int
+    n_unnamed_fields = ...  # type: int
+    f_bsize = ...  # type: int
+    f_frsize = ...  # type: int
+    f_blocks = ...  # type: int
+    f_bfree = ...  # type: int
+    f_bavail = ...  # type: int
+    f_files = ...  # type: int
+    f_ffree = ...  # type: int
+    f_favail = ...  # type: int
+    f_flag = ...  # type: int
+    f_namemax = ...  # type: int
+
+def _exit(status: int) -> None: ...
+def abort() -> None: ...
+def access(path: unicode, mode: int) -> bool: ...
+def chdir(path: unicode) -> None: ...
+def chmod(path: unicode, mode: int) -> None: ...
+def chown(path: unicode, uid: int, gid: int) -> None: ...
+def chroot(path: unicode) -> None: ...
+def close(fd: int) -> None: ...
+def closerange(fd_low: int, fd_high: int) -> None: ...
+def confstr(name: Union[str, int]) -> str: ...
+def ctermid() -> str: ...
+def dup(fd: int) -> int: ...
+def dup2(fd: int, fd2: int) -> None: ...
+def execv(path: str, args: Sequence[str]) -> None: ...
+def execve(path: str, args: Sequence[str], env: Mapping[str, str]) -> None: ...
+def fchdir(fd: int) -> None: ...
+def fchmod(fd: int, mode: int) -> None: ...
+def fchown(fd: int, uid: int, gid: int) -> None: ...
+def fdatasync(fd: int) -> None: ...
+def fdopen(fd: int, mode: str = ..., bufsize: int = ...) -> IO[str]: ...
+def fork() -> int:
+    raise OSError()
+def forkpty() -> Tuple[int, int]:
+    raise OSError()
+def fpathconf(fd: int, name: str) -> int: ...
+def fstat(fd: int) -> stat_result: ...
+def fstatvfs(fd: int) -> statvfs_result: ...
+def fsync(fd: int) -> None: ...
+def ftruncate(fd: int, length: int) -> None: ...
+def getcwd() -> str: ...
+def getcwdu() -> unicode: ...
+def getegid() -> int: ...
+def geteuid() -> int: ...
+def getgid() -> int: ...
+def getgroups() -> List[int]: ...
+def getloadavg() -> Tuple[float, float, float]:
+    raise OSError()
+def getlogin() -> str: ...
+def getpgid(pid: int) -> int: ...
+def getpgrp() -> int: ...
+def getpid() -> int: ...
+def getppid() -> int: ...
+def getresgid() -> Tuple[int, int, int]: ...
+def getresuid() -> Tuple[int, int, int]: ...
+def getsid(pid: int) -> int: ...
+def getuid() -> int: ...
+def initgroups(username: str, gid: int) -> None: ...
+def isatty(fd: int) -> bool: ...
+def kill(pid: int, sig: int) -> None: ...
+def killpg(pgid: int, sig: int) -> None: ...
+def lchown(path: unicode, uid: int, gid: int) -> None: ...
+def link(source: unicode, link_name: str) -> None: ...
+_T = TypeVar("_T")
+def listdir(path: _T) -> List[_T]: ...
+def lseek(fd: int, pos: int, how: int) -> int: ...
+def lstat(path: unicode) -> stat_result: ...
+def major(device: int) -> int: ...
+def makedev(major: int, minor: int) -> int: ...
+def minor(device: int) -> int: ...
+def mkdir(path: unicode, mode: int = ...) -> None: ...
+def mkfifo(path: unicode, mode: int = ...) -> None: ...
+def mknod(filename: unicode, mode: int = ..., device: int = ...) -> None: ...
+def nice(increment: int) -> int: ...
+def open(file: unicode, flags: int, mode: int = ...) -> int: ...
+def openpty() -> Tuple[int, int]: ...
+def pathconf(path: unicode, name: str) -> int: ...
+def pipe() -> Tuple[int, int]: ...
+def popen(command: str, mode: str = ..., bufsize: int = ...) -> IO[str]: ...
+def putenv(varname: str, value: str) -> None: ...
+def read(fd: int, n: int) -> str: ...
+def readlink(path: _T) -> _T: ...
+def remove(path: unicode) -> None: ...
+def rename(src: unicode, dst: unicode) -> None: ...
+def rmdir(path: unicode) -> None: ...
+def setegid(egid: int) -> None: ...
+def seteuid(euid: int) -> None: ...
+def setgid(gid: int) -> None: ...
+def setgroups(groups: Sequence[int]) -> None: ...
+def setpgid(pid: int, pgrp: int) -> None: ...
+def setpgrp() -> None: ...
+def setregid(rgid: int, egid: int) -> None: ...
+def setresgid(rgid: int, egid: int, sgid: int) -> None: ...
+def setresuid(ruid: int, euid: int, suid: int) -> None: ...
+def setreuid(ruid: int, euid: int) -> None: ...
+def setsid() -> None: ...
+def setuid(uid: int) -> None: ...
+def stat(path: unicode) -> stat_result: ...
+def statvfs(path: unicode) -> statvfs_result: ...
+def stat_float_times(newvalue: bool = ...) -> bool: ...
+def strerror(code: int) -> str: ...
+def symlink(source: unicode, link_name: unicode) -> None: ...
+def sysconf(name: Union[str, int]) -> int: ...
+def system(command: unicode) -> int: ...
+def tcgetpgrp(fd: int) -> int: ...
+def tcsetpgrp(fd: int, pg: int) -> None: ...
+def times() -> Tuple[float, float, float, float, float]: ...
+def tmpfile() -> IO[str]: ...
+def ttyname(fd: int) -> str: ...
+def umask(mask: int) -> int: ...
+def uname() -> Tuple[str, str, str, str, str]: ...
+def unlink(path: unicode) -> None: ...
+def unsetenv(varname: str) -> None: ...
+def urandom(n: int) -> str: ...
+def utime(path: unicode, times: Optional[Tuple[int, int]]) -> None:
+    raise OSError()
+def wait() -> int: ...
+_r = Tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int]
+def wait3(options: int) -> Tuple[int, int, _r]: ...
+def wait4(pid: int, options: int) -> Tuple[int, int, _r]: ...
+def waitpid(pid: int, options: int) -> int:
+    raise OSError()
+def write(fd: int, str: str) -> int: ...
+
diff --git a/typeshed/stdlib/2.7/pprint.pyi b/typeshed/stdlib/2.7/pprint.pyi
new file mode 100644
index 0000000..2eb27d6
--- /dev/null
+++ b/typeshed/stdlib/2.7/pprint.pyi
@@ -0,0 +1,21 @@
+# Stubs for pprint (Python 2)
+#
+# NOTE: Based on a dynamically typed stub automatically generated by stubgen.
+
+from typing import IO, Any
+
+def pprint(object: Any, stream: IO[Any] = ..., indent: int = ..., width: int = ...,
+           depth: int = ...) -> None: ...
+def pformat(object, indent=..., width=..., depth=...): ...
+def saferepr(object): ...
+def isreadable(object): ...
+def isrecursive(object): ...
+
+class PrettyPrinter:
+    def __init__(self, indent: int = ..., width: int = ..., depth: int = ...,
+                 stream: IO[Any] = ...) -> None: ...
+    def pprint(self, object): ...
+    def pformat(self, object): ...
+    def isrecursive(self, object): ...
+    def isreadable(self, object): ...
+    def format(self, object, context, maxlevels, level): ...
diff --git a/typeshed/stdlib/2.7/pwd.pyi b/typeshed/stdlib/2.7/pwd.pyi
new file mode 100644
index 0000000..facd9db
--- /dev/null
+++ b/typeshed/stdlib/2.7/pwd.pyi
@@ -0,0 +1,18 @@
+from typing import List
+
+class struct_passwd(tuple):
+    n_fields = ...  # type: int
+    n_sequence_fields = ...  # type: int
+    n_unnamed_fields = ...  # type: int
+    pw_dir = ...  # type: str
+    pw_name = ...  # type: str
+    pw_passwd = ...  # type: str
+    pw_shell = ...  # type: str
+    pw_gecos = ...  # type: str
+    pw_gid = ...  # type: int
+    pw_uid = ...  # type: int
+
+def getpwall() -> List[struct_passwd]: ...
+def getpwnam(name: str) -> struct_passwd: ...
+def getpwuid(uid: int) -> struct_passwd: ...
+
diff --git a/typeshed/stdlib/2.7/random.pyi b/typeshed/stdlib/2.7/random.pyi
new file mode 100644
index 0000000..e8800c8
--- /dev/null
+++ b/typeshed/stdlib/2.7/random.pyi
@@ -0,0 +1,76 @@
+# Stubs for random
+# Ron Murawski <ron at horizonchess.com>
+# Updated by Jukka Lehtosalo
+
+# based on https://docs.python.org/2/library/random.html
+
+# ----- random classes -----
+
+import _random
+from typing import (
+    Any, TypeVar, Sequence, List, Callable, AbstractSet, Union,
+    overload
+)
+
+_T = TypeVar('_T')
+
+class Random(_random.Random):
+    def __init__(self, x: object = ...) -> None: ...
+    def seed(self, x: object = ...) -> None: ...
+    def getstate(self) -> _random._State: ...
+    def setstate(self, state: _random._State) -> None: ...
+    def jumpahead(self, n: int) -> None: ...
+    def getrandbits(self, k: int) -> int: ...
+    @overload
+    def randrange(self, stop: int) -> int: ...
+    @overload
+    def randrange(self, start: int, stop: int, step: int = ...) -> int: ...
+    def randint(self, a: int, b: int) -> int: ...
+    def choice(self, seq: Sequence[_T]) -> _T: ...
+    def shuffle(self, x: List[Any], random: Callable[[], float] = ...) -> None: ...
+    def sample(self, population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ...
+    def random(self) -> float: ...
+    def uniform(self, a: float, b: float) -> float: ...
+    def triangular(self, low: float = ..., high: float = ...,
+                     mode: float = ...) -> float: ...
+    def betavariate(self, alpha: float, beta: float) -> float: ...
+    def expovariate(self, lambd: float) -> float: ...
+    def gammavariate(self, alpha: float, beta: float) -> float: ...
+    def gauss(self, mu: float, sigma: float) -> float: ...
+    def lognormvariate(self, mu: float, sigma: float) -> float: ...
+    def normalvariate(self, mu: float, sigma: float) -> float: ...
+    def vonmisesvariate(self, mu: float, kappa: float) -> float: ...
+    def paretovariate(self, alpha: float) -> float: ...
+    def weibullvariate(self, alpha: float, beta: float) -> float: ...
+
+# SystemRandom is not implemented for all OS's; good on Windows & Linux
+class SystemRandom(Random):
+    ...
+
+# ----- random function stubs -----
+def seed(x: object = ...) -> None: ...
+def getstate() -> object: ...
+def setstate(state: object) -> None: ...
+def jumpahead(n: int) -> None: ...
+def getrandbits(k: int) -> int: ...
+@overload
+def randrange(stop: int) -> int: ...
+@overload
+def randrange(start: int, stop: int, step: int = ...) -> int: ...
+def randint(a: int, b: int) -> int: ...
+def choice(seq: Sequence[_T]) -> _T: ...
+def shuffle(x: List[Any], random: Callable[[], float] = ...) -> None: ...
+def sample(population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ...
+def random() -> float: ...
+def uniform(a: float, b: float) -> float: ...
+def triangular(low: float = ..., high: float = ...,
+               mode: float = ...) -> float: ...
+def betavariate(alpha: float, beta: float) -> float: ...
+def expovariate(lambd: float) -> float: ...
+def gammavariate(alpha: float, beta: float) -> float: ...
+def gauss(mu: float, sigma: float) -> float: ...
+def lognormvariate(mu: float, sigma: float) -> float: ...
+def normalvariate(mu: float, sigma: float) -> float: ...
+def vonmisesvariate(mu: float, kappa: float) -> float: ...
+def paretovariate(alpha: float) -> float: ...
+def weibullvariate(alpha: float, beta: float) -> float: ...
diff --git a/typeshed/stdlib/2.7/re.pyi b/typeshed/stdlib/2.7/re.pyi
new file mode 100644
index 0000000..79b3a10
--- /dev/null
+++ b/typeshed/stdlib/2.7/re.pyi
@@ -0,0 +1,65 @@
+# Stubs for re
+# Ron Murawski <ron at horizonchess.com>
+# 'bytes' support added by Jukka Lehtosalo
+
+# based on: http://docs.python.org/2.7/library/re.html
+
+from typing import (
+    List, Iterator, overload, Callable, Tuple, Sequence, Dict,
+    Generic, AnyStr, Match, Pattern
+)
+
+# ----- re variables and constants -----
+DEBUG = 0
+I = 0
+IGNORECASE = 0
+L = 0
+LOCALE = 0
+M = 0
+MULTILINE = 0
+S = 0
+DOTALL = 0
+X = 0
+VERBOSE = 0
+U = 0
+UNICODE = 0
+T = 0
+TEMPLATE = 0
+
+class error(Exception): ...
+
+def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ...
+def search(pattern: AnyStr, string: AnyStr,
+           flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: AnyStr, string: AnyStr,
+          flags: int = ...) -> Match[AnyStr]: ...
+def split(pattern: AnyStr, string: AnyStr, maxsplit: int = ...,
+          flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: AnyStr, string: AnyStr,
+            flags: int = ...) -> List[AnyStr]: ...
+
+# Return an iterator yielding match objects over all non-overlapping matches
+# for the RE pattern in string. The string is scanned left-to-right, and
+# matches are returned in the order found. Empty matches are included in the
+# result unless they touch the beginning of another match.
+def finditer(pattern: AnyStr, string: AnyStr,
+             flags: int = ...) -> Iterator[Match[AnyStr]]: ...
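+# A minimal usage sketch for the behavior described above (illustrative comment
+# only; the example pattern and strings are assumptions, not part of the stub):
+#     import re
+#     for m in re.finditer(r'\d+', 'a1 b22 c333'):
+#         print(m.group(0))  # matches are yielded left-to-right: '1', '22', '333'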
+
+@overload
+def sub(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
+        flags: int = ...) -> AnyStr: ...
+@overload
+def sub(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
+        string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
+
+@overload
+def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
+         flags: int = ...) -> Tuple[AnyStr, int]: ...
+@overload
+def subn(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
+         string: AnyStr, count: int = ...,
+         flags: int = ...) -> Tuple[AnyStr, int]: ...
+
+def escape(string: AnyStr) -> AnyStr: ...
+
+def purge() -> None: ...
diff --git a/typeshed/stdlib/2.7/resource.pyi b/typeshed/stdlib/2.7/resource.pyi
new file mode 100644
index 0000000..8d094ef
--- /dev/null
+++ b/typeshed/stdlib/2.7/resource.pyi
@@ -0,0 +1,33 @@
+from typing import Tuple, NamedTuple
+
+class error(Exception): ...
+
+RLIM_INFINITY = ... # type: int
+def getrlimit(resource: int) -> Tuple[int, int]: ...
+def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ...
+
+RLIMIT_CORE = ... # type: int
+RLIMIT_CPU = ... # type: int
+RLIMIT_FSIZE = ... # type: int
+RLIMIT_DATA = ... # type: int
+RLIMIT_STACK = ... # type: int
+RLIMIT_RSS = ... # type: int
+RLIMIT_NPROC = ... # type: int
+RLIMIT_NOFILE = ... # type: int
+RLIMIT_OFILE = ... # type: int
+RLIMIT_MEMLOCK = ... # type: int
+RLIMIT_VMEM = ... # type: int
+RLIMIT_AS = ... # type: int
+
+_RUsage = NamedTuple('_RUsage', [('ru_utime', float), ('ru_stime', float), ('ru_maxrss', int),
+                                 ('ru_ixrss', int), ('ru_idrss', int), ('ru_isrss', int),
+                                 ('ru_minflt', int), ('ru_majflt', int), ('ru_nswap', int),
+                                 ('ru_inblock', int), ('ru_oublock', int), ('ru_msgsnd', int),
+                                 ('ru_msgrcv', int), ('ru_nsignals', int), ('ru_nvcsw', int),
+                                 ('ru_nivcsw', int)])
+def getrusage(who: int) -> _RUsage: ...
+def getpagesize() -> int: ...
+
+RUSAGE_SELF = ... # type: int
+RUSAGE_CHILDREN = ... # type: int
+RUSAGE_BOTH = ... # type: int
diff --git a/typeshed/stdlib/2.7/rfc822.pyi b/typeshed/stdlib/2.7/rfc822.pyi
new file mode 100644
index 0000000..1c2b032
--- /dev/null
+++ b/typeshed/stdlib/2.7/rfc822.pyi
@@ -0,0 +1,79 @@
+# Stubs for rfc822 (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Message:
+    fp = ... # type: Any
+    seekable = ... # type: Any
+    startofheaders = ... # type: Any
+    startofbody = ... # type: Any
+    def __init__(self, fp, seekable=1): ...
+    def rewindbody(self): ...
+    dict = ... # type: Any
+    unixfrom = ... # type: Any
+    headers = ... # type: Any
+    status = ... # type: Any
+    def readheaders(self): ...
+    def isheader(self, line): ...
+    def islast(self, line): ...
+    def iscomment(self, line): ...
+    def getallmatchingheaders(self, name): ...
+    def getfirstmatchingheader(self, name): ...
+    def getrawheader(self, name): ...
+    def getheader(self, name, default=None): ...
+    get = ... # type: Any
+    def getheaders(self, name): ...
+    def getaddr(self, name): ...
+    def getaddrlist(self, name): ...
+    def getdate(self, name): ...
+    def getdate_tz(self, name): ...
+    def __len__(self): ...
+    def __getitem__(self, name): ...
+    def __setitem__(self, name, value): ...
+    def __delitem__(self, name): ...
+    def setdefault(self, name, default=''): ...
+    def has_key(self, name): ...
+    def __contains__(self, name): ...
+    def __iter__(self): ...
+    def keys(self): ...
+    def values(self): ...
+    def items(self): ...
+
+class AddrlistClass:
+    specials = ... # type: Any
+    pos = ... # type: Any
+    LWS = ... # type: Any
+    CR = ... # type: Any
+    atomends = ... # type: Any
+    phraseends = ... # type: Any
+    field = ... # type: Any
+    commentlist = ... # type: Any
+    def __init__(self, field): ...
+    def gotonext(self): ...
+    def getaddrlist(self): ...
+    def getaddress(self): ...
+    def getrouteaddr(self): ...
+    def getaddrspec(self): ...
+    def getdomain(self): ...
+    def getdelimited(self, beginchar, endchars, allowcomments=1): ...
+    def getquote(self): ...
+    def getcomment(self): ...
+    def getdomainliteral(self): ...
+    def getatom(self, atomends=None): ...
+    def getphraselist(self): ...
+
+class AddressList(AddrlistClass):
+    addresslist = ... # type: Any
+    def __init__(self, field): ...
+    def __len__(self): ...
+    def __add__(self, other): ...
+    def __iadd__(self, other): ...
+    def __sub__(self, other): ...
+    def __isub__(self, other): ...
+    def __getitem__(self, index): ...
+
+def parsedate_tz(data): ...
+def parsedate(data): ...
+def mktime_tz(data): ...
diff --git a/typeshed/stdlib/2.7/robotparser.pyi b/typeshed/stdlib/2.7/robotparser.pyi
new file mode 100644
index 0000000..403039a
--- /dev/null
+++ b/typeshed/stdlib/2.7/robotparser.pyi
@@ -0,0 +1,7 @@
+class RobotFileParser:
+    def set_url(self, url: str): ...
+    def read(self): ...
+    def parse(self, lines: str): ...
+    def can_fetch(self, user_agent: str, url: str): ...
+    def mtime(self): ...
+    def modified(self): ...
diff --git a/typeshed/stdlib/2.7/select.pyi b/typeshed/stdlib/2.7/select.pyi
new file mode 100644
index 0000000..f17b22d
--- /dev/null
+++ b/typeshed/stdlib/2.7/select.pyi
@@ -0,0 +1,100 @@
+"""Stubs for the 'select' module."""
+
+from typing import Any, Optional, Tuple, Iterable, List
+
+EPOLLERR = ...  # type: int
+EPOLLET = ...  # type: int
+EPOLLHUP = ...  # type: int
+EPOLLIN = ...  # type: int
+EPOLLMSG = ...  # type: int
+EPOLLONESHOT = ...  # type: int
+EPOLLOUT = ...  # type: int
+EPOLLPRI = ...  # type: int
+EPOLLRDBAND = ...  # type: int
+EPOLLRDNORM = ...  # type: int
+EPOLLWRBAND = ...  # type: int
+EPOLLWRNORM = ...  # type: int
+EPOLL_RDHUP = ...  # type: int
+KQ_EV_ADD = ...  # type: int
+KQ_EV_CLEAR = ...  # type: int
+KQ_EV_DELETE = ...  # type: int
+KQ_EV_DISABLE = ...  # type: int
+KQ_EV_ENABLE = ...  # type: int
+KQ_EV_EOF = ...  # type: int
+KQ_EV_ERROR = ...  # type: int
+KQ_EV_FLAG1 = ...  # type: int
+KQ_EV_ONESHOT = ...  # type: int
+KQ_EV_SYSFLAGS = ...  # type: int
+KQ_FILTER_AIO = ...  # type: int
+KQ_FILTER_NETDEV = ...  # type: int
+KQ_FILTER_PROC = ...  # type: int
+KQ_FILTER_READ = ...  # type: int
+KQ_FILTER_SIGNAL = ...  # type: int
+KQ_FILTER_TIMER = ...  # type: int
+KQ_FILTER_VNODE = ...  # type: int
+KQ_FILTER_WRITE = ...  # type: int
+KQ_NOTE_ATTRIB = ...  # type: int
+KQ_NOTE_CHILD = ...  # type: int
+KQ_NOTE_DELETE = ...  # type: int
+KQ_NOTE_EXEC = ...  # type: int
+KQ_NOTE_EXIT = ...  # type: int
+KQ_NOTE_EXTEND = ...  # type: int
+KQ_NOTE_FORK = ...  # type: int
+KQ_NOTE_LINK = ...  # type: int
+KQ_NOTE_LINKDOWN = ...  # type: int
+KQ_NOTE_LINKINV = ...  # type: int
+KQ_NOTE_LINKUP = ...  # type: int
+KQ_NOTE_LOWAT = ...  # type: int
+KQ_NOTE_PCTRLMASK = ...  # type: int
+KQ_NOTE_PDATAMASK = ...  # type: int
+KQ_NOTE_RENAME = ...  # type: int
+KQ_NOTE_REVOKE = ...  # type: int
+KQ_NOTE_TRACK = ...  # type: int
+KQ_NOTE_TRACKERR = ...  # type: int
+KQ_NOTE_WRITE = ...  # type: int
+PIPE_BUF = ...  # type: int
+POLLERR = ...  # type: int
+POLLHUP = ...  # type: int
+POLLIN = ...  # type: int
+POLLMSG = ...  # type: int
+POLLNVAL = ...  # type: int
+POLLOUT = ...  # type: int
+POLLPRI = ...  # type: int
+POLLRDBAND = ...  # type: int
+POLLRDNORM = ...  # type: int
+POLLWRBAND = ...  # type: int
+POLLWRNORM = ...  # type: int
+
+def poll() -> epoll: ...
+def select(rlist, wlist, xlist, timeout: Optional[float] = ...) -> Tuple[List, List, List]: ...
+
+class error(Exception): ...
+
+class kevent(object):
+    data = ... # type: Any
+    fflags = ... # type: int
+    filter = ... # type: int
+    flags = ... # type: int
+    ident = ... # type: Any
+    udata = ... # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+
+class kqueue(object):
+    closed = ... # type: bool
+    def __init__(self) -> None: ...
+    def close(self) -> None: ...
+    def control(self, changelist: Optional[Iterable[kevent]], max_events: int, timeout: int = ...) -> List[kevent]: ...
+    def fileno(self) -> int: ...
+    @classmethod
+    def fromfd(cls, fd: int) -> kqueue: ...
+
+class epoll(object):
+    def __init__(self, sizehint: int = ...) -> None: ...
+    def close(self) -> None: ...
+    def fileno(self) -> int: ...
+    def register(self, fd: int, eventmask: int = ...) -> None: ...
+    def modify(self, fd: int, eventmask: int) -> None: ...
+    def unregister(self, fd: int) -> None: ...
+    def poll(self, timeout: float = ..., maxevents: int = ...) -> Any: ...
+    @classmethod
+    def fromfd(cls, fd: int) -> epoll: ...
diff --git a/typeshed/stdlib/2.7/sha.pyi b/typeshed/stdlib/2.7/sha.pyi
new file mode 100644
index 0000000..8e4d49a
--- /dev/null
+++ b/typeshed/stdlib/2.7/sha.pyi
@@ -0,0 +1,12 @@
+# Stubs for Python 2.7 sha stdlib module
+
+class sha(object):
+    def update(self, arg: str) -> None: ...
+    def digest(self) -> str: ...
+    def hexdigest(self) -> str: ...
+    def copy(self) -> sha: ...
+
+def new(string: str = ...) -> sha: ...
+blocksize = 0
+digest_size = 0
+
diff --git a/typeshed/stdlib/2.7/shlex.pyi b/typeshed/stdlib/2.7/shlex.pyi
new file mode 100644
index 0000000..de37acf
--- /dev/null
+++ b/typeshed/stdlib/2.7/shlex.pyi
@@ -0,0 +1,27 @@
+from typing import Optional, List, Any, IO
+
+def split(s: Optional[str], comments: bool = ..., posix: bool = ...) -> List[str]: ...
+
+class shlex:
+    def __init__(self, instream: IO[Any] = ..., infile: IO[Any] = ..., posix: bool = ...) -> None: ...
+    def get_token(self) -> Optional[str]: ...
+    def push_token(self, _str: str) -> None: ...
+    def read_token(self) -> str: ...
+    def sourcehook(self, filename: str) -> None: ...
+    def push_source(self, stream: IO[Any], filename: str = ...) -> None: ...
+    def pop_source(self) -> IO[Any]: ...
+    def error_leader(self, file: str = ..., line: int = ...) -> str: ...
+
+    commenters = ... # type: str
+    wordchars = ... # type: str
+    whitespace = ... # type: str
+    escape = ... # type: str
+    quotes = ... # type: str
+    escapedquotes = ... # type: str
+    whitespace_split = ... # type: bool
+    infile = ... # type: IO[Any]
+    source = ... # type: Optional[str]
+    debug = ... # type: int
+    lineno = ... # type: int
+    token = ... # type: Any
+    eof = ... # type: Optional[str]
diff --git a/typeshed/stdlib/2.7/shutil.pyi b/typeshed/stdlib/2.7/shutil.pyi
new file mode 100644
index 0000000..a826a09
--- /dev/null
+++ b/typeshed/stdlib/2.7/shutil.pyi
@@ -0,0 +1,30 @@
+# Stubs for shutil (Python 2)
+#
+# NOTE: Based on a dynamically typed stub automatically generated by stubgen.
+
+from typing import List, Iterable, Callable, IO, AnyStr, Any, Tuple, Sequence
+
+class Error(EnvironmentError): ...
+class SpecialFileError(EnvironmentError): ...
+class ExecError(EnvironmentError): ...
+
+def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr], length: int = ...) -> None: ...
+def copyfile(src: unicode, dst: unicode) -> None: ...
+def copymode(src: unicode, dst: unicode) -> None: ...
+def copystat(src: unicode, dst: unicode) -> None: ...
+def copy(src: unicode, dst: unicode) -> None: ...
+def copy2(src: unicode, dst: unicode) -> None: ...
+def ignore_patterns(*patterns: AnyStr) -> Callable[[AnyStr, List[AnyStr]], Iterable[AnyStr]]: ...
+def copytree(src: AnyStr, dst: AnyStr, symlinks: bool = ...,
+             ignore: Callable[[AnyStr, List[AnyStr]], Iterable[AnyStr]] = ...) -> None: ...
+def rmtree(path: AnyStr, ignore_errors: bool = ...,
+           onerror: Callable[[Any, AnyStr, Any], None] = ...) -> None: ...
+def move(src: unicode, dst: unicode) -> None: ...
+def get_archive_formats() -> List[Tuple[str, str]]: ...
+def register_archive_format(name: str, function: Callable[..., Any],
+                            extra_args: Sequence[Tuple[str, Any]] = ...,
+                            description: str = ...) -> None: ...
+def unregister_archive_format(name: str) -> None: ...
+def make_archive(base_name: AnyStr, format: str, root_dir: unicode = ...,
+                 base_dir: unicode = ..., verbose: int = ..., dry_run: int = ...,
+                 owner: str = ..., group: str = ..., logger: Any = ...) -> AnyStr: ...
diff --git a/typeshed/stdlib/2.7/signal.pyi b/typeshed/stdlib/2.7/signal.pyi
new file mode 100644
index 0000000..1c9481b
--- /dev/null
+++ b/typeshed/stdlib/2.7/signal.pyi
@@ -0,0 +1,62 @@
+from typing import Callable, Any, Tuple, Union
+from types import FrameType
+
+SIG_DFL = ...  # type: int
+SIG_IGN = ...  # type: int
+ITIMER_REAL = ...  # type: int
+ITIMER_VIRTUAL = ...  # type: int
+ITIMER_PROF = ...  # type: int
+
+SIGABRT = ...  # type: int
+SIGALRM = ...  # type: int
+SIGBUS = ...  # type: int
+SIGCHLD = ...  # type: int
+SIGCLD = ...  # type: int
+SIGCONT = ...  # type: int
+SIGFPE = ...  # type: int
+SIGHUP = ...  # type: int
+SIGILL = ...  # type: int
+SIGINT = ...  # type: int
+SIGIO = ...  # type: int
+SIGIOT = ...  # type: int
+SIGKILL = ...  # type: int
+SIGPIPE = ...  # type: int
+SIGPOLL = ...  # type: int
+SIGPROF = ...  # type: int
+SIGPWR = ...  # type: int
+SIGQUIT = ...  # type: int
+SIGRTMAX = ...  # type: int
+SIGRTMIN = ...  # type: int
+SIGSEGV = ...  # type: int
+SIGSTOP = ...  # type: int
+SIGSYS = ...  # type: int
+SIGTERM = ...  # type: int
+SIGTRAP = ...  # type: int
+SIGTSTP = ...  # type: int
+SIGTTIN = ...  # type: int
+SIGTTOU = ...  # type: int
+SIGURG = ...  # type: int
+SIGUSR1 = ...  # type: int
+SIGUSR2 = ...  # type: int
+SIGVTALRM = ...  # type: int
+SIGWINCH = ...  # type: int
+SIGXCPU = ...  # type: int
+SIGXFSZ = ...  # type: int
+NSIG = ...  # type: int
+
+class ItimerError(IOError): ...
+
+_HANDLER = Union[Callable[[int, FrameType], None], int, None]
+
+def alarm(time: int) -> int: ...
+def getsignal(signalnum: int) -> _HANDLER: ...
+def pause() -> None: ...
+def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ...
+def getitimer(which: int) -> Tuple[float, float]: ...
+def set_wakeup_fd(fd: int) -> int: ...
+def siginterrupt(signalnum: int, flag: bool) -> None:
+    raise RuntimeError()
+def signal(signalnum: int, handler: _HANDLER) -> _HANDLER:
+    raise RuntimeError()
+def default_int_handler(signum: int, frame: FrameType) -> None:
+    raise KeyboardInterrupt()
diff --git a/typeshed/stdlib/2.7/simplejson/__init__.pyi b/typeshed/stdlib/2.7/simplejson/__init__.pyi
new file mode 100644
index 0000000..84e6483
--- /dev/null
+++ b/typeshed/stdlib/2.7/simplejson/__init__.pyi
@@ -0,0 +1,10 @@
+from typing import Any, IO
+
+def dumps(obj: Any) -> str: ...
+def dump(obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
+def loads(s: str, **kwds: Any) -> Any: ...
+def load(fp: IO[str]) -> Any: ...
+
+from .scanner import JSONDecodeError
+from .decoder import JSONDecoder
+from .encoder import JSONEncoder, JSONEncoderForHTML
diff --git a/typeshed/stdlib/2.7/simplejson/decoder.pyi b/typeshed/stdlib/2.7/simplejson/decoder.pyi
new file mode 100644
index 0000000..59111ce
--- /dev/null
+++ b/typeshed/stdlib/2.7/simplejson/decoder.pyi
@@ -0,0 +1,6 @@
+from typing import Any, Match
+
+class JSONDecoder(object):
+    def __init__(self, **kwargs): ...
+    def decode(self, s: str, _w: Match[str], _PY3: bool): ...
+    def raw_decode(self, s: str, idx: int, _w: Match[str], _PY3: bool): ...
diff --git a/typeshed/stdlib/2.7/simplejson/encoder.pyi b/typeshed/stdlib/2.7/simplejson/encoder.pyi
new file mode 100644
index 0000000..0e31806
--- /dev/null
+++ b/typeshed/stdlib/2.7/simplejson/encoder.pyi
@@ -0,0 +1,9 @@
+from typing import Any, IO
+
+class JSONEncoder(object):
+    def __init__(self, *args, **kwargs): ...
+    def encode(self, o: Any): ...
+    def default(self, o: Any): ...
+    def iterencode(self, o: Any, _one_shot: bool): ...
+
+class JSONEncoderForHTML(JSONEncoder): ...
diff --git a/typeshed/stdlib/2.7/simplejson/scanner.pyi b/typeshed/stdlib/2.7/simplejson/scanner.pyi
new file mode 100644
index 0000000..760b24d
--- /dev/null
+++ b/typeshed/stdlib/2.7/simplejson/scanner.pyi
@@ -0,0 +1,7 @@
+from typing import Any, IO
+
+class JSONDecodeError(ValueError):
+    def dumps(self, obj: Any) -> str: ...
+    def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
+    def loads(self, s: str) -> Any: ...
+    def load(self, fp: IO[str]) -> Any: ...
diff --git a/typeshed/stdlib/2.7/smtplib.pyi b/typeshed/stdlib/2.7/smtplib.pyi
new file mode 100644
index 0000000..5bf994b
--- /dev/null
+++ b/typeshed/stdlib/2.7/smtplib.pyi
@@ -0,0 +1,90 @@
+# Stubs for smtplib (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class SMTPException(Exception): ...
+class SMTPServerDisconnected(SMTPException): ...
+
+class SMTPResponseException(SMTPException):
+    smtp_code = ... # type: Any
+    smtp_error = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, code, msg) -> None: ...
+
+class SMTPSenderRefused(SMTPResponseException):
+    smtp_code = ... # type: Any
+    smtp_error = ... # type: Any
+    sender = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, code, msg, sender) -> None: ...
+
+class SMTPRecipientsRefused(SMTPException):
+    recipients = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, recipients) -> None: ...
+
+class SMTPDataError(SMTPResponseException): ...
+class SMTPConnectError(SMTPResponseException): ...
+class SMTPHeloError(SMTPResponseException): ...
+class SMTPAuthenticationError(SMTPResponseException): ...
+
+def quoteaddr(addr): ...
+def quotedata(data): ...
+
+class SSLFakeFile:
+    sslobj = ... # type: Any
+    def __init__(self, sslobj) -> None: ...
+    def readline(self, size=...): ...
+    def close(self): ...
+
+class SMTP:
+    debuglevel = ... # type: Any
+    file = ... # type: Any
+    helo_resp = ... # type: Any
+    ehlo_msg = ... # type: Any
+    ehlo_resp = ... # type: Any
+    does_esmtp = ... # type: Any
+    default_port = ... # type: Any
+    timeout = ... # type: Any
+    esmtp_features = ... # type: Any
+    local_hostname = ... # type: Any
+    def __init__(self, host: str = ..., port: int = ..., local_hostname=..., timeout=...) -> None: ...
+    def set_debuglevel(self, debuglevel): ...
+    sock = ... # type: Any
+    def connect(self, host=..., port=...): ...
+    def send(self, str): ...
+    def putcmd(self, cmd, args=...): ...
+    def getreply(self): ...
+    def docmd(self, cmd, args=...): ...
+    def helo(self, name=...): ...
+    def ehlo(self, name=...): ...
+    def has_extn(self, opt): ...
+    def help(self, args=...): ...
+    def rset(self): ...
+    def noop(self): ...
+    def mail(self, sender, options=...): ...
+    def rcpt(self, recip, options=...): ...
+    def data(self, msg): ...
+    def verify(self, address): ...
+    vrfy = ... # type: Any
+    def expn(self, address): ...
+    def ehlo_or_helo_if_needed(self): ...
+    def login(self, user, password): ...
+    def starttls(self, keyfile=..., certfile=...): ...
+    def sendmail(self, from_addr, to_addrs, msg, mail_options=..., rcpt_options=...): ...
+    def close(self): ...
+    def quit(self): ...
+
+class SMTP_SSL(SMTP):
+    default_port = ... # type: Any
+    keyfile = ... # type: Any
+    certfile = ... # type: Any
+    def __init__(self, host=..., port=..., local_hostname=..., keyfile=..., certfile=..., timeout=...) -> None: ...
+
+class LMTP(SMTP):
+    ehlo_msg = ... # type: Any
+    def __init__(self, host=..., port=..., local_hostname=...) -> None: ...
+    sock = ... # type: Any
+    def connect(self, host=..., port=...): ...
diff --git a/typeshed/stdlib/2.7/socket.pyi b/typeshed/stdlib/2.7/socket.pyi
new file mode 100644
index 0000000..807c0f1
--- /dev/null
+++ b/typeshed/stdlib/2.7/socket.pyi
@@ -0,0 +1,388 @@
+# Stubs for socket
+# Ron Murawski <ron at horizonchess.com>
+
+# based on: http://docs.python.org/3.2/library/socket.html
+# see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
+# see: http://nullege.com/codes/search/socket
+# adapted for Python 2.7 by Michal Pokorny
+
+from typing import Any, Tuple, overload, List, Optional, Union
+
+# ----- variables and constants -----
+
+AF_UNIX = 0
+AF_INET = 0
+AF_INET6 = 0
+SOCK_STREAM = 0
+SOCK_DGRAM = 0
+SOCK_RAW = 0
+SOCK_RDM = 0
+SOCK_SEQPACKET = 0
+SOCK_CLOEXEC = 0
+SOCK_NONBLOCK = 0
+SOMAXCONN = 0
+has_ipv6 = False
+_GLOBAL_DEFAULT_TIMEOUT = 0.0
+SocketType = ...  # type: Any
+SocketIO = ...  # type: Any
+
+
+# the following constants are included with Python 3.2.3 (Ubuntu)
+# some of the constants may be Linux-only
+# all Windows/Mac-specific constants are absent
+AF_APPLETALK = 0
+AF_ASH = 0
+AF_ATMPVC = 0
+AF_ATMSVC = 0
+AF_AX25 = 0
+AF_BLUETOOTH = 0
+AF_BRIDGE = 0
+AF_DECnet = 0
+AF_ECONET = 0
+AF_IPX = 0
+AF_IRDA = 0
+AF_KEY = 0
+AF_LLC = 0
+AF_NETBEUI = 0
+AF_NETLINK = 0
+AF_NETROM = 0
+AF_PACKET = 0
+AF_PPPOX = 0
+AF_ROSE = 0
+AF_ROUTE = 0
+AF_SECURITY = 0
+AF_SNA = 0
+AF_TIPC = 0
+AF_UNSPEC = 0
+AF_WANPIPE = 0
+AF_X25 = 0
+AI_ADDRCONFIG = 0
+AI_ALL = 0
+AI_CANONNAME = 0
+AI_NUMERICHOST = 0
+AI_NUMERICSERV = 0
+AI_PASSIVE = 0
+AI_V4MAPPED = 0
+BDADDR_ANY = 0
+BDADDR_LOCAL = 0
+BTPROTO_HCI = 0
+BTPROTO_L2CAP = 0
+BTPROTO_RFCOMM = 0
+BTPROTO_SCO = 0
+CAPI = 0
+EAGAIN = 0
+EAI_ADDRFAMILY = 0
+EAI_AGAIN = 0
+EAI_BADFLAGS = 0
+EAI_FAIL = 0
+EAI_FAMILY = 0
+EAI_MEMORY = 0
+EAI_NODATA = 0
+EAI_NONAME = 0
+EAI_OVERFLOW = 0
+EAI_SERVICE = 0
+EAI_SOCKTYPE = 0
+EAI_SYSTEM = 0
+EBADF = 0
+EINTR = 0
+EWOULDBLOCK = 0
+HCI_DATA_DIR = 0
+HCI_FILTER = 0
+HCI_TIME_STAMP = 0
+INADDR_ALLHOSTS_GROUP = 0
+INADDR_ANY = 0
+INADDR_BROADCAST = 0
+INADDR_LOOPBACK = 0
+INADDR_MAX_LOCAL_GROUP = 0
+INADDR_NONE = 0
+INADDR_UNSPEC_GROUP = 0
+IPPORT_RESERVED = 0
+IPPORT_USERRESERVED = 0
+IPPROTO_AH = 0
+IPPROTO_DSTOPTS = 0
+IPPROTO_EGP = 0
+IPPROTO_ESP = 0
+IPPROTO_FRAGMENT = 0
+IPPROTO_GRE = 0
+IPPROTO_HOPOPTS = 0
+IPPROTO_ICMP = 0
+IPPROTO_ICMPV6 = 0
+IPPROTO_IDP = 0
+IPPROTO_IGMP = 0
+IPPROTO_IP = 0
+IPPROTO_IPIP = 0
+IPPROTO_IPV6 = 0
+IPPROTO_NONE = 0
+IPPROTO_PIM = 0
+IPPROTO_PUP = 0
+IPPROTO_RAW = 0
+IPPROTO_ROUTING = 0
+IPPROTO_RSVP = 0
+IPPROTO_TCP = 0
+IPPROTO_TP = 0
+IPPROTO_UDP = 0
+IPV6_CHECKSUM = 0
+IPV6_DSTOPTS = 0
+IPV6_HOPLIMIT = 0
+IPV6_HOPOPTS = 0
+IPV6_JOIN_GROUP = 0
+IPV6_LEAVE_GROUP = 0
+IPV6_MULTICAST_HOPS = 0
+IPV6_MULTICAST_IF = 0
+IPV6_MULTICAST_LOOP = 0
+IPV6_NEXTHOP = 0
+IPV6_PKTINFO = 0
+IPV6_RECVDSTOPTS = 0
+IPV6_RECVHOPLIMIT = 0
+IPV6_RECVHOPOPTS = 0
+IPV6_RECVPKTINFO = 0
+IPV6_RECVRTHDR = 0
+IPV6_RECVTCLASS = 0
+IPV6_RTHDR = 0
+IPV6_RTHDRDSTOPTS = 0
+IPV6_RTHDR_TYPE_0 = 0
+IPV6_TCLASS = 0
+IPV6_UNICAST_HOPS = 0
+IPV6_V6ONLY = 0
+IP_ADD_MEMBERSHIP = 0
+IP_DEFAULT_MULTICAST_LOOP = 0
+IP_DEFAULT_MULTICAST_TTL = 0
+IP_DROP_MEMBERSHIP = 0
+IP_HDRINCL = 0
+IP_MAX_MEMBERSHIPS = 0
+IP_MULTICAST_IF = 0
+IP_MULTICAST_LOOP = 0
+IP_MULTICAST_TTL = 0
+IP_OPTIONS = 0
+IP_RECVOPTS = 0
+IP_RECVRETOPTS = 0
+IP_RETOPTS = 0
+IP_TOS = 0
+IP_TTL = 0
+MSG_CTRUNC = 0
+MSG_DONTROUTE = 0
+MSG_DONTWAIT = 0
+MSG_EOR = 0
+MSG_OOB = 0
+MSG_PEEK = 0
+MSG_TRUNC = 0
+MSG_WAITALL = 0
+NETLINK_DNRTMSG = 0
+NETLINK_FIREWALL = 0
+NETLINK_IP6_FW = 0
+NETLINK_NFLOG = 0
+NETLINK_ROUTE = 0
+NETLINK_USERSOCK = 0
+NETLINK_XFRM = 0
+NI_DGRAM = 0
+NI_MAXHOST = 0
+NI_MAXSERV = 0
+NI_NAMEREQD = 0
+NI_NOFQDN = 0
+NI_NUMERICHOST = 0
+NI_NUMERICSERV = 0
+PACKET_BROADCAST = 0
+PACKET_FASTROUTE = 0
+PACKET_HOST = 0
+PACKET_LOOPBACK = 0
+PACKET_MULTICAST = 0
+PACKET_OTHERHOST = 0
+PACKET_OUTGOING = 0
+PF_PACKET = 0
+SHUT_RD = 0
+SHUT_RDWR = 0
+SHUT_WR = 0
+SOL_HCI = 0
+SOL_IP = 0
+SOL_SOCKET = 0
+SOL_TCP = 0
+SOL_TIPC = 0
+SOL_UDP = 0
+SO_ACCEPTCONN = 0
+SO_BROADCAST = 0
+SO_DEBUG = 0
+SO_DONTROUTE = 0
+SO_ERROR = 0
+SO_KEEPALIVE = 0
+SO_LINGER = 0
+SO_OOBINLINE = 0
+SO_RCVBUF = 0
+SO_RCVLOWAT = 0
+SO_RCVTIMEO = 0
+SO_REUSEADDR = 0
+SO_SNDBUF = 0
+SO_SNDLOWAT = 0
+SO_SNDTIMEO = 0
+SO_TYPE = 0
+TCP_CORK = 0
+TCP_DEFER_ACCEPT = 0
+TCP_INFO = 0
+TCP_KEEPCNT = 0
+TCP_KEEPIDLE = 0
+TCP_KEEPINTVL = 0
+TCP_LINGER2 = 0
+TCP_MAXSEG = 0
+TCP_NODELAY = 0
+TCP_QUICKACK = 0
+TCP_SYNCNT = 0
+TCP_WINDOW_CLAMP = 0
+TIPC_ADDR_ID = 0
+TIPC_ADDR_NAME = 0
+TIPC_ADDR_NAMESEQ = 0
+TIPC_CFG_SRV = 0
+TIPC_CLUSTER_SCOPE = 0
+TIPC_CONN_TIMEOUT = 0
+TIPC_CRITICAL_IMPORTANCE = 0
+TIPC_DEST_DROPPABLE = 0
+TIPC_HIGH_IMPORTANCE = 0
+TIPC_IMPORTANCE = 0
+TIPC_LOW_IMPORTANCE = 0
+TIPC_MEDIUM_IMPORTANCE = 0
+TIPC_NODE_SCOPE = 0
+TIPC_PUBLISHED = 0
+TIPC_SRC_DROPPABLE = 0
+TIPC_SUBSCR_TIMEOUT = 0
+TIPC_SUB_CANCEL = 0
+TIPC_SUB_PORTS = 0
+TIPC_SUB_SERVICE = 0
+TIPC_TOP_SRV = 0
+TIPC_WAIT_FOREVER = 0
+TIPC_WITHDRAWN = 0
+TIPC_ZONE_SCOPE = 0
+
+
+# ----- exceptions -----
+class error(IOError):
+    ...
+
+class herror(error):
+    def __init__(self, herror: int, string: str) -> None: ...
+
+class gaierror(error):
+    def __init__(self, error: int, string: str) -> None: ...
+
+class timeout(error):
+    ...
+
+
+# Addresses can be either tuples of varying lengths (AF_INET, AF_INET6,
+# AF_NETLINK, AF_TIPC) or strings (AF_UNIX).
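+# For illustration, typical forms look like the following (example values are
+# assumptions added for readability, not part of the stub itself):
+#     AF_INET:  ('127.0.0.1', 8080)
+#     AF_INET6: ('::1', 8080, 0, 0)
+#     AF_UNIX:  '/tmp/app.sock'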
+
+# TODO AF_PACKET and AF_BLUETOOTH address objects
+
+
+# ----- classes -----
+class socket:
+    family = 0
+    type = 0
+    proto = 0
+
+    def __init__(self, family: int = ..., type: int = ...,
+                 proto: int = ..., fileno: int = ...) -> None: ...
+
+    # --- methods ---
+    # second tuple item is an address
+    def accept(self) -> Tuple['socket', Any]: ...
+
+    @overload
+    def bind(self, address: tuple) -> None: ...
+    @overload
+    def bind(self, address: str) -> None: ...
+
+    def close(self) -> None: ...
+
+    @overload
+    def connect(self, address: tuple) -> None: ...
+    @overload
+    def connect(self, address: str) -> None: ...
+
+    @overload
+    def connect_ex(self, address: tuple) -> int: ...
+    @overload
+    def connect_ex(self, address: str) -> int: ...
+
+    def detach(self) -> int: ...
+    def fileno(self) -> int: ...
+
+    # return value is an address
+    def getpeername(self) -> Any: ...
+    def getsockname(self) -> Any: ...
+
+    @overload
+    def getsockopt(self, level: int, optname: int) -> str: ...
+    @overload
+    def getsockopt(self, level: int, optname: int, buflen: int) -> str: ...
+
+    def gettimeout(self) -> float: ...
+    def ioctl(self, control: object,
+              option: Tuple[int, int, int]) -> None: ...
+    def listen(self, backlog: int) -> None: ...
+    # TODO the return value may be BinaryIO or TextIO, depending on mode
+    def makefile(self, mode: str = ..., buffering: int = ...,
+                 encoding: str = ..., errors: str = ...,
+                 newline: str = ...) -> Any:
+        ...
+    def recv(self, bufsize: int, flags: int = ...) -> str: ...
+
+    # return type is an address
+    def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ...
+    def recvfrom_into(self, buffer: str, nbytes: int,
+                      flags: int = ...) -> Any: ...
+    def recv_into(self, buffer: str, nbytes: int,
+                  flags: int = ...) -> Any: ...
+    def send(self, data: str, flags=...) -> int: ...
+    def sendall(self, data: str, flags=...) -> Any:
+        ... # return type: None on success
+
+    @overload
+    def sendto(self, data: str, address: tuple, flags: int = ...) -> int: ...
+    @overload
+    def sendto(self, data: str, address: str, flags: int = ...) -> int: ...
+
+    def setblocking(self, flag: bool) -> None: ...
+    # TODO None valid for the value argument
+    def settimeout(self, value: float) -> None: ...
+
+    @overload
+    def setsockopt(self, level: int, optname: int, value: int) -> None: ...
+    @overload
+    def setsockopt(self, level: int, optname: int, value: str) -> None: ...
+
+    def shutdown(self, how: int) -> None: ...
+
+
+# ----- functions -----
+def create_connection(address: Tuple[str, int],
+                      timeout: float = ...,
+                      source_address: Tuple[str, int] = ...) -> socket: ...
+
+# the 5th tuple item is an address
+def getaddrinfo(
+        host: Optional[str], port: Union[str, int, None], family: int = ...,
+        socktype: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]:
+    ...
+
+def getfqdn(name: str = ...) -> str: ...
+def gethostbyname(hostname: str) -> str: ...
+def gethostbyname_ex(hostname: str) -> Tuple[str, List[str], List[str]]: ...
+def gethostname() -> str: ...
+def gethostbyaddr(ip_address: str) -> Tuple[str, List[str], List[str]]: ...
+def getnameinfo(sockaddr: tuple, flags: int) -> Tuple[str, int]: ...
+def getprotobyname(protocolname: str) -> int: ...
+def getservbyname(servicename: str, protocolname: str = ...) -> int: ...
+def getservbyport(port: int, protocolname: str = ...) -> str: ...
+def socketpair(family: int = ...,
+               type: int = ...,
+               proto: int = ...) -> Tuple[socket, socket]: ...
+def fromfd(fd: int, family: int, type: int, proto: int = ...) -> socket: ...
+def ntohl(x: int) -> int: ...  # param & ret val are 32-bit ints
+def ntohs(x: int) -> int: ...  # param & ret val are 16-bit ints
+def htonl(x: int) -> int: ...  # param & ret val are 32-bit ints
+def htons(x: int) -> int: ...  # param & ret val are 16-bit ints
+def inet_aton(ip_string: str) -> str: ...  # ret val 4 bytes in length
+def inet_ntoa(packed_ip: str) -> str: ...
+def inet_pton(address_family: int, ip_string: str) -> str: ...
+def inet_ntop(address_family: int, packed_ip: str) -> str: ...
+# TODO the timeout may be None
+def getdefaulttimeout() -> float: ...
+def setdefaulttimeout(timeout: float) -> None: ...
diff --git a/typeshed/stdlib/2.7/spwd.pyi b/typeshed/stdlib/2.7/spwd.pyi
new file mode 100644
index 0000000..ee09838
--- /dev/null
+++ b/typeshed/stdlib/2.7/spwd.pyi
@@ -0,0 +1,15 @@
+from typing import List
+
+class struct_spwd(object):
+    sp_nam = ...  # type: str
+    sp_pwd = ...  # type: str
+    sp_lstchg = ...  # type: int
+    sp_min = ...  # type: int
+    sp_max = ...  # type: int
+    sp_warn = ...  # type: int
+    sp_inact = ...  # type: int
+    sp_expire = ...  # type: int
+    sp_flag = ...  # type: int
+
+def getspall() -> List[struct_spwd]: pass
+def getspnam(name: str) -> struct_spwd: pass
diff --git a/typeshed/stdlib/2.7/sqlite3/__init__.pyi b/typeshed/stdlib/2.7/sqlite3/__init__.pyi
new file mode 100644
index 0000000..28bc3ba
--- /dev/null
+++ b/typeshed/stdlib/2.7/sqlite3/__init__.pyi
@@ -0,0 +1,5 @@
+# Stubs for sqlite3 (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from sqlite3.dbapi2 import *
diff --git a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi b/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
new file mode 100644
index 0000000..aa423c2
--- /dev/null
+++ b/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
@@ -0,0 +1,237 @@
+# Stubs for sqlite3.dbapi2 (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+paramstyle = ... # type: Any
+threadsafety = ... # type: Any
+apilevel = ... # type: Any
+Date = ... # type: Any
+Time = ... # type: Any
+Timestamp = ... # type: Any
+
+def DateFromTicks(ticks): ...
+def TimeFromTicks(ticks): ...
+def TimestampFromTicks(ticks): ...
+
+version_info = ... # type: Any
+sqlite_version_info = ... # type: Any
+Binary = ... # type: Any
+
+def register_adapters_and_converters(): ...
+
+# The remaining definitions are imported from _sqlite3.
+
+PARSE_COLNAMES = ... # type: int
+PARSE_DECLTYPES = ... # type: int
+SQLITE_ALTER_TABLE = ... # type: int
+SQLITE_ANALYZE = ... # type: int
+SQLITE_ATTACH = ... # type: int
+SQLITE_CREATE_INDEX = ... # type: int
+SQLITE_CREATE_TABLE = ... # type: int
+SQLITE_CREATE_TEMP_INDEX = ... # type: int
+SQLITE_CREATE_TEMP_TABLE = ... # type: int
+SQLITE_CREATE_TEMP_TRIGGER = ... # type: int
+SQLITE_CREATE_TEMP_VIEW = ... # type: int
+SQLITE_CREATE_TRIGGER = ... # type: int
+SQLITE_CREATE_VIEW = ... # type: int
+SQLITE_DELETE = ... # type: int
+SQLITE_DENY = ... # type: int
+SQLITE_DETACH = ... # type: int
+SQLITE_DROP_INDEX = ... # type: int
+SQLITE_DROP_TABLE = ... # type: int
+SQLITE_DROP_TEMP_INDEX = ... # type: int
+SQLITE_DROP_TEMP_TABLE = ... # type: int
+SQLITE_DROP_TEMP_TRIGGER = ... # type: int
+SQLITE_DROP_TEMP_VIEW = ... # type: int
+SQLITE_DROP_TRIGGER = ... # type: int
+SQLITE_DROP_VIEW = ... # type: int
+SQLITE_IGNORE = ... # type: int
+SQLITE_INSERT = ... # type: int
+SQLITE_OK = ... # type: int
+SQLITE_PRAGMA = ... # type: int
+SQLITE_READ = ... # type: int
+SQLITE_REINDEX = ... # type: int
+SQLITE_SELECT = ... # type: int
+SQLITE_TRANSACTION = ... # type: int
+SQLITE_UPDATE = ... # type: int
+adapters = ... # type: Any
+converters = ... # type: Any
+sqlite_version = ... # type: str
+version = ... # type: str
+
+def adapt(obj, protocol, alternate): ...
+def complete_statement(sql): ...
+def connect(*args, **kwargs): ...
+def enable_callback_tracebacks(flag): ...
+def enable_shared_cache(do_enable): ...
+def register_adapter(type, callable): ...
+def register_converter(typename, callable): ...
+
+class Cache:
+    def __init__(self, *args, **kwargs): ...
+    def display(self, *args, **kwargs): ...
+    def get(self, *args, **kwargs): ...
+
+class Connection:
+    DataError = ... # type: Any
+    DatabaseError = ... # type: Any
+    Error = ... # type: Any
+    IntegrityError = ... # type: Any
+    InterfaceError = ... # type: Any
+    InternalError = ... # type: Any
+    NotSupportedError = ... # type: Any
+    OperationalError = ... # type: Any
+    ProgrammingError = ... # type: Any
+    Warning = ... # type: Any
+    in_transaction = ... # type: Any
+    isolation_level = ... # type: Any
+    row_factory = ... # type: Any
+    text_factory = ... # type: Any
+    total_changes = ... # type: Any
+    def __init__(self, *args, **kwargs): ...
+    def close(self, *args, **kwargs): ...
+    def commit(self, *args, **kwargs): ...
+    def create_aggregate(self, *args, **kwargs): ...
+    def create_collation(self, *args, **kwargs): ...
+    def create_function(self, *args, **kwargs): ...
+    def cursor(self, *args, **kwargs): ...
+    def execute(self, *args, **kwargs): ...
+    def executemany(self, *args, **kwargs): ...
+    def executescript(self, *args, **kwargs): ...
+    def interrupt(self, *args, **kwargs): ...
+    def iterdump(self, *args, **kwargs): ...
+    def rollback(self, *args, **kwargs): ...
+    def set_authorizer(self, *args, **kwargs): ...
+    def set_progress_handler(self, *args, **kwargs): ...
+    def set_trace_callback(self, *args, **kwargs): ...
+    def __call__(self, *args, **kwargs): ...
+    def __enter__(self, *args, **kwargs): ...
+    def __exit__(self, *args, **kwargs): ...
+
+class Cursor:
+    arraysize = ... # type: Any
+    connection = ... # type: Any
+    description = ... # type: Any
+    lastrowid = ... # type: Any
+    row_factory = ... # type: Any
+    rowcount = ... # type: Any
+    def __init__(self, *args, **kwargs): ...
+    def close(self, *args, **kwargs): ...
+    def execute(self, *args, **kwargs): ...
+    def executemany(self, *args, **kwargs): ...
+    def executescript(self, *args, **kwargs): ...
+    def fetchall(self, *args, **kwargs): ...
+    def fetchmany(self, *args, **kwargs): ...
+    def fetchone(self, *args, **kwargs): ...
+    def setinputsizes(self, *args, **kwargs): ...
+    def setoutputsize(self, *args, **kwargs): ...
+    def __iter__(self): ...
+    def __next__(self): ...
+
+class DataError(DatabaseError): ...
+
+class DatabaseError(Error): ...
+
+class Error(Exception): ...
+
+class IntegrityError(DatabaseError): ...
+
+class InterfaceError(Error): ...
+
+class InternalError(DatabaseError): ...
+
+class NotSupportedError(DatabaseError): ...
+
+class OperationalError(DatabaseError): ...
+
+class OptimizedUnicode:
+    maketrans = ... # type: Any
+    def __init__(self, *args, **kwargs): ...
+    def capitalize(self, *args, **kwargs): ...
+    def casefold(self, *args, **kwargs): ...
+    def center(self, *args, **kwargs): ...
+    def count(self, *args, **kwargs): ...
+    def encode(self, *args, **kwargs): ...
+    def endswith(self, *args, **kwargs): ...
+    def expandtabs(self, *args, **kwargs): ...
+    def find(self, *args, **kwargs): ...
+    def format(self, *args, **kwargs): ...
+    def format_map(self, *args, **kwargs): ...
+    def index(self, *args, **kwargs): ...
+    def isalnum(self, *args, **kwargs): ...
+    def isalpha(self, *args, **kwargs): ...
+    def isdecimal(self, *args, **kwargs): ...
+    def isdigit(self, *args, **kwargs): ...
+    def isidentifier(self, *args, **kwargs): ...
+    def islower(self, *args, **kwargs): ...
+    def isnumeric(self, *args, **kwargs): ...
+    def isprintable(self, *args, **kwargs): ...
+    def isspace(self, *args, **kwargs): ...
+    def istitle(self, *args, **kwargs): ...
+    def isupper(self, *args, **kwargs): ...
+    def join(self, *args, **kwargs): ...
+    def ljust(self, *args, **kwargs): ...
+    def lower(self, *args, **kwargs): ...
+    def lstrip(self, *args, **kwargs): ...
+    def partition(self, *args, **kwargs): ...
+    def replace(self, *args, **kwargs): ...
+    def rfind(self, *args, **kwargs): ...
+    def rindex(self, *args, **kwargs): ...
+    def rjust(self, *args, **kwargs): ...
+    def rpartition(self, *args, **kwargs): ...
+    def rsplit(self, *args, **kwargs): ...
+    def rstrip(self, *args, **kwargs): ...
+    def split(self, *args, **kwargs): ...
+    def splitlines(self, *args, **kwargs): ...
+    def startswith(self, *args, **kwargs): ...
+    def strip(self, *args, **kwargs): ...
+    def swapcase(self, *args, **kwargs): ...
+    def title(self, *args, **kwargs): ...
+    def translate(self, *args, **kwargs): ...
+    def upper(self, *args, **kwargs): ...
+    def zfill(self, *args, **kwargs): ...
+    def __add__(self, other): ...
+    def __contains__(self, *args, **kwargs): ...
+    def __eq__(self, other): ...
+    def __format__(self, *args, **kwargs): ...
+    def __ge__(self, other): ...
+    def __getitem__(self, index): ...
+    def __getnewargs__(self, *args, **kwargs): ...
+    def __gt__(self, other): ...
+    def __hash__(self): ...
+    def __iter__(self): ...
+    def __le__(self, other): ...
+    def __len__(self, *args, **kwargs): ...
+    def __lt__(self, other): ...
+    def __mod__(self, other): ...
+    def __mul__(self, other): ...
+    def __ne__(self, other): ...
+    def __rmod__(self, other): ...
+    def __rmul__(self, other): ...
+    def __sizeof__(self): ...
+
+class PrepareProtocol:
+    def __init__(self, *args, **kwargs): ...
+
+class ProgrammingError(DatabaseError): ...
+
+class Row:
+    def __init__(self, *args, **kwargs): ...
+    def keys(self, *args, **kwargs): ...
+    def __eq__(self, other): ...
+    def __ge__(self, other): ...
+    def __getitem__(self, index): ...
+    def __gt__(self, other): ...
+    def __hash__(self): ...
+    def __iter__(self): ...
+    def __le__(self, other): ...
+    def __len__(self, *args, **kwargs): ...
+    def __lt__(self, other): ...
+    def __ne__(self, other): ...
+
+class Statement:
+    def __init__(self, *args, **kwargs): ...
+
+class Warning(Exception): ...
diff --git a/typeshed/stdlib/2.7/ssl.pyi b/typeshed/stdlib/2.7/ssl.pyi
new file mode 100644
index 0000000..c6ca2fa
--- /dev/null
+++ b/typeshed/stdlib/2.7/ssl.pyi
@@ -0,0 +1,5 @@
+# Stubs for ssl (incomplete)
+
+import socket
+
+class SSLError(socket.error): ...
diff --git a/typeshed/stdlib/2.7/stat.pyi b/typeshed/stdlib/2.7/stat.pyi
new file mode 100644
index 0000000..a83d880
--- /dev/null
+++ b/typeshed/stdlib/2.7/stat.pyi
@@ -0,0 +1,58 @@
+def S_ISDIR(mode: int) -> bool: ...
+def S_ISCHR(mode: int) -> bool: ...
+def S_ISBLK(mode: int) -> bool: ...
+def S_ISREG(mode: int) -> bool: ...
+def S_ISFIFO(mode: int) -> bool: ...
+def S_ISLNK(mode: int) -> bool: ...
+def S_ISSOCK(mode: int) -> bool: ...
+
+def S_IMODE(mode: int) -> int: ...
+def S_IFMT(mode: int) -> int: ...
+
+ST_MODE = 0
+ST_INO = 0
+ST_DEV = 0
+ST_NLINK = 0
+ST_UID = 0
+ST_GID = 0
+ST_SIZE = 0
+ST_ATIME = 0
+ST_MTIME = 0
+ST_CTIME = 0
+ST_IFSOCK = 0
+ST_IFLNK = 0
+ST_IFREG = 0
+ST_IFBLK = 0
+ST_IFDIR = 0
+ST_IFCHR = 0
+ST_IFIFO = 0
+S_ISUID = 0
+S_ISGID = 0
+S_ISVTX = 0
+S_IRWXU = 0
+S_IRUSR = 0
+S_IWUSR = 0
+S_IXUSR = 0
+S_IRGRP = 0
+S_IWGRP = 0
+S_IXGRP = 0
+S_IRWXO = 0
+S_IROTH = 0
+S_IWOTH = 0
+S_IXOTH = 0
+S_ENFMT = 0
+S_IREAD = 0
+S_IWRITE = 0
+S_IEXEC = 0
+UF_NODUMP = 0
+UF_IMMUTABLE = 0
+UF_APPEND = 0
+UF_OPAQUE = 0
+UF_NOUNLINK = 0
+UF_COMPRESSED = 0
+UF_HIDDEN = 0
+SF_ARCHIVED = 0
+SF_IMMUTABLE = 0
+SF_APPEND = 0
+SF_NOUNLINK = 0
+SF_SNAPSHOT = 0
diff --git a/typeshed/stdlib/2.7/string.pyi b/typeshed/stdlib/2.7/string.pyi
new file mode 100644
index 0000000..09fcc5d
--- /dev/null
+++ b/typeshed/stdlib/2.7/string.pyi
@@ -0,0 +1,74 @@
+# Stubs for string
+
+# Based on http://docs.python.org/3.2/library/string.html
+
+from typing import Mapping, Sequence, Any, Optional, Union, List, Tuple, Iterable, AnyStr
+
+ascii_letters = ...  # type: str
+ascii_lowercase = ...  # type: str
+ascii_uppercase = ...  # type: str
+digits = ...  # type: str
+hexdigits = ...  # type: str
+letters = ...  # type: str
+lowercase = ...  # type: str
+octdigits = ...  # type: str
+punctuation = ...  # type: str
+printable = ...  # type: str
+uppercase = ...  # type: str
+whitespace = ...  # type: str
+
+def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ...
+# TODO: originally named 'from'
+def maketrans(_from: str, to: str) -> str: ...
+def atof(s: unicode) -> float: ...
+def atoi(s: unicode, base: int = ...) -> int: ...
+def atol(s: unicode, base: int = ...) -> int: ...
+def capitalize(word: AnyStr) -> AnyStr: ...
+def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def lower(s: AnyStr) -> AnyStr: ...
+def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ...
+def rsplit(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ...
+def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ...
+def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ...
+def joinfields(word: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ...
+def lstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ...
+def rstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ...
+def strip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ...
+def swapcase(s: AnyStr) -> AnyStr: ...
+def translate(s: str, table: str, deletechars: str = ...) -> str: ...
+def upper(s: AnyStr) -> AnyStr: ...
+def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def zfill(s: AnyStr, width: int) -> AnyStr: ...
+def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ...
+
+class Template(object):
+    # TODO: Unicode support?
+    template = ...  # type: str
+
+    def __init__(self, template: str) -> None: ...
+    def substitute(self, mapping: Mapping[str, str], **kwds: str) -> str: ...
+    def safe_substitute(self, mapping: Mapping[str, str],
+                        **kwds: str) -> str: ...
+
+# TODO(MichalPokorny): This is probably badly and/or loosely typed.
+class Formatter(object):
+    def format(self, format_string: str, *args, **kwargs) -> str: ...
+    def vformat(self, format_string: str, args: Sequence[Any],
+                kwargs: Mapping[str, Any]) -> str: ...
+    def parse(self, format_string: str) -> Iterable[Tuple[str, str, str, str]]: ...
+    def get_field(self, field_name: str, args: Sequence[Any],
+                  kwargs: Mapping[str, Any]) -> Any: ...
+    def get_value(self, key: Union[int, str], args: Sequence[Any],
+                  kwargs: Mapping[str, Any]) -> Any:
+        raise IndexError()
+        raise KeyError()
+    def check_unused_args(self, used_args: Sequence[Union[int, str]], args: Sequence[Any],
+                          kwargs: Mapping[str, Any]) -> None: ...
+    def format_field(self, value: Any, format_spec: str) -> Any: ...
+    def convert_field(self, value: Any, conversion: str) -> Any: ...
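
As a hedged illustration of the Template class stubbed above (not part of the patch), a short substitution example:

    from string import Template

    t = Template('Hello, $name!')
    print(t.substitute({'name': 'world'}))           # 'Hello, world!'
    print(t.safe_substitute({}, name='typeshed'))    # keyword arguments are also accepted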
diff --git a/typeshed/stdlib/2.7/strop.pyi b/typeshed/stdlib/2.7/strop.pyi
new file mode 100644
index 0000000..e37abe9
--- /dev/null
+++ b/typeshed/stdlib/2.7/strop.pyi
@@ -0,0 +1,73 @@
+"""Stub file for the 'strop' module."""
+
+from typing import List, Sequence
+
+lowercase = ...  # type: str
+uppercase = ...  # type: str
+whitespace = ...  # type: str
+
+def atof(a: str) -> float:
+    raise DeprecationWarning()
+
+def atoi(a: str, base:int = ...) -> int:
+    raise DeprecationWarning()
+
+def atol(a: str, base:int = ...) -> long:
+    raise DeprecationWarning()
+
+def capitalize(s: str) -> str:
+    raise DeprecationWarning()
+
+def count(s: str, sub: str, start: int = ..., end: int = ...) -> int:
+    raise DeprecationWarning()
+
+def expandtabs(string:str, tabsize:int = ...) -> str:
+    raise DeprecationWarning()
+    raise OverflowError()
+
+def find(s: str, sub: str, start: int = ..., end: int = ...) -> int:
+    raise DeprecationWarning()
+
+def join(list: Sequence[str], sep:str = ...) -> str:
+    raise DeprecationWarning()
+    raise OverflowError()
+
+def joinfields(list: Sequence[str], sep:str = ...) -> str:
+    raise DeprecationWarning()
+    raise OverflowError()
+
+def lower(s: str) -> str:
+    raise DeprecationWarning()
+
+def lstrip(s: str) -> str:
+    raise DeprecationWarning()
+
+def maketrans(frm: str, to: str) -> str: ...
+
+def replace(s: str, old: str, new: str, maxsplit:int = ...) -> str:
+    raise DeprecationWarning()
+
+def rfind(s: str, sub: str, start: int = ..., end: int = ...) -> int:
+    raise DeprecationWarning()
+
+def rstrip(s: str) -> str:
+    raise DeprecationWarning()
+
+def split(s: str, sep: str, maxsplit: int = ...) -> List[str]:
+    raise DeprecationWarning()
+
+def splitfields(s: str, sep: str, maxsplit: int = ...) -> List[str]:
+    raise DeprecationWarning()
+
+def strip(s: str) -> str:
+    raise DeprecationWarning()
+
+def swapcase(s: str) -> str:
+    raise DeprecationWarning()
+
+def translate(s: str, table: str, deletechars: str = ...) -> str:
+    raise DeprecationWarning()
+
+def upper(s: str) -> str:
+    raise DeprecationWarning()
+
diff --git a/typeshed/stdlib/2.7/struct.pyi b/typeshed/stdlib/2.7/struct.pyi
new file mode 100644
index 0000000..c1b3283
--- /dev/null
+++ b/typeshed/stdlib/2.7/struct.pyi
@@ -0,0 +1,28 @@
+# Stubs for struct for Python 2.7
+# Based on https://docs.python.org/2/library/struct.html
+
+from typing import Any, Tuple
+
+class error(Exception): ...
+
+def pack(fmt: str, *v: Any) -> str: ...
+# TODO buffer type
+def pack_into(fmt: str, buffer: Any, offset: int, *v: Any) -> None: ...
+
+# TODO buffer type
+def unpack(fmt: str, buffer: Any) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: str, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
+
+def calcsize(fmt: str) -> int: ...
+
+class Struct:
+    format = ... # type: str
+    size = ... # type: int
+
+    def __init__(self, format: str) -> None: ...
+
+    def pack(self, *v: Any) -> str: ...
+    # TODO buffer type
+    def pack_into(self, buffer: Any, offset: int, *v: Any) -> None: ...
+    def unpack(self, buffer: Any) -> Tuple[Any, ...]: ...
+    def unpack_from(self, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
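
A minimal sketch of how the struct interface typed above is used (illustrative only, not part of the patch):

    import struct

    packed = struct.pack('>hl', 1, 2)        # a str in Python 2
    assert struct.unpack('>hl', packed) == (1, 2)

    s = struct.Struct('>hl')                 # precompiled format
    assert s.size == struct.calcsize('>hl') == 6
    assert s.unpack(s.pack(1, 2)) == (1, 2)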
diff --git a/typeshed/stdlib/2.7/subprocess.pyi b/typeshed/stdlib/2.7/subprocess.pyi
new file mode 100644
index 0000000..eaa5892
--- /dev/null
+++ b/typeshed/stdlib/2.7/subprocess.pyi
@@ -0,0 +1,79 @@
+# Stubs for subprocess
+
+# Based on http://docs.python.org/2/library/subprocess.html and Python 3 stub
+
+from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Union, Optional
+
+_FILE = Union[int, IO[Any]]
+
+# TODO force keyword arguments
+# TODO more keyword arguments (from Popen)
+def call(args: Sequence[str], *,
+         stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ...,
+         shell: bool = ..., env: Mapping[str, str] = ...,
+         cwd: str = ...) -> int: ...
+def check_call(args: Sequence[str], *,
+               stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ...,
+               shell: bool = ..., env: Mapping[str, str] = ..., cwd: str = ...,
+               close_fds: Sequence[_FILE] = ..., preexec_fn: Callable[[], Any] = ...) -> int: ...
+def check_output(args: Sequence[str], *,
+                 stdin: _FILE = ..., stderr: _FILE = ...,
+                 shell: bool = ..., universal_newlines: bool = ...,
+                 env: Mapping[str, str] = ..., cwd: str = ...) -> str: ...
+
+PIPE = ... # type: int
+STDOUT = ... # type: int
+
+class CalledProcessError(Exception):
+    returncode = 0
+    cmd = ...  # type: str
+    output = ...  # type: str # May be None
+
+    def __init__(self, returncode: int, cmd: str, output: str) -> None: ...
+
+class Popen:
+    stdin = ... # type: Optional[IO[Any]]
+    stdout = ... # type: Optional[IO[Any]]
+    stderr = ... # type: Optional[IO[Any]]
+    pid = 0
+    returncode = 0
+
+    def __init__(self,
+                 args: Sequence[str],
+                 bufsize: int = ...,
+                 executable: str = ...,
+                 stdin: _FILE = ...,
+                 stdout: _FILE = ...,
+                 stderr: _FILE = ...,
+                 preexec_fn: Callable[[], Any] = ...,
+                 close_fds: bool = ...,
+                 shell: bool = ...,
+                 cwd: str = ...,
+                 env: Mapping[str, str] = ...,
+                 universal_newlines: bool = ...,
+                 startupinfo: Any = ...,
+                 creationflags: int = ...) -> None: ...
+
+    def poll(self) -> int: ...
+    def wait(self) -> int: ...
+    # Return str/bytes
+    def communicate(self, input: str = ...) -> Tuple[str, str]: ...
+    def send_signal(self, signal: int) -> None: ...
+    def terminate(self) -> None: ...
+    def kill(self) -> None: ...
+    def __enter__(self) -> 'Popen': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
+def getoutput(cmd: str) -> str: ...
+
+# Windows-only: STARTUPINFO etc.
+
+STD_INPUT_HANDLE = ... # type: Any
+STD_OUTPUT_HANDLE = ... # type: Any
+STD_ERROR_HANDLE = ... # type: Any
+SW_HIDE = ... # type: Any
+STARTF_USESTDHANDLES = ... # type: Any
+STARTF_USESHOWWINDOW = ... # type: Any
+CREATE_NEW_CONSOLE = ... # type: Any
+CREATE_NEW_PROCESS_GROUP = ... # type: Any
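
For illustration, a small usage sketch of the subprocess API stubbed above (not part of the patch; the 'echo' command is assumed to exist only for the example):

    import subprocess

    out = subprocess.check_output(['echo', 'hello'])     # 'hello\n' as a str in Python 2

    p = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    assert p.returncode == 0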
diff --git a/typeshed/stdlib/2.7/sys.pyi b/typeshed/stdlib/2.7/sys.pyi
new file mode 100644
index 0000000..8bd920f
--- /dev/null
+++ b/typeshed/stdlib/2.7/sys.pyi
@@ -0,0 +1,128 @@
+"""Stubs for the 'sys' module."""
+
+from typing import (
+    IO, Union, List, Sequence, Any, Dict, Tuple, BinaryIO, Optional, Callable, overload
+)
+from types import FrameType, ModuleType, TracebackType
+
+class _flags:
+    bytes_warning = ...  # type: int
+    debug = ...  # type: int
+    division_new = ...  # type: int
+    division_warning = ...  # type: int
+    dont_write_bytecode = ...  # type: int
+    hash_randomization = ...  # type: int
+    ignore_environment = ...  # type: int
+    inspect = ...  # type: int
+    interactive = ...  # type: int
+    no_site = ...  # type: int
+    no_user_site = ...  # type: int
+    optimize = ...  # type: int
+    py3k_warning = ...  # type: int
+    tabcheck = ...  # type: int
+    unicode = ...  # type: int
+    verbose = ...  # type: int
+
+class _float_info:
+    max = ...  # type: float
+    max_exp = ...  # type: int
+    max_10_exp = ...  # type: int
+    min = ...  # type: float
+    min_exp = ...  # type: int
+    min_10_exp = ...  # type: int
+    dig = ...  # type: int
+    mant_dig = ...  # type: int
+    epsilon = ...  # type: float
+    radix = ...  # type: int
+    rounds = ...  # type: int
+
+class _version_info(Tuple[int, int, int, str, int]):
+    major = 0
+    minor = 0
+    micro = 0
+    releaselevel = ...  # type: str
+    serial = 0
+
+_mercurial = ...  # type: Tuple[str, str, str]
+api_version = ...  # type: int
+argv = ...  # type: List[str]
+builtin_module_names = ...  # type: Tuple[str, ...]
+byteorder = ...  # type: str
+copyright = ...  # type: str
+dont_write_bytecode = ...  # type: bool
+exec_prefix = ...  # type: str
+executable = ...  # type: str
+flags = ...  # type: _flags
+float_repr_style = ...  # type: str
+hexversion = ...  # type: int
+long_info = ...  # type: object
+maxint = ...  # type: int
+maxsize = ...  # type: int
+maxunicode = ...  # type: int
+modules = ...  # type: Dict[str, ModuleType]
+path = ...  # type: List[str]
+platform = ...  # type: str
+prefix = ...  # type: str
+py3kwarning = ...  # type: bool
+__stderr__ = ...  # type: IO[str]
+__stdin__ = ...  # type: IO[str]
+__stdout__ = ...  # type: IO[str]
+stderr = ...  # type: IO[str]
+stdin = ...  # type: IO[str]
+stdout = ...  # type: IO[str]
+subversion = ...  # type: Tuple[str, str, str]
+version = ...  # type: str
+warnoptions = ...  # type: object
+float_info = ...  # type: _float_info
+version_info = ...  # type: _version_info
+ps1 = ...  # type: str
+ps2 = ...  # type: str
+last_type = ...  # type: type
+last_value = ...  # type: BaseException
+last_traceback = ...  # type: TracebackType
+# TODO precise types
+meta_path = ...  # type: List[Any]
+path_hooks = ...  # type: List[Any]
+path_importer_cache = ...  # type: Dict[str, Any]
+displayhook = ...  # type: Optional[Callable[[int], None]]
+excepthook = ...  # type: Optional[Callable[[type, BaseException, TracebackType], None]]
+
+class _WindowsVersionType:
+    major = ...  # type: Any
+    minor = ...  # type: Any
+    build = ...  # type: Any
+    platform = ...  # type: Any
+    service_pack = ...  # type: Any
+    service_pack_major = ...  # type: Any
+    service_pack_minor = ...  # type: Any
+    suite_mask = ...  # type: Any
+    product_type = ...  # type: Any
+
+def getwindowsversion() -> _WindowsVersionType: ...
+
+def _clear_type_cache() -> None: ...
+def _current_frames() -> Dict[int, FrameType]: ...
+def _getframe(depth: int = ...) -> FrameType: ...
+def call_tracing(fn: Any, args: Any) -> Any: ...
+def __displayhook__(value: int) -> None: ...
+def __excepthook__(type_: type, value: BaseException, traceback: TracebackType) -> None: ...
+def exc_clear() -> None:
+    raise DeprecationWarning()
+def exc_info() -> Tuple[type, BaseException, TracebackType]: ...
+# sys.exit() accepts an optional argument of anything printable
+def exit(arg: Any = ...) -> None:
+    raise SystemExit()
+def getcheckinterval() -> int: ...  # deprecated
+def getdefaultencoding() -> str: ...
+def getdlopenflags() -> int: ...
+def getfilesystemencoding() -> Union[str, None]: ...
+def getrefcount(object) -> int: ...
+def getrecursionlimit() -> int: ...
+def getsizeof(obj: object, default: int = ...) -> int: ...
+def getprofile() -> None: ...
+def gettrace() -> None: ...
+def setcheckinterval(interval: int) -> None: ...  # deprecated
+def setdlopenflags(n: int) -> None: ...
+def setprofile(profilefunc: Any) -> None: ... # TODO type
+def setrecursionlimit(limit: int) -> None: ...
+def settrace(tracefunc: Any) -> None: ... # TODO type
diff --git a/typeshed/stdlib/2.7/syslog.pyi b/typeshed/stdlib/2.7/syslog.pyi
new file mode 100644
index 0000000..82e0b9a
--- /dev/null
+++ b/typeshed/stdlib/2.7/syslog.pyi
@@ -0,0 +1,38 @@
+LOG_ALERT = ...  # type: int
+LOG_AUTH = ...  # type: int
+LOG_CONS = ...  # type: int
+LOG_CRIT = ...  # type: int
+LOG_CRON = ...  # type: int
+LOG_DAEMON = ...  # type: int
+LOG_DEBUG = ...  # type: int
+LOG_EMERG = ...  # type: int
+LOG_ERR = ...  # type: int
+LOG_INFO = ...  # type: int
+LOG_KERN = ...  # type: int
+LOG_LOCAL0 = ...  # type: int
+LOG_LOCAL1 = ...  # type: int
+LOG_LOCAL2 = ...  # type: int
+LOG_LOCAL3 = ...  # type: int
+LOG_LOCAL4 = ...  # type: int
+LOG_LOCAL5 = ...  # type: int
+LOG_LOCAL6 = ...  # type: int
+LOG_LOCAL7 = ...  # type: int
+LOG_LPR = ...  # type: int
+LOG_MAIL = ...  # type: int
+LOG_NDELAY = ...  # type: int
+LOG_NEWS = ...  # type: int
+LOG_NOTICE = ...  # type: int
+LOG_NOWAIT = ...  # type: int
+LOG_PERROR = ...  # type: int
+LOG_PID = ...  # type: int
+LOG_SYSLOG = ...  # type: int
+LOG_USER = ...  # type: int
+LOG_UUCP = ...  # type: int
+LOG_WARNING = ...  # type: int
+
+def LOG_MASK(a: int) -> int: ...
+def LOG_UPTO(a: int) -> int: ...
+def closelog() -> None: ...
+def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
+def setlogmask(x: int) -> int: ...
+def syslog(priority: int, message: str) -> None: ...
diff --git a/typeshed/stdlib/2.7/tarfile.pyi b/typeshed/stdlib/2.7/tarfile.pyi
new file mode 100644
index 0000000..d9a4d50
--- /dev/null
+++ b/typeshed/stdlib/2.7/tarfile.pyi
@@ -0,0 +1,237 @@
+# Stubs for tarfile (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class TarError(Exception): ...
+class ExtractError(TarError): ...
+class ReadError(TarError): ...
+class CompressionError(TarError): ...
+class StreamError(TarError): ...
+class HeaderError(TarError): ...
+class EmptyHeaderError(HeaderError): ...
+class TruncatedHeaderError(HeaderError): ...
+class EOFHeaderError(HeaderError): ...
+class InvalidHeaderError(HeaderError): ...
+class SubsequentHeaderError(HeaderError): ...
+
+class _LowLevelFile:
+    fd = ... # type: Any
+    def __init__(self, name, mode) -> None: ...
+    def close(self): ...
+    def read(self, size): ...
+    def write(self, s): ...
+
+class _Stream:
+    name = ... # type: Any
+    mode = ... # type: Any
+    comptype = ... # type: Any
+    fileobj = ... # type: Any
+    bufsize = ... # type: Any
+    buf = ... # type: Any
+    pos = ... # type: Any
+    closed = ... # type: Any
+    zlib = ... # type: Any
+    crc = ... # type: Any
+    dbuf = ... # type: Any
+    cmp = ... # type: Any
+    def __init__(self, name, mode, comptype, fileobj, bufsize) -> None: ...
+    def __del__(self): ...
+    def write(self, s): ...
+    def close(self): ...
+    def tell(self): ...
+    def seek(self, pos=...): ...
+    def read(self, size=...): ...
+
+class _StreamProxy:
+    fileobj = ... # type: Any
+    buf = ... # type: Any
+    def __init__(self, fileobj) -> None: ...
+    def read(self, size): ...
+    def getcomptype(self): ...
+    def close(self): ...
+
+class _BZ2Proxy:
+    blocksize = ... # type: Any
+    fileobj = ... # type: Any
+    mode = ... # type: Any
+    name = ... # type: Any
+    def __init__(self, fileobj, mode) -> None: ...
+    pos = ... # type: Any
+    bz2obj = ... # type: Any
+    buf = ... # type: Any
+    def init(self): ...
+    def read(self, size): ...
+    def seek(self, pos): ...
+    def tell(self): ...
+    def write(self, data): ...
+    def close(self): ...
+
+class _FileInFile:
+    fileobj = ... # type: Any
+    offset = ... # type: Any
+    size = ... # type: Any
+    sparse = ... # type: Any
+    position = ... # type: Any
+    def __init__(self, fileobj, offset, size, sparse=...) -> None: ...
+    def tell(self): ...
+    def seek(self, position): ...
+    def read(self, size=...): ...
+    def readnormal(self, size): ...
+    def readsparse(self, size): ...
+    def readsparsesection(self, size): ...
+
+class ExFileObject:
+    blocksize = ... # type: Any
+    fileobj = ... # type: Any
+    name = ... # type: Any
+    mode = ... # type: Any
+    closed = ... # type: Any
+    size = ... # type: Any
+    position = ... # type: Any
+    buffer = ... # type: Any
+    def __init__(self, tarfile, tarinfo) -> None: ...
+    def read(self, size=...): ...
+    def readline(self, size=...): ...
+    def readlines(self): ...
+    def tell(self): ...
+    def seek(self, pos, whence=...): ...
+    def close(self): ...
+    def __iter__(self): ...
+
+class TarInfo:
+    name = ... # type: Any
+    mode = ... # type: Any
+    uid = ... # type: Any
+    gid = ... # type: Any
+    size = ... # type: Any
+    mtime = ... # type: Any
+    chksum = ... # type: Any
+    type = ... # type: Any
+    linkname = ... # type: Any
+    uname = ... # type: Any
+    gname = ... # type: Any
+    devmajor = ... # type: Any
+    devminor = ... # type: Any
+    offset = ... # type: Any
+    offset_data = ... # type: Any
+    pax_headers = ... # type: Any
+    def __init__(self, name=...) -> None: ...
+    path = ... # type: Any
+    linkpath = ... # type: Any
+    def get_info(self, encoding, errors): ...
+    def tobuf(self, format=..., encoding=..., errors=...): ...
+    def create_ustar_header(self, info): ...
+    def create_gnu_header(self, info): ...
+    def create_pax_header(self, info, encoding, errors): ...
+    @classmethod
+    def create_pax_global_header(cls, pax_headers): ...
+    @classmethod
+    def frombuf(cls, buf): ...
+    @classmethod
+    def fromtarfile(cls, tarfile): ...
+    def isreg(self): ...
+    def isfile(self): ...
+    def isdir(self): ...
+    def issym(self): ...
+    def islnk(self): ...
+    def ischr(self): ...
+    def isblk(self): ...
+    def isfifo(self): ...
+    def issparse(self): ...
+    def isdev(self): ...
+
+class TarFile:
+    debug = ... # type: Any
+    dereference = ... # type: Any
+    ignore_zeros = ... # type: Any
+    errorlevel = ... # type: Any
+    format = ... # type: Any
+    encoding = ... # type: Any
+    errors = ... # type: Any
+    tarinfo = ... # type: Any
+    fileobject = ... # type: Any
+    mode = ... # type: Any
+    name = ... # type: Any
+    fileobj = ... # type: Any
+    pax_headers = ... # type: Any
+    closed = ... # type: Any
+    members = ... # type: Any
+    offset = ... # type: Any
+    inodes = ... # type: Any
+    firstmember = ... # type: Any
+    def __init__(self, name=..., mode=..., fileobj=..., format=..., tarinfo=..., dereference=..., ignore_zeros=..., encoding=..., errors=..., pax_headers=..., debug=..., errorlevel=...) -> None: ...
+    posix = ... # type: Any
+    @classmethod
+    def open(cls, name=..., mode=..., fileobj=..., bufsize=..., **kwargs): ...
+    @classmethod
+    def taropen(cls, name, mode=..., fileobj=..., **kwargs): ...
+    @classmethod
+    def gzopen(cls, name, mode=..., fileobj=..., compresslevel=..., **kwargs): ...
+    @classmethod
+    def bz2open(cls, name, mode=..., fileobj=..., compresslevel=..., **kwargs): ...
+    OPEN_METH = ... # type: Any
+    def close(self): ...
+    def getmember(self, name): ...
+    def getmembers(self): ...
+    def getnames(self): ...
+    def gettarinfo(self, name=..., arcname=..., fileobj=...): ...
+    def list(self, verbose=...): ...
+    def add(self, name, arcname=..., recursive=..., exclude=..., filter=...): ...
+    def addfile(self, tarinfo, fileobj=...): ...
+    def extractall(self, path=..., members=...): ...
+    def extract(self, member, path=...): ...
+    def extractfile(self, member): ...
+    def makedir(self, tarinfo, targetpath): ...
+    def makefile(self, tarinfo, targetpath): ...
+    def makeunknown(self, tarinfo, targetpath): ...
+    def makefifo(self, tarinfo, targetpath): ...
+    def makedev(self, tarinfo, targetpath): ...
+    def makelink(self, tarinfo, targetpath): ...
+    def chown(self, tarinfo, targetpath): ...
+    def chmod(self, tarinfo, targetpath): ...
+    def utime(self, tarinfo, targetpath): ...
+    def next(self): ...
+    def __iter__(self): ...
+    def __enter__(self): ...
+    def __exit__(self, type, value, traceback): ...
+
+class TarIter:
+    tarfile = ... # type: Any
+    index = ... # type: Any
+    def __init__(self, tarfile) -> None: ...
+    def __iter__(self): ...
+    def next(self): ...
+
+class _section:
+    offset = ... # type: Any
+    size = ... # type: Any
+    def __init__(self, offset, size) -> None: ...
+    def __contains__(self, offset): ...
+
+class _data(_section):
+    realpos = ... # type: Any
+    def __init__(self, offset, size, realpos) -> None: ...
+
+class _hole(_section): ...
+
+class _ringbuffer(list):
+    idx = ... # type: Any
+    def __init__(self) -> None: ...
+    def find(self, offset): ...
+
+class TarFileCompat:
+    tarfile = ... # type: Any
+    def __init__(self, file, mode=..., compression=...) -> None: ...
+    def namelist(self): ...
+    def infolist(self): ...
+    def printdir(self): ...
+    def testzip(self): ...
+    def getinfo(self, name): ...
+    def read(self, name): ...
+    def write(self, filename, arcname=..., compress_type=...): ...
+    def writestr(self, zinfo, bytes): ...
+    def close(self): ...
+
+def is_tarfile(name): ...
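
A hedged usage sketch of the TarFile interface stubbed above (illustrative only; 'example.tar.gz' and the 'out' directory are hypothetical names):

    import tarfile

    if tarfile.is_tarfile('example.tar.gz'):
        tf = tarfile.TarFile.open('example.tar.gz', 'r:gz')
        try:
            print(tf.getnames())
            tf.extractall(path='out')
        finally:
            tf.close()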
diff --git a/typeshed/stdlib/2.7/tempfile.pyi b/typeshed/stdlib/2.7/tempfile.pyi
new file mode 100644
index 0000000..42c19ad
--- /dev/null
+++ b/typeshed/stdlib/2.7/tempfile.pyi
@@ -0,0 +1,42 @@
+# Stubs for tempfile
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.3/library/tempfile.html
+# Adapted for Python 2.7 by Michal Pokorny
+
+from typing import Tuple, IO
+
+# global variables
+tempdir = ...  # type: str
+template = ...  # type: str
+
+# TODO text files
+
+# function stubs
+def TemporaryFile(
+            mode: str = ..., bufsize: int = ..., suffix: str = ...,
+            prefix: str = ..., dir: str = ...) -> IO[str]: ...
+def NamedTemporaryFile(
+            mode: str = ..., bufsize: int = ..., suffix: str = ...,
+            prefix: str = ..., dir: str = ..., delete: bool = ...
+            ) -> IO[str]: ...
+def SpooledTemporaryFile(
+           max_size: int = ..., mode: str = ..., buffering: int = ...,
+           suffix: str = ..., prefix: str = ..., dir: str = ...) -> IO[str]:
+    ...
+
+class TemporaryDirectory:
+    name = ...  # type: basestring
+    def __init__(self, suffix: basestring = ..., prefix: basestring = ...,
+                 dir: basestring = ...) -> None: ...
+    def cleanup(self) -> None: ...
+    def __enter__(self) -> basestring: ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+def mkstemp(suffix: basestring = ..., prefix: basestring = ..., dir: basestring = ...,
+            text: bool = ...) -> Tuple[int, basestring]: ...
+def mkdtemp(suffix: basestring = ..., prefix: basestring = ...,
+            dir: basestring = ...) -> basestring: ...
+def mktemp(suffix: basestring = ..., prefix: basestring = ..., dir: basestring = ...) -> basestring: ...
+def gettempdir() -> basestring: ...
+def gettempprefix() -> basestring: ...
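
For illustration, a minimal sketch of the tempfile helpers stubbed above (not part of the patch):

    import os
    import tempfile

    with tempfile.NamedTemporaryFile(suffix='.txt') as f:
        f.write('scratch data')              # opened in 'w+b' mode, so a str in Python 2
        f.flush()
        print(f.name)                        # the file is removed when the block exits

    d = tempfile.mkdtemp(prefix='example-')  # the caller must remove the directory
    os.rmdir(d)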
diff --git a/typeshed/stdlib/2.7/textwrap.pyi b/typeshed/stdlib/2.7/textwrap.pyi
new file mode 100644
index 0000000..6ed6356
--- /dev/null
+++ b/typeshed/stdlib/2.7/textwrap.pyi
@@ -0,0 +1,33 @@
+# Stubs for textwrap (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _unicode: ...
+
+class TextWrapper:
+    whitespace_trans = ... # type: Any
+    unicode_whitespace_trans = ... # type: Any
+    uspace = ... # type: Any
+    wordsep_re = ... # type: Any
+    wordsep_simple_re = ... # type: Any
+    sentence_end_re = ... # type: Any
+    width = ... # type: Any
+    initial_indent = ... # type: Any
+    subsequent_indent = ... # type: Any
+    expand_tabs = ... # type: Any
+    replace_whitespace = ... # type: Any
+    fix_sentence_endings = ... # type: Any
+    break_long_words = ... # type: Any
+    drop_whitespace = ... # type: Any
+    break_on_hyphens = ... # type: Any
+    wordsep_re_uni = ... # type: Any
+    wordsep_simple_re_uni = ... # type: Any
+    def __init__(self, width=..., initial_indent=..., subsequent_indent=..., expand_tabs=..., replace_whitespace=..., fix_sentence_endings=..., break_long_words=..., drop_whitespace=..., break_on_hyphens=...) -> None: ...
+    def wrap(self, text): ...
+    def fill(self, text): ...
+
+def wrap(text, width=..., **kwargs): ...
+def fill(text, width=..., **kwargs): ...
+def dedent(text): ...
diff --git a/typeshed/stdlib/2.7/thread.pyi b/typeshed/stdlib/2.7/thread.pyi
new file mode 100644
index 0000000..a54a946
--- /dev/null
+++ b/typeshed/stdlib/2.7/thread.pyi
@@ -0,0 +1,33 @@
+"""Stubs for the "thread" module."""
+from typing import Callable, Any
+
+def _count() -> int: ...
+
+class error(Exception): ...
+
+class LockType:
+    def acquire(self, waitflag: int = ...) -> bool: ...
+    def acquire_lock(self, waitflag: int = ...) -> bool: ...
+    def release(self) -> None: ...
+    def release_lock(self) -> None: ...
+    def locked(self) -> bool: ...
+    def locked_lock(self) -> bool: ...
+    def __enter__(self) -> LockType: ...
+    def __exit__(self, value: Any, traceback: Any) -> None: ...
+
+class _local(object):
+    pass
+
+class _localdummy(object):
+    pass
+
+def start_new(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ...
+def start_new_thread(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ...
+def interrupt_main() -> None: ...
+def exit() -> None:
+    raise SystemExit()
+def exit_thread() -> Any:
+    raise SystemExit()
+def allocate_lock() -> LockType: ...
+def get_ident() -> int: ...
+def stack_size(size: int = ...) -> int: ...
diff --git a/typeshed/stdlib/2.7/threading.pyi b/typeshed/stdlib/2.7/threading.pyi
new file mode 100644
index 0000000..791fa4d
--- /dev/null
+++ b/typeshed/stdlib/2.7/threading.pyi
@@ -0,0 +1,95 @@
+# Stubs for threading
+
+from typing import Any, Optional, Callable, TypeVar, Union, List, Mapping, Sequence
+
+def active_count() -> int: ...
+def activeCount() -> int: ...
+
+def current_thread() -> Thread: ...
+def currentThread() -> Thread: ...
+def enumerate() -> List[Thread]: ...
+
+class Thread(object):
+    name = ...  # type: str
+    ident = 0
+    daemon = False
+
+    def __init__(self, group: Any = ..., target: Callable[..., Any] = ...,
+                 name: str = ..., args: Sequence[Any] = ...,
+                 kwargs: Mapping[str, Any] = ...) -> None: ...
+    def start(self) -> None: ...
+    def run(self) -> None: ...
+    def join(self, timeout: float = ...) -> None: ...
+    def is_alive(self) -> bool: ...
+
+    # Legacy methods
+    def isAlive(self) -> bool: ...
+    def getName(self) -> str: ...
+    def setName(self, name: str) -> None: ...
+    def isDaemon(self) -> bool: ...
+    def setDaemon(self, daemon: bool) -> None: ...
+
+class Timer(Thread):
+    def __init__(self, interval: float, function: Callable[..., Any],
+                 args: Sequence[Any] = ...,
+                 kwargs: Mapping[str, Any] = ...) -> None: ...
+    def cancel(self) -> None: ...
+
+# TODO: better type
+def settrace(func: Callable[[Any, str, Any], Any]) -> None: ...
+def setprofile(func: Any) -> None: ...
+def stack_size(size: int = ...) -> None: ...
+
+class ThreadError(Exception):
+    pass
+
+class local(Any): ...
+
+class Event(object):
+    def is_set(self) -> bool: ...
+    def isSet(self) -> bool: ...
+    def set(self) -> None: ...
+    def clear(self) -> None: ...
+    # TODO can it return None?
+    def wait(self, timeout: float = ...) -> bool: ...
+
+class Lock(object):
+    def acquire(self, blocking: bool = ...) -> bool: ...
+    def release(self) -> None: ...
+    def locked(self) -> bool: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+class RLock(object):
+    def acquire(self, blocking: int = ...) -> Optional[bool]: ...
+    def release(self) -> None: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+class Semaphore(object):
+    def acquire(self, blocking: bool = ...) -> Optional[bool]: ...
+    def release(self) -> None: ...
+    def __init__(self, value: int = ...) -> None: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+class BoundedSemaphore(object):
+    def acquire(self, blocking: bool = ...) -> Optional[bool]: ...
+    def release(self) -> None: ...
+    def __init__(self, value: int = ...) -> None: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+_T = TypeVar('_T')
+
+class Condition(object):
+    def acquire(self, blocking: bool = ...) -> bool: ...
+    def release(self) -> None: ...
+    def notify(self, n: int = ...) -> None: ...
+    def notify_all(self) -> None: ...
+    def notifyAll(self) -> None: ...
+    def wait(self, timeout: float = ...) -> bool: ...
+    def wait_for(self, predicate: Callable[[], _T], timeout: float = ...) -> Union[_T, bool]: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+    def __init__(self, lock: Lock = ...) -> None: ...
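
A minimal sketch, assuming the Thread and Lock signatures above, of several workers guarded by a lock (illustrative only, not part of the patch):

    import threading

    counter = {'value': 0}
    lock = threading.Lock()

    def work():
        # type: () -> None
        for _ in range(1000):
            with lock:                       # Lock supports the context-manager protocol
                counter['value'] += 1

    threads = [threading.Thread(target=work) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert counter['value'] == 4000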
diff --git a/typeshed/stdlib/2.7/time.pyi b/typeshed/stdlib/2.7/time.pyi
new file mode 100644
index 0000000..8a564c8
--- /dev/null
+++ b/typeshed/stdlib/2.7/time.pyi
@@ -0,0 +1,48 @@
+"""Stub file for the 'time' module."""
+# See https://docs.python.org/2/library/time.html
+
+from typing import NamedTuple, Tuple, Union
+
+# ----- variables and constants -----
+accept2dyear = False
+altzone = 0
+daylight = 0
+timezone = 0
+tzname = ... # type: Tuple[str, str]
+
+struct_time = NamedTuple('struct_time',
+                         [('tm_year', int), ('tm_mon', int), ('tm_mday', int),
+                          ('tm_hour', int), ('tm_min', int), ('tm_sec', int),
+                          ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int)])
+
+_TIME_TUPLE = Tuple[int, int, int, int, int, int, int, int, int]
+
+def asctime(t: struct_time = ...) -> str:
+    raise ValueError()
+
+def clock() -> float: ...
+
+def ctime(secs: float = ...) -> str:
+    raise ValueError()
+
+def gmtime(secs: float = ...) -> struct_time: ...
+
+def localtime(secs: float = ...) -> struct_time: ...
+
+def mktime(t: struct_time) -> float:
+    raise OverflowError()
+    raise ValueError()
+
+def sleep(secs: float) -> None: ...
+
+def strftime(format: str, t: struct_time = ...) -> str:
+    raise MemoryError()
+    raise ValueError()
+
+def strptime(string: str, format: str = ...) -> struct_time:
+    raise ValueError()
+
+def time() -> float:
+    raise IOError()
+
+def tzset() -> None: ...
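
For illustration, a short sketch of the time functions stubbed above (not part of the patch; it assumes an unambiguous local time, since struct_time drops fractional seconds):

    import time

    now = time.time()                                   # seconds since the epoch, as a float
    parts = time.localtime(now)                         # a struct_time
    print(time.strftime('%Y-%m-%d %H:%M:%S', parts))
    assert abs(time.mktime(parts) - now) < 1.0          # mktime inverts localtime to whole seconds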
diff --git a/typeshed/stdlib/2.7/token.pyi b/typeshed/stdlib/2.7/token.pyi
new file mode 100644
index 0000000..d0c8412
--- /dev/null
+++ b/typeshed/stdlib/2.7/token.pyi
@@ -0,0 +1,62 @@
+from typing import Dict
+
+ENDMARKER = 0
+NAME = 0
+NUMBER = 0
+STRING = 0
+NEWLINE = 0
+INDENT = 0
+DEDENT = 0
+LPAR = 0
+RPAR = 0
+LSQB = 0
+RSQB = 0
+COLON = 0
+COMMA = 0
+SEMI = 0
+PLUS = 0
+MINUS = 0
+STAR = 0
+SLASH = 0
+VBAR = 0
+AMPER = 0
+LESS = 0
+GREATER = 0
+EQUAL = 0
+DOT = 0
+PERCENT = 0
+BACKQUOTE = 0
+LBRACE = 0
+RBRACE = 0
+EQEQUAL = 0
+NOTEQUAL = 0
+LESSEQUAL = 0
+GREATEREQUAL = 0
+TILDE = 0
+CIRCUMFLEX = 0
+LEFTSHIFT = 0
+RIGHTSHIFT = 0
+DOUBLESTAR = 0
+PLUSEQUAL = 0
+MINEQUAL = 0
+STAREQUAL = 0
+SLASHEQUAL = 0
+PERCENTEQUAL = 0
+AMPEREQUAL = 0
+VBAREQUAL = 0
+CIRCUMFLEXEQUAL = 0
+LEFTSHIFTEQUAL = 0
+RIGHTSHIFTEQUAL = 0
+DOUBLESTAREQUAL = 0
+DOUBLESLASH = 0
+DOUBLESLASHEQUAL = 0
+AT = 0
+OP = 0
+ERRORTOKEN = 0
+N_TOKENS = 0
+NT_OFFSET = 0
+tok_name = {} # type: Dict[int, str]
+
+def ISTERMINAL(x) -> bool: ...
+def ISNONTERMINAL(x) -> bool: ...
+def ISEOF(x) -> bool: ...
diff --git a/typeshed/stdlib/2.7/tokenize.pyi b/typeshed/stdlib/2.7/tokenize.pyi
new file mode 100644
index 0000000..159adf9
--- /dev/null
+++ b/typeshed/stdlib/2.7/tokenize.pyi
@@ -0,0 +1,143 @@
+# Automatically generated by pytype, manually fixed up. May still contain errors.
+
+from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union, Iterable
+
+__all__ = ...  # type: List[str]
+__author__ = ...  # type: str
+__credits__ = ...  # type: str
+
+AMPER = ...  # type: int
+AMPEREQUAL = ...  # type: int
+AT = ...  # type: int
+BACKQUOTE = ...  # type: int
+Binnumber = ...  # type: str
+Bracket = ...  # type: str
+CIRCUMFLEX = ...  # type: int
+CIRCUMFLEXEQUAL = ...  # type: int
+COLON = ...  # type: int
+COMMA = ...  # type: int
+COMMENT = ...  # type: int
+Comment = ...  # type: str
+ContStr = ...  # type: str
+DEDENT = ...  # type: int
+DOT = ...  # type: int
+DOUBLESLASH = ...  # type: int
+DOUBLESLASHEQUAL = ...  # type: int
+DOUBLESTAR = ...  # type: int
+DOUBLESTAREQUAL = ...  # type: int
+Decnumber = ...  # type: str
+Double = ...  # type: str
+Double3 = ...  # type: str
+ENDMARKER = ...  # type: int
+EQEQUAL = ...  # type: int
+EQUAL = ...  # type: int
+ERRORTOKEN = ...  # type: int
+Expfloat = ...  # type: str
+Exponent = ...  # type: str
+Floatnumber = ...  # type: str
+Funny = ...  # type: str
+GREATER = ...  # type: int
+GREATEREQUAL = ...  # type: int
+Hexnumber = ...  # type: str
+INDENT = ...  # type: int
+
+def ISEOF(x: int) -> bool: ...
+def ISNONTERMINAL(x: int) -> bool: ...
+def ISTERMINAL(x: int) -> bool: ...
+
+Ignore = ...  # type: str
+Imagnumber = ...  # type: str
+Intnumber = ...  # type: str
+LBRACE = ...  # type: int
+LEFTSHIFT = ...  # type: int
+LEFTSHIFTEQUAL = ...  # type: int
+LESS = ...  # type: int
+LESSEQUAL = ...  # type: int
+LPAR = ...  # type: int
+LSQB = ...  # type: int
+MINEQUAL = ...  # type: int
+MINUS = ...  # type: int
+NAME = ...  # type: int
+NEWLINE = ...  # type: int
+NL = ...  # type: int
+NOTEQUAL = ...  # type: int
+NT_OFFSET = ...  # type: int
+NUMBER = ...  # type: int
+N_TOKENS = ...  # type: int
+Name = ...  # type: str
+Number = ...  # type: str
+OP = ...  # type: int
+Octnumber = ...  # type: str
+Operator = ...  # type: str
+PERCENT = ...  # type: int
+PERCENTEQUAL = ...  # type: int
+PLUS = ...  # type: int
+PLUSEQUAL = ...  # type: int
+PlainToken = ...  # type: str
+Pointfloat = ...  # type: str
+PseudoExtras = ...  # type: str
+PseudoToken = ...  # type: str
+RBRACE = ...  # type: int
+RIGHTSHIFT = ...  # type: int
+RIGHTSHIFTEQUAL = ...  # type: int
+RPAR = ...  # type: int
+RSQB = ...  # type: int
+SEMI = ...  # type: int
+SLASH = ...  # type: int
+SLASHEQUAL = ...  # type: int
+STAR = ...  # type: int
+STAREQUAL = ...  # type: int
+STRING = ...  # type: int
+Single = ...  # type: str
+Single3 = ...  # type: str
+Special = ...  # type: str
+String = ...  # type: str
+TILDE = ...  # type: int
+Token = ...  # type: str
+Triple = ...  # type: str
+VBAR = ...  # type: int
+VBAREQUAL = ...  # type: int
+Whitespace = ...  # type: str
+chain = ...  # type: type
+double3prog = ...  # type: type
+endprogs = ...  # type: Dict[str, Any]
+pseudoprog = ...  # type: type
+re = ...  # type: module
+single3prog = ...  # type: type
+single_quoted = ...  # type: Dict[str, str]
+string = ...  # type: module
+sys = ...  # type: module
+t = ...  # type: str
+tabsize = ...  # type: int
+tok_name = ...  # type: Dict[int, str]
+token = ...  # type: module
+tokenprog = ...  # type: type
+triple_quoted = ...  # type: Dict[str, str]
+x = ...  # type: str
+
+_Pos = Tuple[int, int]
+_TokenType = Tuple[int, str, _Pos, _Pos, str]
+
+def any(*args, **kwargs) -> str: ...
+def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ...
+def group(*args: str) -> str: ...
+def maybe(*args: str) -> str: ...
+def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ...
+def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def untokenize(iterable: Iterable[_TokenType]) -> str: ...
+
+class StopTokenizing(Exception):
+    pass
+
+class TokenError(Exception):
+    pass
+
+class Untokenizer:
+    prev_col = ...  # type: int
+    prev_row = ...  # type: int
+    tokens = ...  # type: List[str]
+    def __init__(self) -> None: ...
+    def add_whitespace(self, start: _Pos) -> None: ...
+    def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ...
+    def untokenize(self, iterable: Iterable[_TokenType]) -> str: ...
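
A hedged example of driving generate_tokens from an in-memory string (illustrative only, not part of the patch):

    import tokenize
    from StringIO import StringIO

    source = StringIO('x = 1 + 2\n')
    for tok_type, tok_string, start, end, line in tokenize.generate_tokens(source.readline):
        print('%s %r' % (tokenize.tok_name[tok_type], tok_string))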
diff --git a/typeshed/stdlib/2.7/traceback.pyi b/typeshed/stdlib/2.7/traceback.pyi
new file mode 100644
index 0000000..b407107
--- /dev/null
+++ b/typeshed/stdlib/2.7/traceback.pyi
@@ -0,0 +1,17 @@
+from typing import Any, IO, AnyStr, Callable, Tuple, List
+from types import TracebackType, FrameType
+
+def print_tb(traceback: TracebackType, limit: int = ..., file: IO[str] = ...) -> None: ...
+def print_exception(type: type, value: Exception, limit: int = ..., file: IO[str] = ...) -> None: ...
+def print_exc(limit: int = ..., file: IO[str] = ...) -> None: ...
+def format_exc(limit: int = ...) -> str: ...
+def print_last(limit: int = ..., file: IO[str] = ...) -> None: ...
+def print_stack(f: FrameType, limit: int = ..., file: IO[AnyStr] = ...) -> None: ...
+def extract_tb(f: TracebackType, limit: int = ...) -> List[Tuple[str, int, str, str]]: ...
+def extract_stack(f: FrameType = ..., limit: int = ...) -> None: ...
+def format_list(list: List[Tuple[str, int, str, str]]) -> str: ...
+def format_exception_only(type: type, value: List[str]) -> str: ...
+def format_exception(type: type, value: List[str], tb: TracebackType, limit: int) -> str: ...
+def format_tb(f: TracebackType, limit: int = ...) -> str: ...
+def format_stack(f: FrameType = ..., limit: int = ...) -> str: ...
+def tb_lineno(tb: TracebackType) -> AnyStr: ...
diff --git a/typeshed/stdlib/2.7/types.pyi b/typeshed/stdlib/2.7/types.pyi
new file mode 100644
index 0000000..607d9fa
--- /dev/null
+++ b/typeshed/stdlib/2.7/types.pyi
@@ -0,0 +1,162 @@
+# Stubs for types
+# Note, all classes "defined" here require special handling.
+
+from typing import (
+    Any, Callable, Dict, Iterable, Iterator, List, Optional,
+    Tuple, TypeVar, Union, overload,
+)
+
+_T = TypeVar('_T')
+
+class NoneType: ...
+TypeType = type
+ObjectType = object
+
+IntType = int
+LongType = long
+FloatType = float
+BooleanType = bool
+ComplexType = complex
+StringType = str
+UnicodeType = unicode
+StringTypes = (StringType, UnicodeType)
+BufferType = buffer
+TupleType = tuple
+ListType = list
+DictType = DictionaryType = dict
+
+class _Cell:
+    cell_contents = ...  # type: Any
+
+class FunctionType:
+    func_closure = ...  # type: Optional[Tuple[_Cell, ...]]
+    func_code = ...  # type: CodeType
+    func_defaults = ...  # type: Optional[Tuple[Any, ...]]
+    func_dict = ...  # type: Dict[str, Any]
+    func_doc = ...  # type: Optional[str]
+    func_globals = ...  # type: Dict[str, Any]
+    func_name = ...  # type: str
+    __closure__ = func_closure
+    __code__ = func_code
+    __defaults__ = func_defaults
+    __dict__ = func_dict
+    __doc__ = func_doc
+    __globals__ = func_globals
+    __name__ = func_name
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def __get__(self, obj: Optional[object], type: Optional[type]) -> 'UnboundMethodType': ...
+
+LambdaType = FunctionType
+
+class CodeType:
+    co_argcount = ...  # type: int
+    co_cellvars = ...  # type: Tuple[str, ...]
+    co_code = ...  # type: str
+    co_consts = ...  # type: Tuple[Any, ...]
+    co_filename = ...  # type: Optional[str]
+    co_firstlineno = ...  # type: int
+    co_flags = ...  # type: int
+    co_freevars = ...  # type: Tuple[str, ...]
+    co_lnotab = ...  # type: str
+    co_name = ...  # type: str
+    co_names = ...  # type: Tuple[str, ...]
+    co_nlocals = ...  # type: int
+    co_stacksize = ...  # type: int
+    co_varnames = ...  # type: Tuple[str, ...]
+
+class GeneratorType:
+    gi_code = ...  # type: CodeType
+    gi_frame = ...  # type: FrameType
+    gi_running = ...  # type: int
+    def __iter__(self) -> 'GeneratorType': ...
+    def close(self) -> None: ...
+    def next(self) -> Any: ...
+    def send(self, arg: Any) -> Any: ...
+    @overload
+    def throw(self, val: BaseException) -> Any: ...
+    @overload
+    def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ...
+
+class ClassType: ...
+class UnboundMethodType:
+    im_class = ...  # type: type
+    im_func = ...  # type: FunctionType
+    im_self = ...  # type: Optional[object]
+    __func__ = im_func
+    __self__ = im_self
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+class InstanceType: ...
+MethodType = UnboundMethodType
+
+class BuiltinFunctionType:
+    __self__ = ...  # type: Optional[object]
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+BuiltinMethodType = BuiltinFunctionType
+
+class ModuleType:
+    __doc__ = ...  # type: Optional[str]
+    __file__ = ...  # type: Optional[str]
+    __name__ = ...  # type: str
+    __package__ = ...  # type: Optional[str]
+    __path__ = ...  # type: Optional[Iterable[str]]
+    def __init__(self, name: str, doc: str) -> None: ...
+FileType = file
+XRangeType = xrange
+
+class TracebackType:
+    tb_frame = ... # type: FrameType
+    tb_lasti = ... # type: int
+    tb_lineno = ... # type: int
+    tb_next = ... # type: TracebackType
+
+class FrameType:
+    f_back = ... # type: FrameType
+    f_builtins = ... # type: Dict[str, Any]
+    f_code = ... # type: CodeType
+    f_exc_type = ... # type: None
+    f_exc_value = ... # type: None
+    f_exc_traceback = ... # type: None
+    f_globals = ... # type: Dict[str, Any]
+    f_lasti = ... # type: int
+    f_lineno = ... # type: int
+    f_locals = ... # type: Dict[str, Any]
+    f_restricted = ... # type: bool
+    f_trace = ... # type: Callable[[], None]
+
+    def clear(self) -> None: pass
+
+SliceType = slice
+class EllipsisType: ...
+
+class DictProxyType:
+    # TODO is it possible to have non-string keys?
+    # no __init__
+    def copy(self) -> dict: ...
+    def get(self, key: str, default: _T = ...) -> Union[Any, _T]: ...
+    def has_key(self, key: str) -> bool: ...
+    def items(self) -> List[Tuple[str, Any]]: ...
+    def iteritems(self) -> Iterator[Tuple[str, Any]]: ...
+    def iterkeys(self) -> Iterator[str]: ...
+    def itervalues(self) -> Iterator[Any]: ...
+    def keys(self) -> List[str]: ...
+    def values(self) -> List[Any]: ...
+    def __contains__(self, key: str) -> bool: ...
+    def __getitem__(self, key: str) -> Any: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __len__(self) -> int: ...
+
+class NotImplementedType: ...
+
+class GetSetDescriptorType:
+    __name__ = ...  # type: str
+    __objclass__ = ...  # type: type
+    def __get__(self, obj: Any, type: type = ...) -> Any: ...
+    def __set__(self, obj: Any) -> None: ...
+    def __delete__(self, obj: Any) -> None: ...
+# Same type on Jython, different on CPython and PyPy, unknown on IronPython.
+class MemberDescriptorType:
+    __name__ = ...  # type: str
+    __objclass__ = ...  # type: type
+    def __get__(self, obj: Any, type: type = ...) -> Any: ...
+    def __set__(self, obj: Any) -> None: ...
+    def __delete__(self, obj: Any) -> None: ...
diff --git a/typeshed/stdlib/2.7/typing.pyi b/typeshed/stdlib/2.7/typing.pyi
new file mode 100644
index 0000000..a04ad24
--- /dev/null
+++ b/typeshed/stdlib/2.7/typing.pyi
@@ -0,0 +1,312 @@
+# Stubs for typing (Python 2.7)
+
+from abc import abstractmethod, ABCMeta
+
+# Definitions of special type checking related constructs.  Their definitions
+# are not used, so their values do not matter.
+
+cast = object()
+overload = object()
+Any = object()
+TypeVar = object()
+Generic = object()
+Tuple = object()
+Callable = object()
+builtinclass = object()
+_promote = object()
+NamedTuple = object()
+
+# Type aliases
+
+class TypeAlias:
+    # Class for defining generic aliases for library types.
+    def __init__(self, target_type) -> None: ...
+    def __getitem__(self, typeargs): ...
+
+Union = TypeAlias(object)
+Optional = TypeAlias(object)
+List = TypeAlias(object)
+Dict = TypeAlias(object)
+Set = TypeAlias(object)
+
+# Predefined type variables.
+AnyStr = TypeVar('AnyStr', str, unicode)
+
+# Abstract base classes.
+
+# These type variables are used by the container types.
+_T = TypeVar('_T')
+_S = TypeVar('_S')
+_KT = TypeVar('_KT')  # Key type.
+_VT = TypeVar('_VT')  # Value type.
+_T_co = TypeVar('_T_co', covariant=True)  # Any type covariant containers.
+_V_co = TypeVar('_V_co', covariant=True)  # Any type covariant containers.
+_VT_co = TypeVar('_VT_co', covariant=True)  # Value type covariant containers.
+_T_contra = TypeVar('_T_contra', contravariant=True)  # Ditto contravariant.
+
+class SupportsInt(metaclass=ABCMeta):
+    @abstractmethod
+    def __int__(self) -> int: ...
+
+class SupportsFloat(metaclass=ABCMeta):
+    @abstractmethod
+    def __float__(self) -> float: ...
+
+class SupportsAbs(Generic[_T]):
+    @abstractmethod
+    def __abs__(self) -> _T: ...
+
+class SupportsRound(Generic[_T]):
+    @abstractmethod
+    def __round__(self, ndigits: int = ...) -> _T: ...
+
+class Reversible(Generic[_T_co]):
+    @abstractmethod
+    def __reversed__(self) -> Iterator[_T_co]: ...
+
+class Sized(metaclass=ABCMeta):
+    @abstractmethod
+    def __len__(self) -> int: ...
+
+class Iterable(Generic[_T_co]):
+    @abstractmethod
+    def __iter__(self) -> Iterator[_T_co]: ...
+
+class Iterator(Iterable[_T_co], Generic[_T_co]):
+    @abstractmethod
+    def next(self) -> _T_co: ...
+
+class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
+    @abstractmethod
+    def next(self) -> _T_co:...
+
+    @abstractmethod
+    def send(self, value: _T_contra) -> _T_co:...
+
+    @abstractmethod
+    def throw(self, typ: BaseException, val: Any=None, tb=None) -> None:...
+
+    @abstractmethod
+    def close(self) -> None:...
+
+class Container(Generic[_T_co]):
+    @abstractmethod
+    def __contains__(self, x: object) -> bool: ...
+
+class Sequence(Iterable[_T_co], Container[_T_co], Sized, Reversible[_T_co], Generic[_T_co]):
+    @overload
+    @abstractmethod
+    def __getitem__(self, i: int) -> _T_co: ...
+    @overload
+    @abstractmethod
+    def __getitem__(self, s: slice) -> Sequence[_T_co]: ...
+    # Mixin methods
+    def index(self, x: Any) -> int: ...
+    def count(self, x: Any) -> int: ...
+    def __contains__(self, x: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T_co]: ...
+    def __reversed__(self) -> Iterator[_T_co]: ...
+
+class MutableSequence(Sequence[_T], Generic[_T]):
+    @abstractmethod
+    def insert(self, index: int, object: _T) -> None: ...
+    @overload
+    @abstractmethod
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    @abstractmethod
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: ...
+    @abstractmethod
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    # Mixin methods
+    def append(self, object: _T) -> None: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def reverse(self) -> None: ...
+    def pop(self, index: int = ...) -> _T: ...
+    def remove(self, object: _T) -> None: ...
+    def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ...
+
+class AbstractSet(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]):
+    @abstractmethod
+    def __contains__(self, x: object) -> bool: ...
+    # Mixin methods
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ...
+    def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ...
+    def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ...
+    def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ...
+    # TODO: argument can be any container?
+    def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
+
+class MutableSet(AbstractSet[_T], Generic[_T]):
+    @abstractmethod
+    def add(self, x: _T) -> None: ...
+    @abstractmethod
+    def discard(self, x: _T) -> None: ...
+    # Mixin methods
+    def clear(self) -> None: ...
+    def pop(self) -> _T: ...
+    def remove(self, element: _T) -> None: ...
+    def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ...
+    def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ...
+    def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ...
+    def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ...
+
+class Mapping(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT]):
+    @abstractmethod
+    def __getitem__(self, k: _KT) -> _VT: ...
+    # Mixin methods
+    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def keys(self) -> list[_KT]: ...
+    def values(self) -> list[_VT]: ...
+    def items(self) -> list[Tuple[_KT, _VT]]: ...
+    def iterkeys(self) -> Iterator[_KT]: ...
+    def itervalues(self) -> Iterator[_VT]: ...
+    def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+
+class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
+    @abstractmethod
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    @abstractmethod
+    def __delitem__(self, v: _KT) -> None: ...
+
+    def clear(self) -> None: ...
+    def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def update(self, m: Union[Mapping[_KT, _VT],
+                              Iterable[Tuple[_KT, _VT]]]) -> None: ...
+
+class IO(Iterable[AnyStr], Generic[AnyStr]):
+    # TODO detach
+    # TODO use abstract properties
+    @property
+    def mode(self) -> str: ...
+    @property
+    def name(self) -> str: ...
+    @abstractmethod
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    @abstractmethod
+    def fileno(self) -> int: ...
+    @abstractmethod
+    def flush(self) -> None: ...
+    @abstractmethod
+    def isatty(self) -> bool: ...
+    # TODO what if n is None?
+    @abstractmethod
+    def read(self, n: int = ...) -> AnyStr: ...
+    @abstractmethod
+    def readable(self) -> bool: ...
+    @abstractmethod
+    def readline(self, limit: int = ...) -> AnyStr: ...
+    @abstractmethod
+    def readlines(self, hint: int = ...) -> list[AnyStr]: ...
+    @abstractmethod
+    def seek(self, offset: int, whence: int = ...) -> None: ...
+    @abstractmethod
+    def seekable(self) -> bool: ...
+    @abstractmethod
+    def tell(self) -> int: ...
+    @abstractmethod
+    def truncate(self, size: int = ...) -> Optional[int]: ...
+    @abstractmethod
+    def writable(self) -> bool: ...
+    # TODO buffer objects
+    @abstractmethod
+    def write(self, s: AnyStr) -> None: ...
+    @abstractmethod
+    def writelines(self, lines: Iterable[AnyStr]) -> None: ...
+
+    @abstractmethod
+    def __iter__(self) -> Iterator[AnyStr]: ...
+    @abstractmethod
+    def __enter__(self) -> 'IO[AnyStr]': ...
+    @abstractmethod
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class BinaryIO(IO[str]):
+    # TODO readinto
+    # TODO read1?
+    # TODO peek?
+    @abstractmethod
+    def __enter__(self) -> BinaryIO: ...
+
+class TextIO(IO[unicode]):
+    # TODO use abstractproperty
+    @property
+    def buffer(self) -> BinaryIO: ...
+    @property
+    def encoding(self) -> str: ...
+    @property
+    def errors(self) -> str: ...
+    @property
+    def line_buffering(self) -> bool: ...
+    @property
+    def newlines(self) -> Any: ... # None, str or tuple
+    @abstractmethod
+    def __enter__(self) -> TextIO: ...
+
+class Match(Generic[AnyStr]):
+    pos = 0
+    endpos = 0
+    lastindex = 0
+    lastgroup = None  # type: AnyStr
+    string = None  # type: AnyStr
+
+    # The regular expression object whose match() or search() method produced
+    # this match instance.
+    re = None  # type: 'Pattern[AnyStr]'
+
+    def expand(self, template: AnyStr) -> AnyStr: ...
+
+    @overload
+    def group(self, group1: int = ...) -> AnyStr: ...
+    @overload
+    def group(self, group1: str) -> AnyStr: ...
+    @overload
+    def group(self, group1: int, group2: int,
+              *groups: int) -> Sequence[AnyStr]: ...
+    @overload
+    def group(self, group1: str, group2: str,
+              *groups: str) -> Sequence[AnyStr]: ...
+
+    def groups(self, default: AnyStr = ...) -> Sequence[AnyStr]: ...
+    def groupdict(self, default: AnyStr = ...) -> dict[str, AnyStr]: ...
+    def start(self, group: int = ...) -> int: ...
+    def end(self, group: int = ...) -> int: ...
+    def span(self, group: int = ...) -> Tuple[int, int]: ...
+
+class Pattern(Generic[AnyStr]):
+    flags = 0
+    groupindex = 0
+    groups = 0
+    pattern = None  # type: AnyStr
+
+    def search(self, string: AnyStr, pos: int = ...,
+               endpos: int = ...) -> Match[AnyStr]: ...
+    def match(self, string: AnyStr, pos: int = ...,
+              endpos: int = ...) -> Match[AnyStr]: ...
+    def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ...
+    def findall(self, string: AnyStr, pos: int = ...,
+                endpos: int = ...) -> list[AnyStr]: ...
+    def finditer(self, string: AnyStr, pos: int = ...,
+                 endpos: int = ...) -> Iterator[Match[AnyStr]]: ...
+
+    @overload
+    def sub(self, repl: AnyStr, string: AnyStr,
+            count: int = ...) -> AnyStr: ...
+    @overload
+    def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr,
+            count: int = ...) -> AnyStr: ...
+
+    @overload
+    def subn(self, repl: AnyStr, string: AnyStr,
+             count: int = ...) -> Tuple[AnyStr, int]: ...
+    @overload
+    def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr,
+             count: int = ...) -> Tuple[AnyStr, int]: ...
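
For illustration, a minimal sketch of how code checked against these Python 2 stubs annotates functions with type comments (illustrative only, not part of the patch):

    from typing import Dict, List, Optional

    def tally(words):
        # type: (List[str]) -> Dict[str, int]
        counts = {}  # type: Dict[str, int]
        for w in words:
            counts[w] = counts.get(w, 0) + 1
        return counts

    def first(words):
        # type: (List[str]) -> Optional[str]
        return words[0] if words else None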
diff --git a/typeshed/stdlib/2.7/unicodedata.pyi b/typeshed/stdlib/2.7/unicodedata.pyi
new file mode 100644
index 0000000..196f142
--- /dev/null
+++ b/typeshed/stdlib/2.7/unicodedata.pyi
@@ -0,0 +1,40 @@
+"""Stubs for the 'unicodedata' module."""
+
+from typing import Any, TypeVar, Union
+
+ucd_3_2_0 = ... # type: UCD
+unidata_version = ... # type: str
+# PyCapsule
+ucnhash_CAPI = ... # type: Any
+
+_default = TypeVar("_default")
+
+def bidirectional(unichr: unicode) -> str: ...
+def category(unichr: unicode) -> str: ...
+def combining(unichr: unicode) -> int: ...
+def decimal(chr: unicode, default: _default = ...) -> Union[int, _default]: ...
+def decomposition(unichr: unicode) -> str: ...
+def digit(chr: unicode, default: _default = ...) -> Union[int, _default]: ...
+def east_asian_width(unichr: unicode) -> str: ...
+def lookup(name: str) -> unicode: ...
+def mirrored(unichr: unicode) -> int: ...
+def name(chr: unicode, default: _default = ...) -> Union[str, _default]: ...
+def normalize(form: str, unistr: unicode) -> unicode: ...
+def numeric(chr, default: _default = ...) -> Union[float, _default]: ...
+
+class UCD(object):
+    unidata_version = ... # type: str
+    # The methods below are constructed from the same array in C
+    # (unicodedata_functions) and hence identical to the methods above.
+    def bidirectional(self, unichr: unicode) -> str: ...
+    def category(self, unichr: unicode) -> str: ...
+    def combining(self, unichr: unicode) -> int: ...
+    def decimal(self, chr: unicode, default: _default = ...) -> Union[int, _default]: ...
+    def decomposition(self, unichr: unicode) -> str: ...
+    def digit(self, chr: unicode, default: _default = ...) -> Union[int, _default]: ...
+    def east_asian_width(self, unichr: unicode) -> str: ...
+    def lookup(self, name: str) -> unicode: ...
+    def mirrored(self, unichr: unicode) -> int: ...
+    def name(self, chr: unicode, default: _default = ...) -> Union[str, _default]: ...
+    def normalize(self, form: str, unistr: unicode) -> unicode: ...
+    def numeric(self, chr: unicode, default: _default = ...) -> Union[float, _default]: ...
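Illustrative sketch only (Python 2): the _default TypeVar above lets the type of the fallback value flow into the return type, so calls like these are what the stub is meant to check:

    import unicodedata

    v = unicodedata.numeric(u'7')          # float; raises ValueError if undefined
    v = unicodedata.numeric(u'x', -1.0)    # still float, because the default is a float
    n = unicodedata.name(u'a', None)       # Union[str, None]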
diff --git a/typeshed/stdlib/2.7/unittest.pyi b/typeshed/stdlib/2.7/unittest.pyi
new file mode 100644
index 0000000..a2ad502
--- /dev/null
+++ b/typeshed/stdlib/2.7/unittest.pyi
@@ -0,0 +1,176 @@
+# Stubs for unittest
+
+# Based on http://docs.python.org/2.7/library/unittest.html
+
+# Only a subset of functionality is included.
+
+from typing import (
+    Any, Callable, Iterable, Tuple, List, TextIO, Sequence,
+    overload, TypeVar, Pattern
+)
+from abc import abstractmethod, ABCMeta
+
+_T = TypeVar('_T')
+_FT = TypeVar('_FT')
+
+class Testable(metaclass=ABCMeta):
+    @abstractmethod
+    def run(self, result: 'TestResult') -> None: ...
+    @abstractmethod
+    def debug(self) -> None: ...
+    @abstractmethod
+    def countTestCases(self) -> int: ...
+
+# TODO ABC for test runners?
+
+class TestResult:
+    errors = ... # type: List[Tuple[Testable, str]]
+    failures = ... # type: List[Tuple[Testable, str]]
+    testsRun = 0
+    shouldStop = False
+
+    def wasSuccessful(self) -> bool: ...
+    def stop(self) -> None: ...
+    def startTest(self, test: Testable) -> None: ...
+    def stopTest(self, test: Testable) -> None: ...
+    def addError(self, test: Testable,
+                  err: Tuple[type, Any, Any]) -> None: ... # TODO
+    def addFailure(self, test: Testable,
+                    err: Tuple[type, Any, Any]) -> None: ... # TODO
+    def addSuccess(self, test: Testable) -> None: ...
+
+class _AssertRaisesBaseContext:
+    expected = ... # type: Any
+    failureException = ... # type: type
+    obj_name = ...  # type: str
+    expected_regex = ... # type: Pattern[str]
+
+class _AssertRaisesContext(_AssertRaisesBaseContext):
+    exception = ... # type: Any # TODO precise type
+    def __enter__(self) -> _AssertRaisesContext: ...
+    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
+
+class _AssertWarnsContext(_AssertRaisesBaseContext):
+    warning = ... # type: Any # TODO precise type
+    filename = ...  # type: str
+    lineno = 0
+    def __enter__(self) -> _AssertWarnsContext: ...
+    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
+
+class TestCase(Testable):
+    def __init__(self, methodName: str = ...) -> None: ...
+    # TODO failureException
+    def setUp(self) -> None: ...
+    def tearDown(self) -> None: ...
+    def run(self, result: TestResult = ...) -> None: ...
+    def debug(self) -> None: ...
+    def assert_(self, expr: Any, msg: object = ...) -> None: ...
+    def failUnless(self, expr: Any, msg: object = ...) -> None: ...
+    def assertTrue(self, expr: Any, msg: object = ...) -> None: ...
+    def assertEqual(self, first: Any, second: Any,
+                    msg: object = ...) -> None: ...
+    def failUnlessEqual(self, first: Any, second: Any,
+                        msg: object = ...) -> None: ...
+    def assertNotEqual(self, first: Any, second: Any,
+                       msg: object = ...) -> None: ...
+    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
+                            msg: object = ...,
+                            seq_type: type = ...) -> None: ...
+    def failIfEqual(self, first: Any, second: Any,
+                    msg: object = ...) -> None: ...
+    def assertAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: object = ...,
+                          delta: float = ...) -> None: ...
+    def failUnlessAlmostEqual(self, first: float, second: float,
+                              places: int = ...,
+                              msg: object = ...) -> None: ...
+    def assertNotAlmostEqual(self, first: float, second: float,
+                             places: int = ..., msg: object = ...,
+                             delta: float = ...) -> None: ...
+    def failIfAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: object = ...) -> None: ...
+    def assertGreater(self, first: Any, second: Any,
+                      msg: object = ...) -> None: ...
+    def assertGreaterEqual(self, first: Any, second: Any,
+                           msg: object = ...) -> None: ...
+    def assertLess(self, first: Any, second: Any,
+                   msg: object = ...) -> None: ...
+    def assertLessEqual(self, first: Any, second: Any,
+                        msg: object = ...) -> None: ...
+    def assertRaises(self, expected_exception: type, *args: Any, **kwargs: Any) -> Any: ...
+    def failIf(self, expr: Any, msg: object = ...) -> None: ...
+    def assertFalse(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIs(self, first: object, second: object,
+                 msg: object = ...) -> None: ...
+    def assertIsNot(self, first: object, second: object,
+                    msg: object = ...) -> None: ...
+    def assertIsNone(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIsNotNone(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIn(self, first: _T, second: Iterable[_T],
+                 msg: object = ...) -> None: ...
+    def assertNotIn(self, first: _T, second: Iterable[_T],
+                    msg: object = ...) -> None: ...
+    def assertIsInstance(self, obj: Any, cls: type,
+                         msg: object = ...) -> None: ...
+    def assertNotIsInstance(self, obj: Any, cls: type,
+                            msg: object = ...) -> None: ...
+    def assertWarns(self, expected_warning: type, callable_obj: Any = ...,
+                    *args: Any, **kwargs: Any) -> _AssertWarnsContext: ...
+    def fail(self, msg: object = ...) -> None: ...
+    def countTestCases(self) -> int: ...
+    def defaultTestResult(self) -> TestResult: ...
+    def id(self) -> str: ...
+    def shortDescription(self) -> str: ... # May return None
+    def addCleanup(self, function: Any, *args: Any, **kwargs: Any) -> None: ...
+    def skipTest(self, reason: Any) -> None: ...
+
+    assertEquals = assertEqual
+    assertNotEquals = assertNotEqual
+    assertAlmostEquals = assertAlmostEqual
+    assertNotAlmostEquals = assertNotAlmostEqual
+    assert_ = assertTrue
+
+    failUnlessEqual = assertEqual
+    failIfEqual = assertNotEqual
+    failUnlessAlmostEqual = assertAlmostEqual
+    failIfAlmostEqual = assertNotAlmostEqual
+    failUnless = assertTrue
+    failUnlessRaises = assertRaises
+    failIf = assertFalse
+
+class CallableTestCase(Testable):
+    def __init__(self, testFunc: Callable[[], None],
+                 setUp: Callable[[], None] = ...,
+                 tearDown: Callable[[], None] = ...,
+                 description: str = ...) -> None: ...
+    def run(self, result: TestResult) -> None: ...
+    def debug(self) -> None: ...
+    def countTestCases(self) -> int: ...
+
+class TestSuite(Testable):
+    def __init__(self, tests: Iterable[Testable] = ...) -> None: ...
+    def addTest(self, test: Testable) -> None: ...
+    def addTests(self, tests: Iterable[Testable]) -> None: ...
+    def run(self, result: TestResult) -> None: ...
+    def debug(self) -> None: ...
+    def countTestCases(self) -> int: ...
+
+# TODO TestLoader
+# TODO defaultTestLoader
+
+class TextTestRunner:
+    def __init__(self, stream: TextIO = ..., descriptions: bool = ...,
+                 verbosity: int = ..., failfast: bool = ...) -> None: ...
+
+class SkipTest(Exception):
+    ...
+
+# TODO precise types
+def skipUnless(condition: Any, reason: str) -> Any: ...
+def skipIf(condition: Any, reason: str) -> Any: ...
+def expectedFailure(func: _FT) -> _FT: ...
+def skip(reason: str) -> Any: ...
+
+def main(module: str = ..., defaultTest: str = ...,
+         argv: List[str] = ..., testRunner: Any = ...,
+         testLoader: Any = ...) -> None: ... # TODO types
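Illustrative only: the kind of test code these annotations are meant to check, exercising a few of the typed assert methods:

    import unittest

    class MathTest(unittest.TestCase):
        def setUp(self):
            self.values = [1, 2, 3]

        def test_sum(self):
            self.assertEqual(sum(self.values), 6)
            self.assertIn(2, self.values)
            self.assertAlmostEqual(0.1 + 0.2, 0.3, places=7)

    if __name__ == '__main__':
        unittest.main()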
diff --git a/typeshed/stdlib/2.7/urllib.pyi b/typeshed/stdlib/2.7/urllib.pyi
new file mode 100644
index 0000000..080f88a
--- /dev/null
+++ b/typeshed/stdlib/2.7/urllib.pyi
@@ -0,0 +1,135 @@
+# Stubs for urllib (Python 2)
+# NOTE: This dynamically typed stub was originally automatically generated by stubgen.
+
+from typing import Any, Mapping, Union, Tuple, Sequence, IO
+
+def url2pathname(pathname: str) -> str: ...
+def pathname2url(pathname: str) -> str: ...
+def urlopen(url: str, data=..., proxies: Mapping[str, str] = ..., context=...) -> IO[Any]: ...
+def urlretrieve(url, filename=..., reporthook=..., data=..., context=...): ...
+def urlcleanup() -> None: ...
+
+class ContentTooShortError(IOError):
+    content = ... # type: Any
+    def __init__(self, message, content) -> None: ...
+
+class URLopener:
+    version = ... # type: Any
+    proxies = ... # type: Any
+    key_file = ... # type: Any
+    cert_file = ... # type: Any
+    context = ... # type: Any
+    addheaders = ... # type: Any
+    tempcache = ... # type: Any
+    ftpcache = ... # type: Any
+    def __init__(self, proxies: Mapping[str, str] = ..., context=..., **x509) -> None: ...
+    def __del__(self): ...
+    def close(self): ...
+    def cleanup(self): ...
+    def addheader(self, *args): ...
+    type = ... # type: Any
+    def open(self, fullurl: str, data=...): ...
+    def open_unknown(self, fullurl, data=...): ...
+    def open_unknown_proxy(self, proxy, fullurl, data=...): ...
+    def retrieve(self, url, filename=..., reporthook=..., data=...): ...
+    def open_http(self, url, data=...): ...
+    def http_error(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def http_error_default(self, url, fp, errcode, errmsg, headers): ...
+    def open_https(self, url, data=...): ...
+    def open_file(self, url): ...
+    def open_local_file(self, url): ...
+    def open_ftp(self, url): ...
+    def open_data(self, url, data=...): ...
+
+class FancyURLopener(URLopener):
+    auth_cache = ... # type: Any
+    tries = ... # type: Any
+    maxtries = ... # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+    def http_error_default(self, url, fp, errcode, errmsg, headers): ...
+    def http_error_302(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def redirect_internal(self, url, fp, errcode, errmsg, headers, data): ...
+    def http_error_301(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def http_error_303(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def http_error_307(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def http_error_401(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def http_error_407(self, url, fp, errcode, errmsg, headers, data=...): ...
+    def retry_proxy_http_basic_auth(self, url, realm, data=...): ...
+    def retry_proxy_https_basic_auth(self, url, realm, data=...): ...
+    def retry_http_basic_auth(self, url, realm, data=...): ...
+    def retry_https_basic_auth(self, url, realm, data=...): ...
+    def get_user_passwd(self, host, realm, clear_cache=...): ...
+    def prompt_user_passwd(self, host, realm): ...
+
+class ftpwrapper:
+    user = ... # type: Any
+    passwd = ... # type: Any
+    host = ... # type: Any
+    port = ... # type: Any
+    dirs = ... # type: Any
+    timeout = ... # type: Any
+    refcount = ... # type: Any
+    keepalive = ... # type: Any
+    def __init__(self, user, passwd, host, port, dirs, timeout=..., persistent=...) -> None: ...
+    busy = ... # type: Any
+    ftp = ... # type: Any
+    def init(self): ...
+    def retrfile(self, file, type): ...
+    def endtransfer(self): ...
+    def close(self): ...
+    def file_close(self): ...
+    def real_close(self): ...
+
+class addbase:
+    fp = ... # type: Any
+    read = ... # type: Any
+    readline = ... # type: Any
+    readlines = ... # type: Any
+    fileno = ... # type: Any
+    __iter__ = ... # type: Any
+    next = ... # type: Any
+    def __init__(self, fp) -> None: ...
+    def close(self): ...
+
+class addclosehook(addbase):
+    closehook = ... # type: Any
+    hookargs = ... # type: Any
+    def __init__(self, fp, closehook, *hookargs) -> None: ...
+    def close(self): ...
+
+class addinfo(addbase):
+    headers = ... # type: Any
+    def __init__(self, fp, headers) -> None: ...
+    def info(self): ...
+
+class addinfourl(addbase):
+    headers = ... # type: Any
+    url = ... # type: Any
+    code = ... # type: Any
+    def __init__(self, fp, headers, url, code=...) -> None: ...
+    def info(self): ...
+    def getcode(self): ...
+    def geturl(self): ...
+
+def unwrap(url): ...
+def splittype(url): ...
+def splithost(url): ...
+def splituser(host): ...
+def splitpasswd(user): ...
+def splitport(host): ...
+def splitnport(host, defport=...): ...
+def splitquery(url): ...
+def splittag(url): ...
+def splitattr(url): ...
+def splitvalue(attr): ...
+def unquote(s: str) -> str: ...
+def unquote_plus(s: str) -> str: ...
+def quote(s: str, safe=...) -> str: ...
+def quote_plus(s: str, safe=...) -> str: ...
+def urlencode(query: Union[Sequence[Tuple[Any, Any]], Mapping[Any, Any]], doseq=...) -> str: ...
+
+def getproxies() -> Mapping[str, str]: ...
+def proxy_bypass(host): ...
+
+# Names in __all__ with no definition:
+#   basejoin
diff --git a/typeshed/stdlib/2.7/urllib2.pyi b/typeshed/stdlib/2.7/urllib2.pyi
new file mode 100644
index 0000000..966e6a8
--- /dev/null
+++ b/typeshed/stdlib/2.7/urllib2.pyi
@@ -0,0 +1,149 @@
+from typing import Dict
+
+from urllib import addinfourl
+
+class URLError(IOError): ...
+
+class HTTPError(URLError, addinfourl): ...
+
+class Request(object):
+    host = ... # type: str
+    port = ... # type: str
+    data = ... # type: str
+    headers = ... # type: Dict[str, str]
+    unverifiable = ... # type: bool
+    type = ...
+    origin_req_host = ...
+    unredirected_hdrs = ...
+
+    def __init__(self, url: str, data: str = ..., headers: Dict[str, str] = ...,
+                 origin_req_host: str = ..., unverifiable: bool = ...) -> None: ...
+    def __getattr__(self, attr): ...
+    def get_method(self) -> str: ...
+    def add_data(self, data) -> None: ...
+    def has_data(self) -> bool: ...
+    def get_data(self) -> str: ...
+    def get_full_url(self) -> str: ...
+    def get_type(self): ...
+    def get_host(self) -> str: ...
+    def get_selector(self): ...
+    def set_proxy(self, host, type) -> None: ...
+    def has_proxy(self) -> bool: ...
+    def get_origin_req_host(self) -> str: ...
+    def is_unverifiable(self) -> bool: ...
+    def add_header(self, key: str, val: str) -> None: ...
+    def add_unredirected_header(self, key: str, val: str) -> None: ...
+    def has_header(self, header_name: str) -> bool: ...
+    def get_header(self, header_name: str, default: str) -> str: ...
+    def header_items(self): ...
+
+class OpenerDirector(object): ...
+
+def urlopen(url, data=..., timeout=...): ...
+def install_opener(opener): ...
+def build_opener(*handlers): ...
+
+class BaseHandler:
+    handler_order = ... # type: int
+
+    def add_parent(self, parent) -> None: ...
+    def close(self) -> None: ...
+    def __lt__(self, other) -> bool: ...
+
+class HTTPErrorProcessor(BaseHandler):
+    def http_response(self, request, response): ...
+
+class HTTPDefaultErrorHandler(BaseHandler):
+    def http_error_default(self, req, fp, code, msg, hdrs): ...
+
+class HTTPRedirectHandler(BaseHandler):
+    max_repeats = ... # type: int
+    max_redirections = ... # type: int
+    def redirect_request(self, req, fp, code, msg, headers, newurl): ...
+    def http_error_302(self, req, fp, code, msg, headers): ...
+    http_error_301 = http_error_303 = http_error_307 = http_error_302
+    inf_msg = ... # type: str
+
+
+class ProxyHandler(BaseHandler):
+    def __init__(self, proxies): ...
+    def proxy_open(self, req, proxy, type): ...
+
+class HTTPPasswordMgr:
+    def __init__(self) -> None: ...
+    def add_password(self, realm, uri, user, passwd): ...
+    def find_user_password(self, realm, authuri): ...
+    def reduce_uri(self, uri, default_port: bool): ...
+    def is_suburi(self, base, test): ...
+
+class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): ...
+
+class AbstractBasicAuthHandler:
+    def __init__(self, password_mgr): ...
+    def reset_retry_count(self): ...
+    def http_error_auth_reqed(self, authreq, host, req, headers): ...
+    def retry_http_basic_auth(self, host, req, realm): ...
+
+class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
+    auth_header = ... # type: str
+    def http_error_401(self, req, fp, code, msg, headers): ...
+
+class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
+    auth_header = ... # type: str
+    def http_error_407(self, req, fp, code, msg, headers): ...
+
+class AbstractDigestAuthHandler:
+    def __init__(self, passwd): ...
+    def reset_retry_count(self): ...
+    def http_error_auth_reqed(self, auth_header, host, req, headers): ...
+    def retry_http_digest_auth(self, req, auth): ...
+    def get_cnonce(self, nonce): ...
+    def get_authorization(self, req, chal): ...
+    def get_algorithm_impls(self, algorithm): ...
+    def get_entity_digest(self, data, chal): ...
+
+class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
+    auth_header = ... # type: str
+    handler_order = ... # type: int
+    def http_error_401(self, req, fp, code, msg, headers): ...
+
+class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
+    auth_header = ... # type: str
+    handler_order = ... # type: int
+    def http_error_407(self, req, fp, code, msg, headers): ...
+
+class AbstractHTTPHandler(BaseHandler):
+    def __init__(self, debuglevel: int = ...) -> None: ...
+    def do_request_(self, request): ...
+    def do_open(self, http_class, req): ...
+
+class HTTPHandler(AbstractHTTPHandler):
+    def http_open(self, req): ...
+    http_request = AbstractHTTPHandler.do_request_
+
+class HTTPSHandler(AbstractHTTPHandler):
+    def https_open(self, req): ...
+    https_request = AbstractHTTPHandler.do_request_
+
+class HTTPCookieProcessor(BaseHandler):
+    def __init__(self, cookiejar): ...
+    def http_request(self, request): ...
+    def http_response(self, request, response): ...
+
+class UnknownHandler(BaseHandler):
+    def unknown_open(self, req): ...
+
+class FileHandler(BaseHandler):
+    def file_open(self, req): ...
+    def get_names(self): ...
+    def open_local_file(self, req): ...
+
+class FTPHandler(BaseHandler):
+    def ftp_open(self, req): ...
+    def connect_ftp(self, user, passwd, host, port, dirs, timeout): ...
+
+class CacheFTPHandler(FTPHandler):
+    def __init__(self): ...
+    def setTimeout(self, t): ...
+    def setMaxConns(self, m): ...
+    def check_cache(self): ...
+    def clear_cache(self): ...
diff --git a/typeshed/stdlib/2.7/urlparse.pyi b/typeshed/stdlib/2.7/urlparse.pyi
new file mode 100644
index 0000000..2fe1951
--- /dev/null
+++ b/typeshed/stdlib/2.7/urlparse.pyi
@@ -0,0 +1,53 @@
+# Stubs for urlparse (Python 2)
+
+from typing import Dict, List, NamedTuple, Tuple, Sequence, overload
+
+uses_relative = []  # type: List[str]
+uses_netloc = []  # type: List[str]
+uses_params = []  # type: List[str]
+non_hierarchical = []  # type: List[str]
+uses_query = []  # type: List[str]
+uses_fragment = []  # type: List[str]
+scheme_chars = ...  # type: str
+MAX_CACHE_SIZE = 0
+
+def clear_cache() -> None: ...
+
+class ResultMixin(object):
+    @property
+    def username(self) -> str: ...
+    @property
+    def password(self) -> str: ...
+    @property
+    def hostname(self) -> str: ...
+    @property
+    def port(self) -> int: ...
+
+class SplitResult(NamedTuple('SplitResult', [
+        ('scheme', str), ('netloc', str), ('path', str), ('query', str), ('fragment', str)
+    ]), ResultMixin):
+    def geturl(self) -> str: ...
+
+class ParseResult(NamedTuple('ParseResult', [
+        ('scheme', str), ('netloc', str), ('path', str), ('params', str), ('query', str),
+        ('fragment', str)
+    ]), ResultMixin):
+    def geturl(self) -> str: ...
+
+def urlparse(url: str, scheme: str = ..., allow_fragments: bool = ...) -> ParseResult: ...
+def urlsplit(url: str, scheme: str = ..., allow_fragments: bool = ...) -> SplitResult: ...
+@overload
+def urlunparse(data: Tuple[str, str, str, str, str, str]) -> str: ...
+@overload
+def urlunparse(data: Sequence[str]) -> str: ...
+@overload
+def urlunsplit(data: Tuple[str, str, str, str, str]) -> str: ...
+@overload
+def urlunsplit(data: Sequence[str]) -> str: ...
+def urljoin(base: str, url: str, allow_fragments: bool = ...) -> str: ...
+def urldefrag(url: str) -> Tuple[str, str]: ...
+def unquote(s: str) -> str: ...
+def parse_qs(qs: str, keep_blank_values: bool = ...,
+             strict_parsing: bool = ...) -> Dict[str, List[str]]: ...
+def parse_qsl(qs: str, keep_blank_values: int = ...,
+              strict_parsing: bool = ...) -> List[Tuple[str, str]]: ...
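Illustrative only (Python 2, where the module is named urlparse): the overloads above accept either the exact-arity tuple or any Sequence[str] for urlunparse/urlunsplit:

    import urlparse

    parts = urlparse.urlparse('http://example.com/a?x=1#frag')   # ParseResult
    url = urlparse.urlunparse(parts)                              # tuple overload
    url = urlparse.urlunparse(['http', 'example.com', '/a', '', 'x=1', 'frag'])
    query = urlparse.parse_qs('x=1&x=2')                          # {'x': ['1', '2']}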
diff --git a/typeshed/stdlib/2.7/uuid.pyi b/typeshed/stdlib/2.7/uuid.pyi
new file mode 100644
index 0000000..485c720
--- /dev/null
+++ b/typeshed/stdlib/2.7/uuid.pyi
@@ -0,0 +1,36 @@
+from typing import NamedTuple, Any, Tuple
+
+_int_type = int
+
+class _UUIDFields(NamedTuple('_UUIDFields',
+                             [('time_low', int), ('time_mid', int),
+                              ('time_hi_version', int), ('clock_seq_hi_variant', int),
+                              ('clock_seq_low', int), ('node', int)])):
+    time = ... # type: int
+    clock_seq = ... # type: int
+
+class UUID:
+    def __init__(self, hex: str = ..., bytes: str = ..., bytes_le: str = ...,
+                  fields: Tuple[int, int, int, int, int, int] = ..., int: int = ..., version: Any = ...) -> None: ...
+    bytes = ... # type: str
+    bytes_le = ... # type: str
+    fields = ... # type: _UUIDFields
+    hex = ... # type: str
+    int = ... # type: _int_type
+    urn = ... # type: str
+    variant = ... # type: str
+    version = ... # type: _int_type
+
+RESERVED_NCS = ... # type: str
+RFC_4122 = ... # type: str
+RESERVED_MICROSOFT = ... # type: str
+RESERVED_FUTURE = ... # type: str
+
+def getnode() -> int: ...
+def uuid1(node: int = ..., clock_seq: int = ...) -> UUID: ...
+def uuid3(namespace: UUID, name: str) -> UUID: ...
+def uuid4() -> UUID: ...
+def uuid5(namespace: UUID, name: str) -> UUID: ...
+
+NAMESPACE_DNS = ... # type: UUID
+NAMESPACE_URL = ... # type: UUID
+NAMESPACE_OID = ... # type: UUID
+NAMESPACE_X500 = ... # type: UUID
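Illustrative only: typical calls this stub is meant to check (uuid5 takes a UUID namespace, hence the annotations above):

    import uuid

    u = uuid.uuid4()
    h = u.hex                                           # str
    i = u.int                                           # int (_int_type above)
    v = uuid.uuid5(uuid.NAMESPACE_DNS, 'example.com')   # UUID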
diff --git a/typeshed/stdlib/2.7/xml/__init__.pyi b/typeshed/stdlib/2.7/xml/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/2.7/xml/sax/__init__.pyi b/typeshed/stdlib/2.7/xml/sax/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/2.7/xml/sax/handler.pyi b/typeshed/stdlib/2.7/xml/sax/handler.pyi
new file mode 100644
index 0000000..935a5a3
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/sax/handler.pyi
@@ -0,0 +1,50 @@
+# Stubs for xml.sax.handler (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+version = ... # type: Any
+
+class ErrorHandler:
+    def error(self, exception): ...
+    def fatalError(self, exception): ...
+    def warning(self, exception): ...
+
+class ContentHandler:
+    def __init__(self) -> None: ...
+    def setDocumentLocator(self, locator): ...
+    def startDocument(self): ...
+    def endDocument(self): ...
+    def startPrefixMapping(self, prefix, uri): ...
+    def endPrefixMapping(self, prefix): ...
+    def startElement(self, name, attrs): ...
+    def endElement(self, name): ...
+    def startElementNS(self, name, qname, attrs): ...
+    def endElementNS(self, name, qname): ...
+    def characters(self, content): ...
+    def ignorableWhitespace(self, whitespace): ...
+    def processingInstruction(self, target, data): ...
+    def skippedEntity(self, name): ...
+
+class DTDHandler:
+    def notationDecl(self, name, publicId, systemId): ...
+    def unparsedEntityDecl(self, name, publicId, systemId, ndata): ...
+
+class EntityResolver:
+    def resolveEntity(self, publicId, systemId): ...
+
+feature_namespaces = ... # type: Any
+feature_namespace_prefixes = ... # type: Any
+feature_string_interning = ... # type: Any
+feature_validation = ... # type: Any
+feature_external_ges = ... # type: Any
+feature_external_pes = ... # type: Any
+all_features = ... # type: Any
+property_lexical_handler = ... # type: Any
+property_declaration_handler = ... # type: Any
+property_dom_node = ... # type: Any
+property_xml_string = ... # type: Any
+property_encoding = ... # type: Any
+property_interning_dict = ... # type: Any
+all_properties = ... # type: Any
diff --git a/typeshed/stdlib/2.7/xml/sax/saxutils.pyi b/typeshed/stdlib/2.7/xml/sax/saxutils.pyi
new file mode 100644
index 0000000..7ff053a
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/sax/saxutils.pyi
@@ -0,0 +1,58 @@
+# Stubs for xml.sax.saxutils (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Mapping
+
+from xml.sax import handler
+from xml.sax import xmlreader
+
+def escape(data: str, entities: Mapping[str, str] = ...) -> str: ...
+def unescape(data: str, entities: Mapping[str, str] = ...) -> str: ...
+def quoteattr(data: str, entities: Mapping[str, str] = ...) -> str: ...
+
+class XMLGenerator(handler.ContentHandler):
+    def __init__(self, out=..., encoding=...) -> None: ...
+    def startDocument(self): ...
+    def endDocument(self): ...
+    def startPrefixMapping(self, prefix, uri): ...
+    def endPrefixMapping(self, prefix): ...
+    def startElement(self, name, attrs): ...
+    def endElement(self, name): ...
+    def startElementNS(self, name, qname, attrs): ...
+    def endElementNS(self, name, qname): ...
+    def characters(self, content): ...
+    def ignorableWhitespace(self, content): ...
+    def processingInstruction(self, target, data): ...
+
+class XMLFilterBase(xmlreader.XMLReader):
+    def __init__(self, parent=...) -> None: ...
+    def error(self, exception): ...
+    def fatalError(self, exception): ...
+    def warning(self, exception): ...
+    def setDocumentLocator(self, locator): ...
+    def startDocument(self): ...
+    def endDocument(self): ...
+    def startPrefixMapping(self, prefix, uri): ...
+    def endPrefixMapping(self, prefix): ...
+    def startElement(self, name, attrs): ...
+    def endElement(self, name): ...
+    def startElementNS(self, name, qname, attrs): ...
+    def endElementNS(self, name, qname): ...
+    def characters(self, content): ...
+    def ignorableWhitespace(self, chars): ...
+    def processingInstruction(self, target, data): ...
+    def skippedEntity(self, name): ...
+    def notationDecl(self, name, publicId, systemId): ...
+    def unparsedEntityDecl(self, name, publicId, systemId, ndata): ...
+    def resolveEntity(self, publicId, systemId): ...
+    def parse(self, source): ...
+    def setLocale(self, locale): ...
+    def getFeature(self, name): ...
+    def setFeature(self, name, state): ...
+    def getProperty(self, name): ...
+    def setProperty(self, name, value): ...
+    def getParent(self): ...
+    def setParent(self, parent): ...
+
+def prepare_input_source(source, base=...): ...
diff --git a/typeshed/stdlib/2.7/xml/sax/xmlreader.pyi b/typeshed/stdlib/2.7/xml/sax/xmlreader.pyi
new file mode 100644
index 0000000..a5f5553
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/sax/xmlreader.pyi
@@ -0,0 +1,75 @@
+# Stubs for xml.sax.xmlreader (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class XMLReader:
+    def __init__(self) -> None: ...
+    def parse(self, source): ...
+    def getContentHandler(self): ...
+    def setContentHandler(self, handler): ...
+    def getDTDHandler(self): ...
+    def setDTDHandler(self, handler): ...
+    def getEntityResolver(self): ...
+    def setEntityResolver(self, resolver): ...
+    def getErrorHandler(self): ...
+    def setErrorHandler(self, handler): ...
+    def setLocale(self, locale): ...
+    def getFeature(self, name): ...
+    def setFeature(self, name, state): ...
+    def getProperty(self, name): ...
+    def setProperty(self, name, value): ...
+
+class IncrementalParser(XMLReader):
+    def __init__(self, bufsize=...) -> None: ...
+    def parse(self, source): ...
+    def feed(self, data): ...
+    def prepareParser(self, source): ...
+    def close(self): ...
+    def reset(self): ...
+
+class Locator:
+    def getColumnNumber(self): ...
+    def getLineNumber(self): ...
+    def getPublicId(self): ...
+    def getSystemId(self): ...
+
+class InputSource:
+    def __init__(self, system_id=...) -> None: ...
+    def setPublicId(self, public_id): ...
+    def getPublicId(self): ...
+    def setSystemId(self, system_id): ...
+    def getSystemId(self): ...
+    def setEncoding(self, encoding): ...
+    def getEncoding(self): ...
+    def setByteStream(self, bytefile): ...
+    def getByteStream(self): ...
+    def setCharacterStream(self, charfile): ...
+    def getCharacterStream(self): ...
+
+class AttributesImpl:
+    def __init__(self, attrs) -> None: ...
+    def getLength(self): ...
+    def getType(self, name): ...
+    def getValue(self, name): ...
+    def getValueByQName(self, name): ...
+    def getNameByQName(self, name): ...
+    def getQNameByName(self, name): ...
+    def getNames(self): ...
+    def getQNames(self): ...
+    def __len__(self): ...
+    def __getitem__(self, name): ...
+    def keys(self): ...
+    def has_key(self, name): ...
+    def __contains__(self, name): ...
+    def get(self, name, alternative=...): ...
+    def copy(self): ...
+    def items(self): ...
+    def values(self): ...
+
+class AttributesNSImpl(AttributesImpl):
+    def __init__(self, attrs, qnames) -> None: ...
+    def getValueByQName(self, name): ...
+    def getNameByQName(self, name): ...
+    def getQNameByName(self, name): ...
+    def getQNames(self): ...
+    def copy(self): ...
diff --git a/typeshed/stdlib/2.7/xxsubtype.pyi b/typeshed/stdlib/2.7/xxsubtype.pyi
new file mode 100644
index 0000000..8612890
--- /dev/null
+++ b/typeshed/stdlib/2.7/xxsubtype.pyi
@@ -0,0 +1,17 @@
+"""Stub file for the 'xxsubtype' module."""
+
+from typing import Any
+
+def bench(obj: Any, name: str, n: int = ...) -> float: ...
+
+class spamdict(dict):
+    state = ...  # type: int
+    def getstate(self) -> int: ...
+    def setstate(self, a: int) -> None: ...
+
+class spamlist(list):
+    state = ...  # type: int
+    def getstate(self) -> int: ...
+    def setstate(self, a: int) -> None: ...
+    def classmeth(self, *args, **kwargs) -> tuple: ...
+    def staticmeth(self, *args, **kwargs) -> tuple: ...
diff --git a/typeshed/stdlib/2.7/zipimport.pyi b/typeshed/stdlib/2.7/zipimport.pyi
new file mode 100644
index 0000000..b972a37
--- /dev/null
+++ b/typeshed/stdlib/2.7/zipimport.pyi
@@ -0,0 +1,25 @@
+"""Stub file for the 'zipimport' module."""
+
+from typing import Dict, Optional
+from types import CodeType, ModuleType
+
+class ZipImportError(ImportError):
+    pass
+
+_zip_directory_cache = ...  # type: Dict[str, dict]
+
+class zipimporter(object):
+    archive = ...  # type: str
+    prefix = ...  # type: str
+    _files = ...  # type: Dict[str, tuple]
+    def __init__(self, path: str) -> None:
+        raise ZipImportError
+    def find_module(self, fullname: str, path: str = ...) -> Optional['zipimporter']: ...
+    def get_code(self, fullname: str) -> CodeType: ...
+    def get_data(self, fullname: str) -> str:
+        raise IOError
+    def get_filename(self, fullname: str) -> str: ...
+    def get_source(self, fullname: str) -> str: ...
+    def is_package(self, fullname: str) -> bool: ...
+    def load_module(self, fullname: str) -> ModuleType: ...
+
diff --git a/typeshed/stdlib/2.7/zlib.pyi b/typeshed/stdlib/2.7/zlib.pyi
new file mode 100644
index 0000000..d6a2c8c
--- /dev/null
+++ b/typeshed/stdlib/2.7/zlib.pyi
@@ -0,0 +1,36 @@
+# Stubs for zlib (Python 2.7)
+
+class error(Exception): ...
+
+DEFLATED = ... # type: int
+DEF_MEM_LEVEL = ... # type: int
+MAX_WBITS = ... # type: int
+ZLIB_VERSION = ... # type: str
+Z_BEST_COMPRESSION = ... # type: int
+Z_BEST_SPEED = ... # type: int
+Z_DEFAULT_COMPRESSION = ... # type: int
+Z_DEFAULT_STRATEGY = ... # type: int
+Z_FILTERED = ... # type: int
+Z_FINISH = ... # type: int
+Z_FULL_FLUSH = ... # type: int
+Z_HUFFMAN_ONLY = ... # type: int
+Z_NO_FLUSH = ... # type: int
+Z_SYNC_FLUSH = ... # type: int
+
+def adler32(data: str, value: int = ...) -> int: ...
+def compress(data: str, level: int = ...) -> str: ...
+def crc32(data: str, value: int = ...) -> int: ...
+def decompress(data: str, wbits: int = ..., bufsize: int = ...) -> str: ...
+
+class compressobj:
+    def __init__(self, level: int = ..., method: int = ..., wbits: int = ..., memlevel: int = ...,
+                 strategy: int = ...) -> None: ...
+    def copy(self) -> "compressobj": ...
+    def compress(self, data: str) -> str: ...
+    def flush(self, mode: int = ...) -> str: ...
+
+class decompressobj:
+    def __init__(self, wbits: int = ...) -> None: ...
+    def copy(self) -> "decompressobj": ...
+    def decompress(self, data: str, max_length: int = ...) -> str: ...
+    def flush(self, length: int = ...) -> str: ...
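Illustrative only (Python 2, where zlib works on str): a round trip through the annotated API:

    import zlib

    data = 'x' * 1000
    packed = zlib.compress(data, 9)          # str
    assert zlib.decompress(packed) == data
    c = zlib.compressobj()
    out = c.compress(data) + c.flush()       # both pieces are str
    assert zlib.decompress(out) == data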
diff --git a/typeshed/stdlib/2and3/_bisect.pyi b/typeshed/stdlib/2and3/_bisect.pyi
new file mode 100644
index 0000000..4b6ad96
--- /dev/null
+++ b/typeshed/stdlib/2and3/_bisect.pyi
@@ -0,0 +1,11 @@
+"""Stub file for the '_bisect' module."""
+
+from typing import Any, Sequence, TypeVar
+
+T = TypeVar('T')
+def bisect(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_left(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_right(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
+def insort(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
+def insort_left(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
+def insort_right(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
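Illustrative only: the shared TypeVar T binds the probe value to the sequence's element type; in practice these names are reached through the public bisect module:

    import bisect

    xs = [1, 3, 5, 7]
    i = bisect.bisect_left(xs, 4)    # T bound to int; i == 2
    bisect.insort(xs, 4)             # xs is now [1, 3, 4, 5, 7]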
diff --git a/typeshed/stdlib/2and3/_heapq.pyi b/typeshed/stdlib/2and3/_heapq.pyi
new file mode 100644
index 0000000..8b7f6ea
--- /dev/null
+++ b/typeshed/stdlib/2and3/_heapq.pyi
@@ -0,0 +1,15 @@
+"""Stub file for the '_heapq' module."""
+
+from typing import TypeVar, List
+
+_T = TypeVar("_T")
+
+def heapify(heap: List[_T]) -> None: ...
+def heappop(heap: List[_T]) -> _T:
+    raise IndexError()  # if list is empty
+def heappush(heap: List[_T], item: _T) -> None: ...
+def heappushpop(heap: List[_T], item: _T) -> _T: ...
+def heapreplace(heap: List[_T], item: _T) -> _T:
+    raise IndexError()  # if list is empty
+def nlargest(a: int, b: List[_T]) -> List[_T]: ...
+def nsmallest(a: int, b: List[_T]) -> List[_T]: ...
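Illustrative only: the same pattern for the heap functions, normally used via the public heapq module:

    import heapq

    heap = [5, 1, 4]
    heapq.heapify(heap)
    heapq.heappush(heap, 2)
    smallest = heapq.heappop(heap)   # _T is int here; IndexError on an empty heap
    top_two = heapq.nlargest(2, heap)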
diff --git a/typeshed/stdlib/2and3/cmath.pyi b/typeshed/stdlib/2and3/cmath.pyi
new file mode 100644
index 0000000..ffd1610
--- /dev/null
+++ b/typeshed/stdlib/2and3/cmath.pyi
@@ -0,0 +1,34 @@
+"""Stub file for the 'cmath' module."""
+
+import sys
+from typing import Union, Tuple
+
+e = ...  # type: float
+pi = ...  # type: float
+
+_C = Union[float, complex]
+
+def acos(x: _C) -> complex: ...
+def acosh(x: _C) -> complex: ...
+def asin(x: _C) -> complex: ...
+def asinh(x: _C) -> complex: ...
+def atan(x: _C) -> complex: ...
+def atanh(x: _C) -> complex: ...
+def cos(x: _C) -> complex: ...
+def cosh(x: _C) -> complex: ...
+def exp(x: _C) -> complex: ...
+def isinf(z: _C) -> bool: ...
+def isnan(z: _C) -> bool: ...
+def log(x: _C, base: _C = ...) -> complex: ...
+def log10(x: _C) -> complex: ...
+def phase(z: _C) -> float: ...
+def polar(z: _C) -> Tuple[float, float]: ...
+def rect(r: float, phi: float) -> complex: ...
+def sin(x: _C) -> complex: ...
+def sinh(x: _C) -> complex: ...
+def sqrt(x: _C) -> complex: ...
+def tan(x: _C) -> complex: ...
+def tanh(x: _C) -> complex: ...
+
+if sys.version_info[0] >= 3:
+    def isfinite(z: _C) -> bool: ...
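Illustrative only: because _C is Union[float, complex], plain floats are accepted everywhere, and results are complex except for phase() and polar():

    import cmath

    z = cmath.sqrt(-1)               # complex: 1j
    r, phi = cmath.polar(1 + 1j)     # Tuple[float, float]
    w = cmath.rect(r, phi)           # back to complex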
diff --git a/typeshed/stdlib/2and3/marshal.pyi b/typeshed/stdlib/2and3/marshal.pyi
new file mode 100644
index 0000000..eb2d57a
--- /dev/null
+++ b/typeshed/stdlib/2and3/marshal.pyi
@@ -0,0 +1,8 @@
+from typing import Any, IO
+
+version = ...  # type: int
+
+def dump(value: Any, file: IO[Any], version: int = ...) -> None: ...
+def load(file: IO[Any]) -> Any: ...
+def dumps(value: Any, version: int = ...) -> str: ...
+def loads(string: str) -> Any: ...
diff --git a/typeshed/stdlib/2and3/math.pyi b/typeshed/stdlib/2and3/math.pyi
new file mode 100644
index 0000000..44fb75c
--- /dev/null
+++ b/typeshed/stdlib/2and3/math.pyi
@@ -0,0 +1,52 @@
+# Stubs for math
+# See: http://docs.python.org/2/library/math.html
+
+from typing import Tuple, Iterable, Optional
+
+import sys
+
+e = ...  # type: float
+pi = ...  # type: float
+
+def acos(x: float) -> float: ...
+def acosh(x: float) -> float: ...
+def asin(x: float) -> float: ...
+def asinh(x: float) -> float: ...
+def atan(x: float) -> float: ...
+def atan2(y: float, x: float) -> float: ...
+def atanh(x: float) -> float: ...
+def ceil(x: float) -> int: ...
+def copysign(x: float, y: float) -> float: ...
+def cos(x: float) -> float: ...
+def cosh(x: float) -> float: ...
+def degrees(x: float) -> float: ...
+def erf(x: float) -> float: ...
+def erfc(x: float) -> float: ...
+def exp(x: float) -> float: ...
+def expm1(x: float) -> float: ...
+def fabs(x: float) -> float: ...
+def factorial(x: int) -> int: ...
+def floor(x: float) -> float: ...
+def fmod(x: float, y: float) -> float: ...
+def frexp(x: float) -> Tuple[float, int]: ...
+def fsum(iterable: Iterable) -> float: ...
+def gamma(x: float) -> float: ...
+def hypot(x: float, y: float) -> float: ...
+def isinf(x: float) -> bool: ...
+if sys.version_info[0] >= 3:
+    def isfinite(x: float) -> bool: ...
+def isnan(x: float) -> bool: ...
+def ldexp(x: float, i: int) -> float: ...
+def lgamma(x: float) -> float: ...
+def log(x: float, base: float = ...) -> float: ...
+def log10(x: float) -> float: ...
+def log1p(x: float) -> float: ...
+def modf(x: float) -> Tuple[float, float]: ...
+def pow(x: float, y: float) -> float: ...
+def radians(x: float) -> float: ...
+def sin(x: float) -> float: ...
+def sinh(x: float) -> float: ...
+def sqrt(x: float) -> float: ...
+def tan(x: float) -> float: ...
+def tanh(x: float) -> float: ...
+def trunc(x: float) -> int: ...
diff --git a/typeshed/stdlib/2and3/operator.pyi b/typeshed/stdlib/2and3/operator.pyi
new file mode 100644
index 0000000..ae5c4ba
--- /dev/null
+++ b/typeshed/stdlib/2and3/operator.pyi
@@ -0,0 +1,127 @@
+# Stubs for operator
+
+from typing import Any, Callable
+
+def __abs__(a: Any) -> Any: ...
+def __add__(a: Any, b: Any) -> Any: ...
+def __and__(a: Any, b: Any) -> Any: ...
+def __concat__(a: Any, b: Any) -> Any: ...
+def __contains__(container: Any, item: Any) -> bool: ...
+def __delitem__(container: Any, item: Any) -> None: ...
+def __delslice__(container: Any, b: int, c: int) -> None: ...
+def __div__(a: Any, b: Any) -> Any: ...
+def __eq__(a: Any, b: Any) -> Any: ...
+def __floordiv__(a: Any, b: Any) -> Any: ...
+def __ge__(a: Any, b: Any) -> Any: ...
+def __getitem__(container: Any, key: Any) -> Any: ...
+def __getslice__(container: Any, b: int, c: int) -> Any: ...
+def __gt__(a: Any, b: Any) -> Any: ...
+def __iadd__(a: Any, b: Any) -> Any: ...
+def __iand__(a: Any, b: Any) -> Any: ...
+def __iconcat__(a: Any, b: Any) -> Any: ...
+def __idiv__(a: Any, b: Any) -> Any: ...
+def __ifloordiv__(a: Any, b: Any) -> Any: ...
+def __ilshift__(a: Any, b: Any) -> Any: ...
+def __imod__(a: Any, b: Any) -> Any: ...
+def __imul__(a: Any, b: Any) -> Any: ...
+def __index__(x: Any) -> Any: ...
+def __inv__(x: Any) -> Any: ...
+def __invert__(x: Any) -> Any: ...
+def __ior__(a: Any, b: Any) -> Any: ...
+def __ipow__(a: Any, b: Any) -> Any: ...
+def __irepeat__(a: Any, b: int) -> Any: ...
+def __irshift__(a: Any, b: Any) -> Any: ...
+def __isub__(a: Any, b: Any) -> Any: ...
+def __itruediv__(a: Any, b: Any) -> Any: ...
+def __ixor__(a: Any, b: Any) -> Any: ...
+def __le__(a: Any, b: Any) -> Any: ...
+def __lshift__(a: Any, b: Any) -> Any: ...
+def __lt__(a: Any, b: Any) -> Any: ...
+def __mod__(a: Any, b: Any) -> Any: ...
+def __mul__(a: Any, b: Any) -> Any: ...
+def __ne__(a: Any, b: Any) -> Any: ...
+def __neg__(x: Any) -> Any: ...
+def __not__(x: Any) -> bool: ...
+def __or__(a: Any, b: Any) -> Any: ...
+def __pos__(x: Any) -> Any: ...
+def __pow__(a: Any, b: Any) -> Any: ...
+def __repeat__(a: Any, b: int) -> Any: ...
+def __rshift__(a: Any, b: Any) -> Any: ...
+def __setitem__(container: Any, key: Any, item: Any) -> None: ...
+def __setslice__(container: Any, b: int, c: int, item: Any) -> None: ...
+def __sub__(a: Any, b: Any) -> Any: ...
+def __truediv__(a: Any, b: Any) -> Any: ...
+def __xor__(a: Any, b: Any) -> Any: ...
+
+def abs(x: Any) -> Any: ...
+def add(a: Any, b: Any) -> Any: ...
+def and_(a: Any, b: Any) -> Any: ...
+def concat(a: Any, b: Any) -> Any: ...
+def contains(container: Any, item: Any) -> bool: ...
+def countOf(container: Any, item: Any) -> int: ...
+def delitem(container: Any, item: Any) -> None: ...
+def delslice(container: Any, b: int, c: int) -> None: ...
+def div(a: Any, b: Any) -> Any: ...
+def eq(a: Any, b: Any) -> Any: ...
+def floordiv(a: Any, b: Any) -> Any: ...
+def ge(a: Any, b: Any) -> Any: ...
+def getitem(a: Any, b: Any) -> Any: ...
+def getslice(container: Any, b: int, c: int) -> Any: ...
+def gt(a: Any, b: Any) -> Any: ...
+def iadd(a: Any, b: Any) -> Any: ...
+def iand(a: Any, b: Any) -> Any: ...
+def iconcat(a: Any, b: Any) -> Any: ...
+def idiv(a: Any, b: Any) -> Any: ...
+def ifloordiv(a: Any, b: Any) -> Any: ...
+def ilshift(a: Any, b: Any) -> Any: ...
+def imod(a: Any, b: Any) -> Any: ...
+def imul(a: Any, b: Any) -> Any: ...
+def index(x: Any) -> Any: ...
+def indexOf(container: Any, item: Any) -> int: ...
+def inv(x: Any) -> Any: ...
+def invert(x: Any) -> Any: ...
+def ior(a: Any, b: Any) -> Any: ...
+def ipow(a: Any, b: Any) -> Any: ...
+def irepeat(a: Any, b: int) -> Any: ...
+def irshift(a: Any, b: Any) -> Any: ...
+def isCallable(x: Any) -> bool: ...
+def isMappingType(x: Any) -> bool: ...
+def isNumberType(x: Any) -> bool: ...
+def isSequenceType(x: Any) -> bool: ...
+def is_(a: Any, b: Any) -> bool: ...
+def is_not(a: Any, b: Any) -> bool: ...
+def isub(a: Any, b: Any) -> Any: ...
+def itruediv(a: Any, b: Any) -> Any: ...
+def ixor(a: Any, b: Any) -> Any: ...
+def le(a: Any, b: Any) -> Any: ...
+def lshift(a: Any, b: Any) -> Any: ...
+def lt(a: Any, b: Any) -> Any: ...
+def mod(a: Any, b: Any) -> Any: ...
+def mul(a: Any, b: Any) -> Any: ...
+def ne(a: Any, b: Any) -> Any: ...
+def neg(x: Any) -> Any: ...
+def not_(x: Any) -> bool: ...
+def or_(a: Any, b: Any) -> Any: ...
+def pos(x: Any) -> Any: ...
+def pow(a: Any, b: Any) -> Any: ...
+def repeat(a: Any, b: int) -> Any: ...
+def rshift(a: Any, b: Any) -> Any: ...
+def sequenceIncludes(seq1: Any, seq2: Any) -> bool: ...
+def setitem(container: Any, key: Any, item: Any) -> None: ...
+def setslice(container: Any, b: int, c: int, slice: Any) -> None: ...
+def sub(a: Any, b: Any) -> Any: ...
+def truediv(a: Any, b: Any) -> Any: ...
+def truth(x: Any) -> bool: ...
+def xor(a: Any, b: Any) -> Any: ...
+
+# Unsupported feature: "If more than one attribute is requested,
+#   returns a tuple of attributes."
+# Unsupported: on Python 2 the parameter type should be `basestring`.
+def attrgetter(attr: str) -> Callable[[Any], Any]: ...
+
+# Unsupported feature: "If multiple items are specified, returns a
+#   tuple of lookup values."
+def itemgetter(item: Any) -> Callable[[Any], Any]: ...
+
+# Unsupported: on Python 2 the parameter type should be `basestring`.
+def methodcaller(name: str, *args, **kwargs) -> Callable[[Any], Any]: ...
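Illustrative only: single-key attrgetter/itemgetter as annotated above; the multi-key forms noted as unsupported in the comments would need overloads returning tuples:

    import operator

    get_name = operator.itemgetter('name')            # Callable[[Any], Any]
    rows = [{'name': 'b'}, {'name': 'a'}]
    rows.sort(key=get_name)
    real_part = operator.attrgetter('real')(3 + 4j)   # 3.0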
diff --git a/typeshed/stdlib/2and3/webbrowser.pyi b/typeshed/stdlib/2and3/webbrowser.pyi
new file mode 100644
index 0000000..95b340e
--- /dev/null
+++ b/typeshed/stdlib/2and3/webbrowser.pyi
@@ -0,0 +1,100 @@
+# Stubs for webbrowser (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+import sys
+from typing import Any, Optional, Callable, List
+
+class Error(Exception): ...
+
+def register(name: str, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser=..., update_tryorder: int=...) -> None: ...
+def get(using: str=...) -> BaseBrowser: ...
+def open(url: str, new: int=..., autoraise: bool=...) -> bool: ...
+def open_new(url: str) -> bool: ...
+def open_new_tab(url: str) -> bool: ...
+
+class BaseBrowser:
+    args = ... # type: List[str]
+    name = ... # type: str
+    basename = ... # type: str
+    def __init__(self, name: str=...) -> None: ...
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+    def open_new(self, url: str) -> bool: ...
+    def open_new_tab(self, url: str) -> bool: ...
+
+class GenericBrowser(BaseBrowser):
+    name = ... # type: str
+    args = ... # type: List[str]
+    basename = ... # type: str
+    def __init__(self, name: str) -> None: ...
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class BackgroundBrowser(GenericBrowser):
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class UnixBrowser(BaseBrowser):
+    raise_opts = ... # type: List[str]
+    background = ... # type: bool
+    redirect_stdout = ... # type: bool
+    remote_args = ... # type: List[str]
+    remote_action = ... # type: str
+    remote_action_newwin = ... # type: str
+    remote_action_newtab = ... # type: str
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class Mozilla(UnixBrowser):
+    raise_opts = ... # type: List[str]
+    remote_args = ... # type: List[str]
+    remote_action = ... # type: str
+    remote_action_newwin = ... # type: str
+    remote_action_newtab = ... # type: str
+    background = ... # type: bool
+
+class Galeon(UnixBrowser):
+    raise_opts = ... # type: List[str]
+    remote_args = ... # type: List[str]
+    remote_action = ... # type: str
+    remote_action_newwin = ... # type: str
+    background = ... # type: bool
+
+if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 3):
+    class Chrome(UnixBrowser):
+        remote_args = ... # type: List[str]
+        remote_action = ... # type: str
+        remote_action_newwin = ... # type: str
+        remote_action_newtab = ... # type: str
+        background = ... # type: bool
+
+class Opera(UnixBrowser):
+    raise_opts = ... # type: List[str]
+    remote_args = ... # type: List[str]
+    remote_action = ... # type: str
+    remote_action_newwin = ... # type: str
+    remote_action_newtab = ... # type: str
+    background = ... # type: bool
+
+class Elinks(UnixBrowser):
+    remote_args = ... # type: List[str]
+    remote_action = ... # type: str
+    remote_action_newwin = ... # type: str
+    remote_action_newtab = ... # type: str
+    background = ... # type: bool
+    redirect_stdout = ... # type: bool
+
+class Konqueror(BaseBrowser):
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class Grail(BaseBrowser):
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class WindowsDefault(BaseBrowser):
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class MacOSX(BaseBrowser):
+    name = ... # type: str
+    def __init__(self, name: str) -> None: ...
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+
+class MacOSXOSAScript(BaseBrowser):
+    def __init__(self, name: str) -> None: ...
+    def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
diff --git a/typeshed/stdlib/3.3/ipaddress.pyi b/typeshed/stdlib/3.3/ipaddress.pyi
new file mode 100644
index 0000000..e568032
--- /dev/null
+++ b/typeshed/stdlib/3.3/ipaddress.pyi
@@ -0,0 +1,200 @@
+# Stubs for ipaddress (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+IPV4LENGTH = ... # type: Any
+IPV6LENGTH = ... # type: Any
+
+class AddressValueError(ValueError): ...
+class NetmaskValueError(ValueError): ...
+
+def ip_address(address): ...
+def ip_network(address, strict=...): ...
+def ip_interface(address): ...
+def v4_int_to_packed(address): ...
+def v6_int_to_packed(address): ...
+def summarize_address_range(first, last): ...
+def collapse_addresses(addresses): ...
+def get_mixed_type_key(obj): ...
+
+class _TotalOrderingMixin:
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+    def __gt__(self, other): ...
+    def __ge__(self, other): ...
+
+class _IPAddressBase(_TotalOrderingMixin):
+    @property
+    def exploded(self): ...
+    @property
+    def compressed(self): ...
+    @property
+    def version(self): ...
+
+class _BaseAddress(_IPAddressBase):
+    def __init__(self, address) -> None: ...
+    def __int__(self): ...
+    def __eq__(self, other): ...
+    def __lt__(self, other): ...
+    def __add__(self, other): ...
+    def __sub__(self, other): ...
+    def __hash__(self): ...
+
+class _BaseNetwork(_IPAddressBase):
+    def __init__(self, address) -> None: ...
+    def hosts(self): ...
+    def __iter__(self): ...
+    def __getitem__(self, n): ...
+    def __lt__(self, other): ...
+    def __eq__(self, other): ...
+    def __hash__(self): ...
+    def __contains__(self, other): ...
+    def overlaps(self, other): ...
+    @property
+    def broadcast_address(self): ...
+    @property
+    def hostmask(self): ...
+    @property
+    def with_prefixlen(self): ...
+    @property
+    def with_netmask(self): ...
+    @property
+    def with_hostmask(self): ...
+    @property
+    def num_addresses(self): ...
+    @property
+    def prefixlen(self): ...
+    def address_exclude(self, other): ...
+    def compare_networks(self, other): ...
+    def subnets(self, prefixlen_diff=..., new_prefix=...): ...
+    def supernet(self, prefixlen_diff=..., new_prefix=...): ...
+    @property
+    def is_multicast(self): ...
+    @property
+    def is_reserved(self): ...
+    @property
+    def is_link_local(self): ...
+    @property
+    def is_private(self): ...
+    @property
+    def is_global(self): ...
+    @property
+    def is_unspecified(self): ...
+    @property
+    def is_loopback(self): ...
+
+class _BaseV4:
+    def __init__(self, address) -> None: ...
+    @property
+    def max_prefixlen(self): ...
+    @property
+    def version(self): ...
+
+class IPv4Address(_BaseV4, _BaseAddress):
+    def __init__(self, address) -> None: ...
+    @property
+    def packed(self): ...
+    @property
+    def is_reserved(self): ...
+    @property
+    def is_private(self): ...
+    @property
+    def is_multicast(self): ...
+    @property
+    def is_unspecified(self): ...
+    @property
+    def is_loopback(self): ...
+    @property
+    def is_link_local(self): ...
+
+class IPv4Interface(IPv4Address):
+    network = ... # type: Any
+    netmask = ... # type: Any
+    hostmask = ... # type: Any
+    def __init__(self, address) -> None: ...
+    def __eq__(self, other): ...
+    def __lt__(self, other): ...
+    def __hash__(self): ...
+    @property
+    def ip(self): ...
+    @property
+    def with_prefixlen(self): ...
+    @property
+    def with_netmask(self): ...
+    @property
+    def with_hostmask(self): ...
+
+class IPv4Network(_BaseV4, _BaseNetwork):
+    network_address = ... # type: Any
+    netmask = ... # type: Any
+    hosts = ... # type: Any
+    def __init__(self, address, strict=...) -> None: ...
+    @property
+    def is_global(self): ...
+
+class _BaseV6:
+    def __init__(self, address) -> None: ...
+    @property
+    def max_prefixlen(self): ...
+    @property
+    def version(self): ...
+
+class IPv6Address(_BaseV6, _BaseAddress):
+    def __init__(self, address) -> None: ...
+    @property
+    def packed(self): ...
+    @property
+    def is_multicast(self): ...
+    @property
+    def is_reserved(self): ...
+    @property
+    def is_link_local(self): ...
+    @property
+    def is_site_local(self): ...
+    @property
+    def is_private(self): ...
+    @property
+    def is_global(self): ...
+    @property
+    def is_unspecified(self): ...
+    @property
+    def is_loopback(self): ...
+    @property
+    def ipv4_mapped(self): ...
+    @property
+    def teredo(self): ...
+    @property
+    def sixtofour(self): ...
+
+class IPv6Interface(IPv6Address):
+    network = ... # type: Any
+    netmask = ... # type: Any
+    hostmask = ... # type: Any
+    def __init__(self, address) -> None: ...
+    def __eq__(self, other): ...
+    def __lt__(self, other): ...
+    def __hash__(self): ...
+    @property
+    def ip(self): ...
+    @property
+    def with_prefixlen(self): ...
+    @property
+    def with_netmask(self): ...
+    @property
+    def with_hostmask(self): ...
+    @property
+    def is_unspecified(self): ...
+    @property
+    def is_loopback(self): ...
+
+class IPv6Network(_BaseV6, _BaseNetwork):
+    network_address = ... # type: Any
+    netmask = ... # type: Any
+    def __init__(self, address, strict=...) -> None: ...
+    def hosts(self): ...
+    @property
+    def is_site_local(self): ...
diff --git a/typeshed/stdlib/3.4/_operator.pyi b/typeshed/stdlib/3.4/_operator.pyi
new file mode 100644
index 0000000..bb5e94f
--- /dev/null
+++ b/typeshed/stdlib/3.4/_operator.pyi
@@ -0,0 +1,108 @@
+"""Stub file for the '_operator' module."""
+# This is an autogenerated file. It serves as a starting point
+# for a more precise manual annotation of this module.
+# Feel free to edit the source below, but remove this header when you do.
+
+from typing import Any, List, Tuple, Dict, Generic
+
+def _compare_digest(a, b) -> bool:
+    raise BufferError()
+    raise TypeError()
+
+def abs(*args, **kwargs) -> Any: ...
+
+def add(*args, **kwargs) -> Any: ...
+
+def and_(*args, **kwargs) -> Any: ...
+
+def concat(*args, **kwargs) -> Any: ...
+
+def contains(*args, **kwargs) -> bool: ...
+
+def countOf(*args, **kwargs) -> int: ...
+
+def delitem(*args, **kwargs) -> None: ...
+
+def eq(*args, **kwargs) -> Any: ...
+
+def floordiv(*args, **kwargs) -> Any: ...
+
+def ge(*args, **kwargs) -> Any: ...
+
+def getitem(*args, **kwargs) -> Any: ...
+
+def gt(*args, **kwargs) -> Any: ...
+
+def iadd(*args, **kwargs) -> Any: ...
+
+def iand(*args, **kwargs) -> Any: ...
+
+def iconcat(*args, **kwargs) -> Any: ...
+
+def ifloordiv(*args, **kwargs) -> Any: ...
+
+def ilshift(*args, **kwargs) -> Any: ...
+
+def imod(*args, **kwargs) -> Any: ...
+
+def imul(*args, **kwargs) -> Any: ...
+
+def index(*args, **kwargs) -> Any: ...
+
+def indexOf(*args, **kwargs) -> int: ...
+
+def inv(*args, **kwargs) -> Any: ...
+
+def invert(*args, **kwargs) -> Any: ...
+
+def ior(*args, **kwargs) -> Any: ...
+
+def ipow(*args, **kwargs) -> Any: ...
+
+def irshift(*args, **kwargs) -> Any: ...
+
+def is_(*args, **kwargs) -> bool: ...
+
+def is_not(*args, **kwargs) -> bool: ...
+
+def isub(*args, **kwargs) -> Any: ...
+
+def itruediv(*args, **kwargs) -> Any: ...
+
+def ixor(*args, **kwargs) -> Any: ...
+
+def le(*args, **kwargs) -> Any: ...
+
+def length_hint(a, *args, **kwargs) -> int: ...
+
+def lshift(*args, **kwargs) -> Any: ...
+
+def lt(*args, **kwargs) -> Any: ...
+
+def mod(*args, **kwargs) -> Any: ...
+
+def mul(*args, **kwargs) -> Any: ...
+
+def ne(*args, **kwargs) -> Any: ...
+
+def neg(*args, **kwargs) -> Any: ...
+
+def not_(*args, **kwargs) -> bool: ...
+
+def or_(*args, **kwargs) -> Any: ...
+
+def pos(*args, **kwargs) -> Any: ...
+
+def pow(*args, **kwargs) -> Any: ...
+
+def rshift(*args, **kwargs) -> Any: ...
+
+def setitem(*args, **kwargs) -> None: ...
+
+def sub(*args, **kwargs) -> Any: ...
+
+def truediv(*args, **kwargs) -> Any: ...
+
+def truth(*args, **kwargs) -> bool: ...
+
+def xor(*args, **kwargs) -> Any: ...
diff --git a/typeshed/stdlib/3.4/_stat.pyi b/typeshed/stdlib/3.4/_stat.pyi
new file mode 100644
index 0000000..b68e176
--- /dev/null
+++ b/typeshed/stdlib/3.4/_stat.pyi
@@ -0,0 +1,69 @@
+"""Stub file for the '_stat' module."""
+
+SF_APPEND = ...  # type: int
+SF_ARCHIVED = ...  # type: int
+SF_IMMUTABLE = ...  # type: int
+SF_NOUNLINK = ...  # type: int
+SF_SNAPSHOT = ...  # type: int
+ST_ATIME = ...  # type: int
+ST_CTIME = ...  # type: int
+ST_DEV = ...  # type: int
+ST_GID = ...  # type: int
+ST_INO = ...  # type: int
+ST_MODE = ...  # type: int
+ST_MTIME = ...  # type: int
+ST_NLINK = ...  # type: int
+ST_SIZE = ...  # type: int
+ST_UID = ...  # type: int
+S_ENFMT = ...  # type: int
+S_IEXEC = ...  # type: int
+S_IFBLK = ...  # type: int
+S_IFCHR = ...  # type: int
+S_IFDIR = ...  # type: int
+S_IFDOOR = ...  # type: int
+S_IFIFO = ...  # type: int
+S_IFLNK = ...  # type: int
+S_IFPORT = ...  # type: int
+S_IFREG = ...  # type: int
+S_IFSOCK = ...  # type: int
+S_IFWHT = ...  # type: int
+S_IREAD = ...  # type: int
+S_IRGRP = ...  # type: int
+S_IROTH = ...  # type: int
+S_IRUSR = ...  # type: int
+S_IRWXG = ...  # type: int
+S_IRWXO = ...  # type: int
+S_IRWXU = ...  # type: int
+S_ISGID = ...  # type: int
+S_ISUID = ...  # type: int
+S_ISVTX = ...  # type: int
+S_IWGRP = ...  # type: int
+S_IWOTH = ...  # type: int
+S_IWRITE = ...  # type: int
+S_IWUSR = ...  # type: int
+S_IXGRP = ...  # type: int
+S_IXOTH = ...  # type: int
+S_IXUSR = ...  # type: int
+UF_APPEND = ...  # type: int
+UF_COMPRESSED = ...  # type: int
+UF_HIDDEN = ...  # type: int
+UF_IMMUTABLE = ...  # type: int
+UF_NODUMP = ...  # type: int
+UF_NOUNLINK = ...  # type: int
+UF_OPAQUE = ...  # type: int
+
+def S_IMODE(mode: int) -> int: ...
+def S_IFMT(mode: int) -> int: ...
+
+def S_ISBLK(mode: int) -> bool: ...
+def S_ISCHR(mode: int) -> bool: ...
+def S_ISDIR(mode: int) -> bool: ...
+def S_ISDOOR(mode: int) -> bool: ...
+def S_ISFIFO(mode: int) -> bool: ...
+def S_ISLNK(mode: int) -> bool: ...
+def S_ISPORT(mode: int) -> bool: ...
+def S_ISREG(mode: int) -> bool: ...
+def S_ISSOCK(mode: int) -> bool: ...
+def S_ISWHT(mode: int) -> bool: ...
+
+def filemode(mode: int) -> str: ...
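
A minimal usage sketch for the mode helpers declared above, assuming the public 'stat' module re-exports these '_stat' names as it does in CPython:

    import os
    import stat

    st = os.stat(".")
    if stat.S_ISDIR(st.st_mode):
        # filemode() renders the bits as an 'ls -l' style string, e.g. 'drwxr-xr-x'
        print(stat.filemode(st.st_mode))
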
diff --git a/typeshed/stdlib/3.4/_tracemalloc.pyi b/typeshed/stdlib/3.4/_tracemalloc.pyi
new file mode 100644
index 0000000..b1db5ab
--- /dev/null
+++ b/typeshed/stdlib/3.4/_tracemalloc.pyi
@@ -0,0 +1,26 @@
+"""Stub file for the '_tracemalloc' module."""
+# This is an autogenerated file. It serves as a starting point
+# for a more precise manual annotation of this module.
+# Feel free to edit the source below, but remove this header when you do.
+
+from typing import Any, List, Tuple, Dict, Generic
+
+def _get_object_traceback(*args, **kwargs) -> Any: ...
+
+def _get_traces() -> Any:
+    raise MemoryError()
+
+def clear_traces() -> None: ...
+
+def get_traceback_limit() -> int: ...
+
+def get_traced_memory() -> tuple: ...
+
+def get_tracemalloc_memory() -> Any: ...
+
+def is_tracing() -> bool: ...
+
+def start(*args, **kwargs) -> None:
+    raise ValueError()
+
+def stop() -> None: ...
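
A minimal sketch of how these functions are typically driven through the public 'tracemalloc' wrapper, assuming standard CPython behavior:

    import tracemalloc

    tracemalloc.start()
    data = [bytes(1000) for _ in range(100)]   # allocate something to trace
    current, peak = tracemalloc.get_traced_memory()
    print("current:", current, "peak:", peak)
    tracemalloc.stop()
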
diff --git a/typeshed/stdlib/3.4/asyncio/__init__.pyi b/typeshed/stdlib/3.4/asyncio/__init__.pyi
new file mode 100644
index 0000000..e22015e
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/__init__.pyi
@@ -0,0 +1,33 @@
+"""The asyncio package, tracking PEP 3156."""
+from asyncio.futures import (
+    Future as Future,
+)
+from asyncio.tasks import (
+    coroutine as coroutine,
+    sleep as sleep,
+    Task as Task,
+    FIRST_COMPLETED as FIRST_COMPLETED,
+    FIRST_EXCEPTION as FIRST_EXCEPTION,
+    ALL_COMPLETED as ALL_COMPLETED,
+    wait as wait,
+    wait_for as wait_for,
+)
+from asyncio.events import (
+    AbstractEventLoopPolicy as AbstractEventLoopPolicy,
+    AbstractEventLoop as AbstractEventLoop,
+    Handle as Handle,
+    get_event_loop as get_event_loop,
+)
+from asyncio.queues import (
+    Queue as Queue,
+    PriorityQueue as PriorityQueue,
+    LifoQueue as LifoQueue,
+    JoinableQueue as JoinableQueue,
+    QueueFull as QueueFull,
+    QueueEmpty as QueueEmpty,
+)
+
+from asyncio import events, futures, queues, tasks
+
+__all__ = (futures.__all__ +
+           tasks.__all__ +
+           events.__all__ +
+           queues.__all__)
diff --git a/typeshed/stdlib/3.4/asyncio/events.pyi b/typeshed/stdlib/3.4/asyncio/events.pyi
new file mode 100644
index 0000000..afdd639
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/events.pyi
@@ -0,0 +1,172 @@
+from typing import Any, Awaitable, TypeVar, List, Callable, Tuple, Union, Dict, Generator
+from abc import ABCMeta, abstractmethod
+from asyncio.futures import Future
+
+# __all__ = ['AbstractServer',
+#            'TimerHandle',
+#            'get_event_loop_policy', 'set_event_loop_policy',
+#            'set_event_loop', 'new_event_loop',
+#            'get_child_watcher', 'set_child_watcher',
+#            ]
+
+
+__all__ = ['AbstractEventLoopPolicy', 'AbstractEventLoop', 'Handle', 'get_event_loop']
+
+_T = TypeVar('_T')
+
+PIPE = ...  # type: Any  # from subprocess.PIPE
+
+AF_UNSPEC = 0     # from socket
+AI_PASSIVE = 0
+
+class Handle:
+    __slots__ = [] # type: List[str]
+    _cancelled = False
+    _args = [] # type: List[Any]
+    def __init__(self, callback: Callable[[],Any], args: List[Any],
+        loop: AbstractEventLoop) -> None: ...
+    def __repr__(self) -> str: ...
+    def cancel(self) -> None: ...
+    def _run(self) -> None: ...
+
+
+class AbstractEventLoop(metaclass=ABCMeta):
+    @abstractmethod
+    def run_forever(self) -> None: ...
+    @abstractmethod
+    def run_until_complete(self, future: Union[Awaitable[_T], Future[_T], Generator[Any, Any, _T]]) -> _T: ...
+    @abstractmethod
+    def stop(self) -> None: ...
+    @abstractmethod
+    def is_running(self) -> bool: ...
+    @abstractmethod
+    def close(self) -> None: ...
+    # Methods scheduling callbacks.  All these return Handles.
+    @abstractmethod
+    def call_soon(self, callback: Callable[[],Any], *args: Any) -> Handle: ...
+    @abstractmethod
+    def call_later(self, delay: Union[int, float], callback: Callable[[],Any], *args: Any) -> Handle: ...
+    @abstractmethod
+    def call_at(self, when: float, callback: Callable[[],Any], *args: Any) -> Handle: ...
+    @abstractmethod
+    def time(self) -> float: ...
+    # Methods for interacting with threads
+    @abstractmethod
+    def call_soon_threadsafe(self, callback: Callable[[],Any], *args: Any) -> Handle: ...
+    @abstractmethod
+    def run_in_executor(self, executor: Any,
+        callback: Callable[[],Any], *args: Any) -> Future[Any]: ...
+    @abstractmethod
+    def set_default_executor(self, executor: Any) -> None: ...
+    # Network I/O methods returning Futures.
+    @abstractmethod
+    def getaddrinfo(self, host: str, port: int, *,
+        family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, tuple]]: ...
+    @abstractmethod
+    def getnameinfo(self, sockaddr: tuple, flags: int = ...) -> Tuple[str, int]: ...
+    @abstractmethod
+    def create_connection(self, protocol_factory: Any, host: str = ..., port: int = ..., *,
+                          ssl: Any = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: Any = ...,
+                          local_addr: str = ..., server_hostname: str = ...) -> tuple: ...
+                          # ?? check Any
+                          # return (Transport, Protocol)
+    @abstractmethod
+    def create_server(self, protocol_factory: Any, host: str = ..., port: int = ..., *,
+                      family: int = ..., flags: int = ...,
+                      sock: Any = ..., backlog: int = ..., ssl: Any = ..., reuse_address: Any = ...) -> Any: ...
+                    # ?? check Any
+                    # return Server
+    @abstractmethod
+    def create_unix_connection(self, protocol_factory: Any, path: str, *,
+                               ssl: Any = ..., sock: Any = ...,
+                               server_hostname: str = ...) -> tuple: ...
+                    # ?? check Any
+                    # return tuple(Transport, Protocol)
+    @abstractmethod
+    def create_unix_server(self, protocol_factory: Any, path: str, *,
+                           sock: Any = ..., backlog: int = ..., ssl: Any = ...) -> Any: ...
+                    # ?? check Any
+                    # return Server
+    @abstractmethod
+    def create_datagram_endpoint(self, protocol_factory: Any,
+                                 local_addr: str = ..., remote_addr: str = ..., *,
+                                 family: int = ..., proto: int = ..., flags: int = ...) -> tuple: ...
+                    #?? check Any
+                    # return (Transport, Protocol)
+    # Pipes and subprocesses.
+    @abstractmethod
+    def connect_read_pipe(self, protocol_factory: Any, pipe: Any) -> tuple: ...
+                    #?? check Any
+                    # return (Transport, Protocol)
+    @abstractmethod
+    def connect_write_pipe(self, protocol_factory: Any, pipe: Any) -> tuple: ...
+                    #?? check Any
+                    # return (Transport, Protocol)
+    @abstractmethod
+    def subprocess_shell(self, protocol_factory: Any, cmd: Union[bytes, str], *, stdin: Any = ...,
+                         stdout: Any = ..., stderr: Any = ...,
+                         **kwargs: Dict[str, Any]) -> tuple: ...
+                    #?? check Any
+                    # return (Transport, Protocol)
+    @abstractmethod
+    def subprocess_exec(self, protocol_factory: Any, *args: List[Any], stdin: Any = ...,
+                        stdout: Any = ..., stderr: Any = ...,
+                        **kwargs: Dict[str, Any]) -> tuple: ...
+                    #?? check Any
+                    # return (Transport, Protocol)
+    @abstractmethod
+    def add_reader(self, fd: int, callback: Callable[[],Any], *args: List[Any]) -> None: ...
+    @abstractmethod
+    def remove_reader(self, fd: int) -> None: ...
+    @abstractmethod
+    def add_writer(self, fd: int, callback: Callable[[],Any], *args: List[Any]) -> None: ...
+    @abstractmethod
+    def remove_writer(self, fd: int) -> None: ...
+    # Completion based I/O methods returning Futures.
+    @abstractmethod
+    def sock_recv(self, sock: Any, nbytes: int) -> Any: ... #TODO
+    @abstractmethod
+    def sock_sendall(self, sock: Any, data: bytes) -> None: ... #TODO
+    @abstractmethod
+    def sock_connect(self, sock: Any, address: str) -> Any: ... #TODO
+    @abstractmethod
+    def sock_accept(self, sock: Any) -> Any: ...
+    # Signal handling.
+    @abstractmethod
+    def add_signal_handler(self, sig: int, callback: Callable[[],Any], *args: List[Any]) -> None: ...
+    @abstractmethod
+    def remove_signal_handler(self, sig: int) -> None: ...
+    # Error handlers.
+    @abstractmethod
+    def set_exception_handler(self, handler: Callable[[], Any]) -> None: ...
+    @abstractmethod
+    def default_exception_handler(self, context: Any) -> None: ...
+    @abstractmethod
+    def call_exception_handler(self, context: Any) -> None: ...
+    # Debug flag management.
+    @abstractmethod
+    def get_debug(self) -> bool: ...
+    @abstractmethod
+    def set_debug(self, enabled: bool) -> None: ...
+
+class AbstractEventLoopPolicy(metaclass=ABCMeta):
+    @abstractmethod
+    def get_event_loop(self) -> AbstractEventLoop: ...
+    @abstractmethod
+    def set_event_loop(self, loop: AbstractEventLoop) -> None: ...
+    @abstractmethod
+    def new_event_loop(self) -> Any: ... # return selector_events.BaseSelectorEventLoop
+    # Child processes handling (Unix only).
+    @abstractmethod
+    def get_child_watcher(self) -> Any: ...  # return unix_events.AbstractChildWatcher
+    @abstractmethod
+    def set_child_watcher(self, watcher: Any) -> None: ... # watcher is a unix_events.AbstractChildWatcher
+
+class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
+    def __init__(self) -> None: ...
+    def get_event_loop(self) -> AbstractEventLoop: ...
+    def set_event_loop(self, loop: AbstractEventLoop) -> None: ...
+    def new_event_loop(self) -> Any: ... # same return type as AbstractEventLoopPolicy.new_event_loop
+
+
+def get_event_loop() -> AbstractEventLoop: ...
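
A minimal sketch of driving the event-loop API declared above, assuming the default loop policy on Python 3.4:

    import asyncio

    loop = asyncio.get_event_loop()
    handle = loop.call_later(0.05, print, "timer fired")   # call_later returns a Handle
    loop.run_until_complete(asyncio.sleep(0.1))            # give the callback time to run
    handle.cancel()                                        # harmless once it has fired
    loop.close()
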
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
new file mode 100644
index 0000000..37e72a1
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -0,0 +1,40 @@
+from typing import Any, Union, Callable, TypeVar, List, Generic, Iterable, Generator
+from asyncio.events import AbstractEventLoop
+# __all__ = ['CancelledError', 'TimeoutError',
+#            'InvalidStateError',
+#            'wrap_future',
+#            ]
+__all__ = ['Future']
+
+_T = TypeVar('_T')
+
+class _TracebackLogger:
+    __slots__ = [] # type: List[str]
+    exc = ...  # type: BaseException
+    tb = [] # type: List[str]
+    def __init__(self, exc: Any, loop: AbstractEventLoop) -> None: ...
+    def activate(self) -> None: ...
+    def clear(self) -> None: ...
+    def __del__(self) -> None: ...
+
+class Future(Iterable[_T], Generic[_T]):
+    _state = ...  # type: str
+    _exception = ... # type: BaseException
+    _blocking = False
+    _log_traceback = False
+    _tb_logger = _TracebackLogger
+    def __init__(self, *, loop: AbstractEventLoop = ...) -> None: ...
+    def __repr__(self) -> str: ...
+    def __del__(self) -> None: ...
+    def cancel(self) -> bool: ...
+    def _schedule_callbacks(self) -> None: ...
+    def cancelled(self) -> bool: ...
+    def done(self) -> bool: ...
+    def result(self) -> _T: ...
+    def exception(self) -> BaseException: ...
+    def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ...
+    def remove_done_callback(self, fn: Callable[[Future[_T]], Any]) -> int: ...
+    def set_result(self, result: _T) -> None: ...
+    def set_exception(self, exception: Union[type, BaseException]) -> None: ...
+    def _copy_state(self, other: Any) -> None: ...
+    def __iter__(self) -> Generator[Any, None, _T]: ...
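
A minimal sketch of the Future protocol declared above, assuming a default event loop is available:

    import asyncio

    loop = asyncio.get_event_loop()
    fut = asyncio.Future(loop=loop)
    fut.add_done_callback(lambda f: print("result:", f.result()))
    loop.call_soon(fut.set_result, 42)      # mark the future done from a callback
    loop.run_until_complete(fut)            # returns 42 and fires the done callback
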
diff --git a/typeshed/stdlib/3.4/asyncio/queues.pyi b/typeshed/stdlib/3.4/asyncio/queues.pyi
new file mode 100644
index 0000000..720864a
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/queues.pyi
@@ -0,0 +1,48 @@
+from typing import TypeVar, Generic
+
+__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
+           'QueueFull', 'QueueEmpty']
+
+from asyncio.events import AbstractEventLoop
+from .tasks import coroutine
+from .futures import Future
+
+
+class QueueEmpty(Exception): ...
+class QueueFull(Exception): ...
+
+T = TypeVar('T')
+
+class Queue(Generic[T]):
+    def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop = ...) -> None: ...
+    def _init(self, maxsize: int) -> None: ...
+    def _get(self) -> T: ...
+    def _put(self, item: T) -> None: ...
+    def __repr__(self) -> str: ...
+    def __str__(self) -> str: ...
+    def _format(self) -> str: ...
+    def _consume_done_getters(self) -> None: ...
+    def _consume_done_putters(self) -> None: ...
+    def qsize(self) -> int: ...
+    @property
+    def maxsize(self) -> int: ...
+    def empty(self) -> bool: ...
+    def full(self) -> bool: ...
+    @coroutine
+    def put(self, item: T) -> Future[None]: ...
+    def put_nowait(self, item: T) -> None: ...
+    @coroutine
+    def get(self) -> Future[T]: ...
+    def get_nowait(self) -> T: ...
+
+
+class PriorityQueue(Queue): ...
+
+
+class LifoQueue(Queue): ...
+
+
+class JoinableQueue(Queue):
+    def task_done(self) -> None: ...
+    @coroutine
+    def join(self) -> None: ...
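
A minimal sketch of the Queue API declared above, using only the non-blocking helpers so no coroutine plumbing is needed:

    import asyncio

    q = asyncio.Queue(maxsize=2)
    q.put_nowait("a")
    q.put_nowait("b")
    assert q.full() and q.qsize() == 2
    assert q.get_nowait() == "a"            # FIFO order for the plain Queue
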
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
new file mode 100644
index 0000000..3adebe1
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -0,0 +1,38 @@
+from typing import Any, Iterable, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Generator
+from asyncio.events import AbstractEventLoop
+from asyncio.futures import Future
+# __all__ = ['iscoroutinefunction', 'iscoroutine',
+#            'as_completed', 'async',
+#            'gather', 'shield',
+#            ]
+
+__all__ = ['coroutine', 'Task', 'sleep',
+            'FIRST_COMPLETED', 'FIRST_EXCEPTION', 'ALL_COMPLETED',
+            'wait', 'wait_for']
+
+FIRST_EXCEPTION = 'FIRST_EXCEPTION'
+FIRST_COMPLETED = 'FIRST_COMPLETED'
+ALL_COMPLETED = 'ALL_COMPLETED'
+_T = TypeVar('_T')
+def coroutine(f: _T) -> _T: ...  # Decorator: takes a function and returns it unchanged (identity for typing purposes).
+def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ...
+def wait(fs: List[Task[_T]], *, loop: AbstractEventLoop = ...,
+    timeout: float = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
+def wait_for(fut: Union[Future[_T], Generator[Any, None, _T]], timeout: float, *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
+
+
+class Task(Future[_T], Generic[_T]):
+    _all_tasks = None  # type: Set[Task]
+    _current_tasks = {}  # type: Dict[AbstractEventLoop, Task]
+    @classmethod
+    def current_task(cls, loop: AbstractEventLoop = ...) -> Task: ...
+    @classmethod
+    def all_tasks(cls, loop: AbstractEventLoop = ...) -> Set[Task]: ...
+    def __init__(self, coro: Union[Future[_T], Generator[Any, None, _T]], *, loop: AbstractEventLoop = ...) -> None: ...
+    def __repr__(self) -> str: ...
+    def get_stack(self, *, limit: int = ...) -> List[Any]: ...  # return List[stackframe]
+    def print_stack(self, *, limit: int = ..., file: TextIO = ...) -> None: ...
+    def cancel(self) -> bool: ...
+    def _step(self, value: Any = ..., exc: Exception = ...) -> None: ...
+    def _wakeup(self, future: Future[Any]) -> None: ...
+
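
A minimal sketch of the coroutine/Task helpers declared above, written in the pre-3.5 generator style these stubs target:

    import asyncio

    @asyncio.coroutine
    def work():
        yield from asyncio.sleep(0.01)
        return "done"

    loop = asyncio.get_event_loop()
    task = asyncio.Task(work())
    result = loop.run_until_complete(asyncio.wait_for(task, timeout=1.0))
    assert result == "done"
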
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
new file mode 100644
index 0000000..dcb3b9c
--- /dev/null
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -0,0 +1,19 @@
+from typing import List, Any, TypeVar
+
+class Enum:
+    def __new__(cls, value: Any) -> None: ...
+    def __repr__(self) -> str: ...
+    def __str__(self) -> str: ...
+    def __dir__(self) -> List[str]: ...
+    def __format__(self, format_spec: str) -> str: ...
+    def __hash__(self) -> Any: ...
+    def __reduce_ex__(self, proto: Any) -> Any: ...
+
+    name = ...  # type: str
+    value = None  # type: Any
+
+class IntEnum(int, Enum): ...
+
+_T = TypeVar('_T')
+
+def unique(enumeration: _T) -> _T: ...
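
A minimal sketch of the Enum API declared above:

    from enum import Enum, unique

    @unique
    class Color(Enum):
        RED = 1
        GREEN = 2

    assert Color.RED.name == "RED" and Color.RED.value == 1
    assert Color(2) is Color.GREEN
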
diff --git a/typeshed/stdlib/3.4/pathlib.pyi b/typeshed/stdlib/3.4/pathlib.pyi
new file mode 100644
index 0000000..dc0aeb3
--- /dev/null
+++ b/typeshed/stdlib/3.4/pathlib.pyi
@@ -0,0 +1,164 @@
+# Stubs for pathlib (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import Sequence
+
+class _Flavour:
+    join = ... # type: Any
+    def __init__(self) -> None: ...
+    def parse_parts(self, parts): ...
+    def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): ...
+
+class _WindowsFlavour(_Flavour):
+    sep = ... # type: Any
+    altsep = ... # type: Any
+    has_drv = ... # type: Any
+    pathmod = ... # type: Any
+    is_supported = ... # type: Any
+    drive_letters = ... # type: Any
+    ext_namespace_prefix = ... # type: Any
+    reserved_names = ... # type: Any
+    def splitroot(self, part, sep=...): ...
+    def casefold(self, s): ...
+    def casefold_parts(self, parts): ...
+    def resolve(self, path): ...
+    def is_reserved(self, parts): ...
+    def make_uri(self, path): ...
+
+class _PosixFlavour(_Flavour):
+    sep = ... # type: Any
+    altsep = ... # type: Any
+    has_drv = ... # type: Any
+    pathmod = ... # type: Any
+    is_supported = ... # type: Any
+    def splitroot(self, part, sep=...): ...
+    def casefold(self, s): ...
+    def casefold_parts(self, parts): ...
+    def resolve(self, path): ...
+    def is_reserved(self, parts): ...
+    def make_uri(self, path): ...
+
+class _Accessor: ...
+
+class _NormalAccessor(_Accessor):
+    stat = ... # type: Any
+    lstat = ... # type: Any
+    open = ... # type: Any
+    listdir = ... # type: Any
+    chmod = ... # type: Any
+    lchmod = ... # type: Any
+    mkdir = ... # type: Any
+    unlink = ... # type: Any
+    rmdir = ... # type: Any
+    rename = ... # type: Any
+    replace = ... # type: Any
+    def symlink(a, b, target_is_directory): ...
+    utime = ... # type: Any
+    def readlink(self, path): ...
+
+class _Selector:
+    child_parts = ... # type: Any
+    successor = ... # type: Any
+    def __init__(self, child_parts) -> None: ...
+    def select_from(self, parent_path): ...
+
+class _TerminatingSelector: ...
+
+class _PreciseSelector(_Selector):
+    name = ... # type: Any
+    def __init__(self, name, child_parts) -> None: ...
+
+class _WildcardSelector(_Selector):
+    pat = ... # type: Any
+    def __init__(self, pat, child_parts) -> None: ...
+
+class _RecursiveWildcardSelector(_Selector):
+    def __init__(self, pat, child_parts) -> None: ...
+
+class _PathParents(Sequence):
+    def __init__(self, path) -> None: ...
+    def __len__(self): ...
+    def __getitem__(self, idx): ...
+
+class PurePath:
+    def __init__(self, *args) -> None: ...
+    def __reduce__(self): ...
+    def as_posix(self): ...
+    def __bytes__(self): ...
+    def as_uri(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def __hash__(self): ...
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+    def __gt__(self, other): ...
+    def __ge__(self, other): ...
+    drive = ... # type: Any
+    root = ... # type: Any
+    @property
+    def anchor(self): ...
+    @property
+    def name(self): ...
+    @property
+    def suffix(self): ...
+    @property
+    def suffixes(self): ...
+    @property
+    def stem(self): ...
+    def with_name(self, name): ...
+    def with_suffix(self, suffix): ...
+    def relative_to(self, *other): ...
+    @property
+    def parts(self): ...
+    def joinpath(self, *args): ...
+    def __truediv__(self, key): ...
+    def __rtruediv__(self, key): ...
+    @property
+    def parent(self): ...
+    @property
+    def parents(self): ...
+    def is_absolute(self): ...
+    def is_reserved(self): ...
+    def match(self, path_pattern): ...
+
+class PurePosixPath(PurePath): ...
+class PureWindowsPath(PurePath): ...
+
+class Path(PurePath):
+    def __init__(self, *args, **kwargs) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, t, v, tb): ...
+    @classmethod
+    def cwd(cls): ...
+    def iterdir(self): ...
+    def glob(self, pattern): ...
+    def rglob(self, pattern): ...
+    def absolute(self): ...
+    def resolve(self): ...
+    def stat(self): ...
+    def owner(self): ...
+    def group(self): ...
+    def open(self, mode=..., buffering=..., encoding=..., errors=..., newline=...): ...
+    def touch(self, mode=..., exist_ok=...): ...
+    def mkdir(self, mode=..., parents=...): ...
+    def chmod(self, mode): ...
+    def lchmod(self, mode): ...
+    def unlink(self): ...
+    def rmdir(self): ...
+    def lstat(self): ...
+    def rename(self, target): ...
+    def replace(self, target): ...
+    def symlink_to(self, target, target_is_directory=...): ...
+    def exists(self): ...
+    def is_dir(self): ...
+    def is_file(self): ...
+    def is_symlink(self): ...
+    def is_block_device(self): ...
+    def is_char_device(self): ...
+    def is_fifo(self): ...
+    def is_socket(self): ...
+
+class PosixPath(Path, PurePosixPath): ...
+class WindowsPath(Path, PureWindowsPath): ...
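
A minimal sketch of the Path API declared above, assuming a POSIX-style filesystem:

    from pathlib import Path

    p = Path("/tmp") / "example.txt"
    print(p.name, p.suffix, p.parent)       # example.txt .txt /tmp
    if not p.exists():
        p.touch()
    with p.open("w") as f:
        f.write("hello")
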
diff --git a/typeshed/stdlib/3/__future__.pyi b/typeshed/stdlib/3/__future__.pyi
new file mode 100644
index 0000000..01265e8
--- /dev/null
+++ b/typeshed/stdlib/3/__future__.pyi
@@ -0,0 +1,9 @@
+class _Feature: ...
+
+absolute_import = ...  # type: _Feature
+division = ...  # type: _Feature
+generators = ...  # type: _Feature
+nested_scopes = ...  # type: _Feature
+print_function = ...  # type: _Feature
+unicode_literals = ...  # type: _Feature
+with_statement = ...  # type: _Feature
diff --git a/typeshed/stdlib/3/_codecs.pyi b/typeshed/stdlib/3/_codecs.pyi
new file mode 100644
index 0000000..23fcd82
--- /dev/null
+++ b/typeshed/stdlib/3/_codecs.pyi
@@ -0,0 +1,51 @@
+"""Stub file for the '_codecs' module."""
+
+from typing import Any, AnyStr, Callable, Tuple, Optional, Dict
+
+import codecs
+
+# For convenience:
+_Handler = Callable[[Exception], Tuple[str, int]]
+
+def register(search_function: Callable[[str], Any]) -> None: ...
+def register_error(errors: str, handler: _Handler) -> None: ...
+def lookup(a: str) -> codecs.CodecInfo: ...
+def lookup_error(a: str) -> _Handler: ...
+def decode(obj: Any, encoding:str = ..., errors:str = ...) -> Any: ...
+def encode(obj: Any, encoding:str = ..., errors:str = ...) -> Any: ...
+def charmap_build(a: str) -> Dict[int, int]: ...
+
+def ascii_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def ascii_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def charbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
+def charmap_decode(data: AnyStr, errors: str = ..., mapping: Optional[Dict[int, int]] = ...) -> Tuple[str, int]: ...
+def charmap_encode(data: AnyStr, errors: str, mapping: Optional[Dict[int, int]] = ...) -> Tuple[bytes, int]: ...
+def escape_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def escape_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def latin_1_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def latin_1_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def raw_unicode_escape_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def raw_unicode_escape_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def readbuffer_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def unicode_escape_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def unicode_escape_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def unicode_internal_decode(data: AnyStr, errors:str = ...) -> Tuple[str, int]: ...
+def unicode_internal_encode(data: AnyStr, errors:str = ...) -> Tuple[bytes, int]: ...
+def utf_16_be_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_be_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_16_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_16_ex_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_le_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_16_le_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_32_be_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_be_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_32_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_32_ex_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_le_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_32_le_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_7_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_7_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
+def utf_8_decode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[str, int]: ...
+def utf_8_encode(data: AnyStr, errors:str = ..., final:int = ...) -> Tuple[bytes, int]: ...
diff --git a/typeshed/stdlib/3/_dummy_thread.pyi b/typeshed/stdlib/3/_dummy_thread.pyi
new file mode 100644
index 0000000..a4ff81c
--- /dev/null
+++ b/typeshed/stdlib/3/_dummy_thread.pyi
@@ -0,0 +1,11 @@
+# Stubs for _dummy_thread
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+class LockType:
+    def acquire(self) -> None: ...
+    def release(self) -> None: ...
+
+def allocate_lock() -> LockType: ...
diff --git a/typeshed/stdlib/3/_io.pyi b/typeshed/stdlib/3/_io.pyi
new file mode 100644
index 0000000..f67f989
--- /dev/null
+++ b/typeshed/stdlib/3/_io.pyi
@@ -0,0 +1,48 @@
+# Stubs for _io (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _IOBase:
+    def __init__(self, *args, **kwargs) -> None: ...
+    @property
+    def closed(self): ...
+    def close(self): ...
+    def fileno(self): ...
+    def flush(self): ...
+    def isatty(self): ...
+    def readable(self): ...
+    def readline(self, size: int = ...): ...
+    def readlines(self, hint: int = ...): ...
+    def seek(self, offset, whence=...): ...
+    def seekable(self): ...
+    def tell(self): ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self): ...
+    def writelines(self, lines): ...
+    def __del__(self): ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_val, exc_tb): ...
+    def __iter__(self): ...
+    def __next__(self): ...
+
+class _BufferedIOBase(_IOBase):
+    def detach(self): ...
+    def read(self, size: int = ...): ...
+    def read1(self, size: int = ...): ...
+    def readinto(self, b): ...
+    def write(self, b): ...
+
+class _RawIOBase(_IOBase):
+    def read(self, size: int = ...): ...
+    def readall(self): ...
+
+class _TextIOBase(_IOBase):
+    encoding = ...  # type: Any
+    errors = ...  # type: Any
+    newlines = ...  # type: Any
+    def detach(self): ...
+    def read(self, size: int = ...): ...
+    def readline(self, size: int = ...): ...
+    def write(self, b): ...
diff --git a/typeshed/stdlib/3/_json.pyi b/typeshed/stdlib/3/_json.pyi
new file mode 100644
index 0000000..adfff55
--- /dev/null
+++ b/typeshed/stdlib/3/_json.pyi
@@ -0,0 +1,30 @@
+"""Stub file for the '_json' module."""
+
+from typing import Any, Tuple
+
+class make_encoder:
+    sort_keys = ...  # type: Any
+    skipkeys = ...  # type: Any
+    key_separator = ...  # type: Any
+    indent = ...  # type: Any
+    markers = ...  # type: Any
+    default = ...  # type: Any
+    encoder = ...  # type: Any
+    item_separator = ...  # type: Any
+    def __init__(self, markers, default, encoder, indent, key_separator,
+                 item_separator, sort_keys, skipkeys, allow_nan) -> None: ...
+    def __call__(self, *args, **kwargs) -> Any: ...
+
+class make_scanner:
+    object_hook = ...  # type: Any
+    object_pairs_hook = ...  # type: Any
+    parse_int = ...  # type: Any
+    parse_constant = ...  # type: Any
+    parse_float = ...  # type: Any
+    strict = ...  # type: bool
+    # TODO: 'context' needs the attrs above (ducktype), but not __call__.
+    def __init__(self, context: "make_scanner") -> None: ...
+    def __call__(self, string: str, index: int) -> Tuple[Any, int]: ...
+
+def encode_basestring_ascii(s: str) -> str: ...
+def scanstring(string: str, end: int, strict:bool = ...) -> Tuple[str, int]: ...
diff --git a/typeshed/stdlib/3/_locale.pyi b/typeshed/stdlib/3/_locale.pyi
new file mode 100644
index 0000000..beed6c9
--- /dev/null
+++ b/typeshed/stdlib/3/_locale.pyi
@@ -0,0 +1,84 @@
+# Stubs for _locale (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Iterable
+
+ABDAY_1 = ...  # type: int
+ABDAY_2 = ...  # type: int
+ABDAY_3 = ...  # type: int
+ABDAY_4 = ...  # type: int
+ABDAY_5 = ...  # type: int
+ABDAY_6 = ...  # type: int
+ABDAY_7 = ...  # type: int
+ABMON_1 = ...  # type: int
+ABMON_10 = ...  # type: int
+ABMON_11 = ...  # type: int
+ABMON_12 = ...  # type: int
+ABMON_2 = ...  # type: int
+ABMON_3 = ...  # type: int
+ABMON_4 = ...  # type: int
+ABMON_5 = ...  # type: int
+ABMON_6 = ...  # type: int
+ABMON_7 = ...  # type: int
+ABMON_8 = ...  # type: int
+ABMON_9 = ...  # type: int
+ALT_DIGITS = ...  # type: int
+AM_STR = ...  # type: int
+CHAR_MAX = ...  # type: int
+CODESET = ...  # type: int
+CRNCYSTR = ...  # type: int
+DAY_1 = ...  # type: int
+DAY_2 = ...  # type: int
+DAY_3 = ...  # type: int
+DAY_4 = ...  # type: int
+DAY_5 = ...  # type: int
+DAY_6 = ...  # type: int
+DAY_7 = ...  # type: int
+D_FMT = ...  # type: int
+D_T_FMT = ...  # type: int
+ERA = ...  # type: int
+ERA_D_FMT = ...  # type: int
+ERA_D_T_FMT = ...  # type: int
+ERA_T_FMT = ...  # type: int
+LC_ALL = ...  # type: int
+LC_COLLATE = ...  # type: int
+LC_CTYPE = ...  # type: int
+LC_MESSAGES = ...  # type: int
+LC_MONETARY = ...  # type: int
+LC_NUMERIC = ...  # type: int
+LC_TIME = ...  # type: int
+MON_1 = ...  # type: int
+MON_10 = ...  # type: int
+MON_11 = ...  # type: int
+MON_12 = ...  # type: int
+MON_2 = ...  # type: int
+MON_3 = ...  # type: int
+MON_4 = ...  # type: int
+MON_5 = ...  # type: int
+MON_6 = ...  # type: int
+MON_7 = ...  # type: int
+MON_8 = ...  # type: int
+MON_9 = ...  # type: int
+NOEXPR = ...  # type: int
+PM_STR = ...  # type: int
+RADIXCHAR = ...  # type: int
+THOUSEP = ...  # type: int
+T_FMT = ...  # type: int
+T_FMT_AMPM = ...  # type: int
+YESEXPR = ...  # type: int
+_DATE_FMT = ...  # type: int
+
+def bind_textdomain_codeset(domain, codeset): ...
+def bindtextdomain(domain, dir): ...
+def dcgettext(domain, msg, category): ...
+def dgettext(domain, msg): ...
+def gettext(msg): ...
+def localeconv(): ...
+def nl_langinfo(key): ...
+def setlocale(category: int, locale: Iterable[str] = ...) -> str: ...
+def strcoll(string1, string2) -> int: ...
+def strxfrm(string): ...
+def textdomain(domain): ...
+
+class Error(Exception): ...
diff --git a/typeshed/stdlib/3/_markupbase.pyi b/typeshed/stdlib/3/_markupbase.pyi
new file mode 100644
index 0000000..129b49b
--- /dev/null
+++ b/typeshed/stdlib/3/_markupbase.pyi
@@ -0,0 +1,9 @@
+from typing import Tuple
+
+class ParserBase(object):
+    def __init__(self) -> None: ...
+    def error(self, message: str) -> None: ...
+    def reset(self) -> None: ...
+    def getpos(self) -> Tuple[int, int]: ...
+
+    def unknown_decl(self, data: str) -> None: ...
diff --git a/typeshed/stdlib/3/_posixsubprocess.pyi b/typeshed/stdlib/3/_posixsubprocess.pyi
new file mode 100644
index 0000000..a048a10
--- /dev/null
+++ b/typeshed/stdlib/3/_posixsubprocess.pyi
@@ -0,0 +1,13 @@
+# Stubs for _posixsubprocess
+
+# NOTE: These are incomplete!
+
+from typing import Tuple, Sequence
+
+def cloexec_pipe() -> Tuple[int, int]: ...
+def fork_exec(args: Sequence[str],
+              executable_list, close_fds, fds_to_keep, cwd: str, env_list,
+              p2cread: int, p2cwrite: int, c2pread: int, c2pwrite: int,
+              errread: int, errwrite: int, errpipe_read: int,
+              errpipe_write: int, restore_signals, start_new_session,
+              preexec_fn) -> int: ...
diff --git a/typeshed/stdlib/3/_random.pyi b/typeshed/stdlib/3/_random.pyi
new file mode 100644
index 0000000..b3fcdb4
--- /dev/null
+++ b/typeshed/stdlib/3/_random.pyi
@@ -0,0 +1,12 @@
+# Stubs for _random
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+class Random:
+    def seed(self, x: Any = ...) -> None: ...
+    def getstate(self) -> tuple: ...
+    def setstate(self, state: tuple) -> None: ...
+    def random(self) -> float: ...
+    def getrandbits(self, k: int) -> int: ...
diff --git a/typeshed/stdlib/3/_subprocess.pyi b/typeshed/stdlib/3/_subprocess.pyi
new file mode 100644
index 0000000..76967b9
--- /dev/null
+++ b/typeshed/stdlib/3/_subprocess.pyi
@@ -0,0 +1,38 @@
+# Stubs for _subprocess
+
+# NOTE: These are incomplete!
+
+from typing import Mapping, Any, Tuple
+
+CREATE_NEW_CONSOLE = 0
+CREATE_NEW_PROCESS_GROUP = 0
+STD_INPUT_HANDLE = 0
+STD_OUTPUT_HANDLE = 0
+STD_ERROR_HANDLE = 0
+SW_HIDE = 0
+STARTF_USESTDHANDLES = 0
+STARTF_USESHOWWINDOW = 0
+INFINITE = 0
+DUPLICATE_SAME_ACCESS = 0
+WAIT_OBJECT_0 = 0
+
+# TODO not exported by the Python module
+class Handle:
+    def Close(self) -> None: ...
+
+def GetVersion() -> int: ...
+def GetExitCodeProcess(handle: Handle) -> int: ...
+def WaitForSingleObject(handle: Handle, timeout: int) -> int: ...
+def CreateProcess(executable: str, cmd_line: str,
+                  proc_attrs, thread_attrs,
+                  inherit: int, flags: int,
+                  env_mapping: Mapping[str, str],
+                  curdir: str,
+                  startupinfo: Any) -> Tuple[Any, Handle, int, int]: ...
+def GetModuleFileName(module: int) -> str: ...
+def GetCurrentProcess() -> Handle: ...
+def DuplicateHandle(source_proc: Handle, source: Handle, target_proc: Handle,
+                    target: Any, access: int, inherit: int) -> int: ...
+def CreatePipe(pipe_attrs, size: int) -> Tuple[Handle, Handle]: ...
+def GetStdHandle(arg: int) -> int: ...
+def TerminateProcess(handle: Handle, exit_code: int) -> None: ...
diff --git a/typeshed/stdlib/3/_thread.pyi b/typeshed/stdlib/3/_thread.pyi
new file mode 100644
index 0000000..fb45d93
--- /dev/null
+++ b/typeshed/stdlib/3/_thread.pyi
@@ -0,0 +1,14 @@
+# Stubs for _thread
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+def _count() -> int: ...
+_dangling = ...  # type: Any
+
+class LockType:
+    def acquire(self) -> None: ...
+    def release(self) -> None: ...
+
+def allocate_lock() -> LockType: ...
diff --git a/typeshed/stdlib/3/_warnings.pyi b/typeshed/stdlib/3/_warnings.pyi
new file mode 100644
index 0000000..03f1be1
--- /dev/null
+++ b/typeshed/stdlib/3/_warnings.pyi
@@ -0,0 +1,11 @@
+from typing import Any, List
+
+_defaultaction = ...  # type: str
+_onceregistry = ...  # type: dict
+filters = ...  # type: List[tuple]
+
+def warn(message: Warning, category:type = ..., stacklevel:int = ...) -> None: ...
+def warn_explicit(message: Warning, category:type,
+                  filename: str, lineno: int,
+                  module:Any = ..., registry:dict = ...,
+                  module_globals:dict = ...) -> None: ...
diff --git a/typeshed/stdlib/3/abc.pyi b/typeshed/stdlib/3/abc.pyi
new file mode 100644
index 0000000..1e32756
--- /dev/null
+++ b/typeshed/stdlib/3/abc.pyi
@@ -0,0 +1,6 @@
+# Stubs for abc.
+
+# These definitions have special processing in the type checker.
+class ABCMeta: ...
+abstractmethod = object()
+abstractproperty = object()
diff --git a/typeshed/stdlib/3/argparse.pyi b/typeshed/stdlib/3/argparse.pyi
new file mode 100644
index 0000000..08ea33c
--- /dev/null
+++ b/typeshed/stdlib/3/argparse.pyi
@@ -0,0 +1,162 @@
+# Stubs for argparse (Python 3)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Sequence
+
+SUPPRESS = ... # type: Any
+OPTIONAL = ... # type: Any
+ZERO_OR_MORE = ... # type: Any
+ONE_OR_MORE = ... # type: Any
+PARSER = ... # type: Any
+REMAINDER = ... # type: Any
+
+class _AttributeHolder: ...
+
+class HelpFormatter:
+    def __init__(self, prog, indent_increment=..., max_help_position=..., width=...) -> None: ...
+    def start_section(self, heading): ...
+    def end_section(self): ...
+    def add_text(self, text): ...
+    def add_usage(self, usage, actions, groups, prefix=...): ...
+    def add_argument(self, action): ...
+    def add_arguments(self, actions): ...
+    def format_help(self): ...
+
+class RawDescriptionHelpFormatter(HelpFormatter): ...
+class RawTextHelpFormatter(RawDescriptionHelpFormatter): ...
+class ArgumentDefaultsHelpFormatter(HelpFormatter): ...
+class MetavarTypeHelpFormatter(HelpFormatter): ...
+
+class ArgumentError(Exception):
+    argument_name = ... # type: Any
+    message = ... # type: Any
+    def __init__(self, argument, message) -> None: ...
+
+class ArgumentTypeError(Exception): ...
+
+class Action(_AttributeHolder):
+    option_strings = ... # type: Any
+    dest = ... # type: Any
+    nargs = ... # type: Any
+    const = ... # type: Any
+    default = ... # type: Any
+    type = ... # type: Any
+    choices = ... # type: Any
+    required = ... # type: Any
+    help = ... # type: Any
+    metavar = ... # type: Any
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreAction(Action):
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreConstAction(Action):
+    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
+                 metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _StoreTrueAction(_StoreConstAction):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+
+class _StoreFalseAction(_StoreConstAction):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+
+class _AppendAction(Action):
+    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
+                 choices=..., required=..., help=..., metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _AppendConstAction(Action):
+    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
+                 metavar=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _CountAction(Action):
+    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _HelpAction(Action):
+    def __init__(self, option_strings, dest=..., default=..., help=...) -> None: ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _VersionAction(Action):
+    version = ... # type: Any
+    def __init__(self, option_strings, version=..., dest=..., default=...,
+                 help=...): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class _SubParsersAction(Action):
+    def __init__(self, option_strings, prog, parser_class, dest=..., help=...,
+                 metavar=...): ...
+    def add_parser(self, name, **kwargs): ...
+    def __call__(self, parser, namespace, values, option_string=...): ...
+
+class FileType:
+    def __init__(self, mode=..., bufsize=..., encoding=..., errors=...) -> None: ...
+    def __call__(self, string): ...
+
+class Namespace(_AttributeHolder):
+    def __init__(self, **kwargs) -> None: ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def __contains__(self, key): ...
+    def __getattr__(self, name: str) -> Any: ...
+
+class _ActionsContainer:
+    description = ... # type: Any
+    argument_default = ... # type: Any
+    prefix_chars = ... # type: Any
+    conflict_handler = ... # type: Any
+    def __init__(self, description, prefix_chars, argument_default, conflict_handler) -> None: ...
+    def register(self, registry_name, value, object): ...
+    def set_defaults(self, **kwargs): ...
+    def get_default(self, dest): ...
+    def add_argument(self,
+        *args: str,
+        action: str = ...,
+        nargs: str = ...,
+        const: Any = ...,
+        default: Any = ...,
+        type: Any = ...,
+        choices: Any = ..., # TODO: Container?
+        required: bool = ...,
+        help: str = ...,
+        metavar: str = ...,
+        dest: str = ...
+    ) -> None: ...
+    def add_argument_group(self, *args, **kwargs): ...
+    def add_mutually_exclusive_group(self, **kwargs): ...
+
+class _ArgumentGroup(_ActionsContainer):
+    title = ... # type: Any
+    def __init__(self, container, title=..., description=..., **kwargs) -> None: ...
+
+class _MutuallyExclusiveGroup(_ArgumentGroup):
+    required = ... # type: Any
+    def __init__(self, container, required=...) -> None: ...
+
+class ArgumentParser(_AttributeHolder, _ActionsContainer):
+    prog = ... # type: Any
+    usage = ... # type: Any
+    epilog = ... # type: Any
+    formatter_class = ... # type: Any
+    fromfile_prefix_chars = ... # type: Any
+    add_help = ... # type: Any
+    def __init__(self, prog=..., usage=..., description=..., epilog=..., parents=...,
+                 formatter_class=..., prefix_chars=..., fromfile_prefix_chars=...,
+                 argument_default=..., conflict_handler=..., add_help=...): ...
+    def add_subparsers(self, **kwargs): ...
+    def parse_args(self, args: Sequence[str] = ..., namespace=...) -> Namespace: ...
+    def parse_known_args(self, args=..., namespace=...): ...
+    def convert_arg_line_to_args(self, arg_line): ...
+    def format_usage(self): ...
+    def format_help(self): ...
+    def print_usage(self, file=...): ...
+    def print_help(self, file=...): ...
+    def exit(self, status=..., message=...): ...
+    def error(self, message): ...
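
A minimal sketch of the ArgumentParser API declared above:

    import argparse

    parser = argparse.ArgumentParser(prog="demo")
    parser.add_argument("--count", type=int, default=1, help="number of repetitions")
    parser.add_argument("name")
    args = parser.parse_args(["--count", "3", "world"])
    assert args.count == 3 and args.name == "world"
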
diff --git a/typeshed/stdlib/3/array.pyi b/typeshed/stdlib/3/array.pyi
new file mode 100644
index 0000000..77ed052
--- /dev/null
+++ b/typeshed/stdlib/3/array.pyi
@@ -0,0 +1,49 @@
+# Stubs for array
+
+# Based on http://docs.python.org/3.2/library/array.html
+
+from typing import Any, Iterable, Tuple, List, Iterator, BinaryIO, overload
+
+typecodes = ...  # type: str
+
+class array:
+    typecode = ...  # type: str
+    itemsize = ...  # type: int
+    def __init__(self, typecode: str,
+                 initializer: Iterable[Any] = ...) -> None: ...
+    def append(self, x: Any) -> None: ...
+    def buffer_info(self) -> Tuple[int, int]: ...
+    def byteswap(self) -> None: ...
+    def count(self, x: Any) -> int: ...
+    def extend(self, iterable: Iterable[Any]) -> None: ...
+    def frombytes(self, s: bytes) -> None: ...
+    def fromfile(self, f: BinaryIO, n: int) -> None: ...
+    def fromlist(self, list: List[Any]) -> None: ...
+    def fromstring(self, s: bytes) -> None: ...
+    def fromunicode(self, s: str) -> None: ...
+    def index(self, x: Any) -> int: ...
+    def insert(self, i: int, x: Any) -> None: ...
+    def pop(self, i: int = ...) -> Any: ...
+    def remove(self, x: Any) -> None: ...
+    def reverse(self) -> None: ...
+    def tobytes(self) -> bytes: ...
+    def tofile(self, f: BinaryIO) -> None: ...
+    def tolist(self) -> List[Any]: ...
+    def tostring(self) -> bytes: ...
+    def tounicode(self) -> str: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[Any]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+
+    @overload
+    def __getitem__(self, i: int) -> Any: ...
+    @overload
+    def __getitem__(self, s: slice) -> 'array': ...
+
+    def __setitem__(self, i: int, o: Any) -> None: ...
+    def __delitem__(self, i: int) -> None: ...
+    def __add__(self, x: 'array') -> 'array': ...
+    def __mul__(self, n: int) -> 'array': ...
+    def __contains__(self, o: object) -> bool: ...
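
A minimal sketch of the array API declared above:

    from array import array

    a = array("i", [1, 2, 3])               # 'i' = signed int typecode
    a.append(4)
    assert a.tolist() == [1, 2, 3, 4]
    assert a[1:3].tolist() == [2, 3]        # slicing returns another array
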
diff --git a/typeshed/stdlib/3/atexit.pyi b/typeshed/stdlib/3/atexit.pyi
new file mode 100644
index 0000000..f4f0ada
--- /dev/null
+++ b/typeshed/stdlib/3/atexit.pyi
@@ -0,0 +1,9 @@
+"""Stub file for the 'atexit' module."""
+
+from typing import Any, Callable
+
+def _clear() -> None: ...
+def _ncallbacks() -> int: ...
+def _run_exitfuncs() -> None: ...
+def register(func: Callable[..., Any], *args, **kwargs) -> Callable[..., Any]: ...
+def unregister(func: Callable[..., Any]) -> None: ...
diff --git a/typeshed/stdlib/3/base64.pyi b/typeshed/stdlib/3/base64.pyi
new file mode 100644
index 0000000..2b5367f
--- /dev/null
+++ b/typeshed/stdlib/3/base64.pyi
@@ -0,0 +1,25 @@
+# Stubs for base64
+
+# Based on http://docs.python.org/3.2/library/base64.html
+
+from typing import IO
+
+def b64encode(s: bytes, altchars: bytes = ...) -> bytes: ...
+def b64decode(s: bytes, altchars: bytes = ...,
+              validate: bool = ...) -> bytes: ...
+def standard_b64encode(s: bytes) -> bytes: ...
+def standard_b64decode(s: bytes) -> bytes: ...
+def urlsafe_b64encode(s: bytes) -> bytes: ...
+def urlsafe_b64decode(s: bytes) -> bytes: ...
+def b32encode(s: bytes) -> bytes: ...
+def b32decode(s: bytes, casefold: bool = ...,
+              map01: bytes = ...) -> bytes: ...
+def b16encode(s: bytes) -> bytes: ...
+def b16decode(s: bytes, casefold: bool = ...) -> bytes: ...
+
+def decode(input: IO[bytes], output: IO[bytes]) -> None: ...
+def decodebytes(s: bytes) -> bytes: ...
+def decodestring(s: bytes) -> bytes: ...
+def encode(input: IO[bytes], output: IO[bytes]) -> None: ...
+def encodebytes(s: bytes) -> bytes: ...
+def encodestring(s: bytes) -> bytes: ...
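
A minimal round-trip sketch for the base64 API declared above:

    import base64

    data = b"hello"
    encoded = base64.b64encode(data)
    assert encoded == b"aGVsbG8="
    assert base64.b64decode(encoded) == data
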
diff --git a/typeshed/stdlib/3/binascii.pyi b/typeshed/stdlib/3/binascii.pyi
new file mode 100644
index 0000000..9f96136
--- /dev/null
+++ b/typeshed/stdlib/3/binascii.pyi
@@ -0,0 +1,26 @@
+# Stubs for binascii
+
+# Based on http://docs.python.org/3.2/library/binascii.html
+
+import typing
+
+def a2b_uu(string: bytes) -> bytes: ...
+def b2a_uu(data: bytes) -> bytes: ...
+def a2b_base64(string: bytes) -> bytes: ...
+def b2a_base64(data: bytes) -> bytes: ...
+def a2b_qp(string: bytes, header: bool = ...) -> bytes: ...
+def b2a_qp(data: bytes, quotetabs: bool = ..., istext: bool = ...,
+             header: bool = ...) -> bytes: ...
+def a2b_hqx(string: bytes) -> bytes: ...
+def rledecode_hqx(data: bytes) -> bytes: ...
+def rlecode_hqx(data: bytes) -> bytes: ...
+def b2a_hqx(data: bytes) -> bytes: ...
+def crc_hqx(data: bytes, crc: int) -> int: ...
+def crc32(data: bytes, crc: int = ...) -> int: ...
+def b2a_hex(data: bytes) -> bytes: ...
+def hexlify(data: bytes) -> bytes: ...
+def a2b_hex(hexstr: bytes) -> bytes: ...
+def unhexlify(hexstr: bytes) -> bytes: ...
+
+class Error(Exception): ...
+class Incomplete(Exception): ...
diff --git a/typeshed/stdlib/3/bisect.pyi b/typeshed/stdlib/3/bisect.pyi
new file mode 100644
index 0000000..954cd70
--- /dev/null
+++ b/typeshed/stdlib/3/bisect.pyi
@@ -0,0 +1,12 @@
+from typing import Sequence, TypeVar
+
+_T = TypeVar('_T')
+
+def insort_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ...
+def insort_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ...
+
+def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ...
+
+insort = insort_right
+bisect = bisect_right
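
A minimal sketch of the bisect/insort helpers declared above:

    import bisect

    xs = [1, 3, 5]
    bisect.insort(xs, 4)                    # inserts in place, keeping xs sorted
    assert xs == [1, 3, 4, 5]
    assert bisect.bisect_left(xs, 3) == 1
    assert bisect.bisect_right(xs, 3) == 2
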
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
new file mode 100644
index 0000000..7a6a499
--- /dev/null
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -0,0 +1,811 @@
+# Stubs for builtins (Python 3)
+
+from typing import (
+    TypeVar, Iterator, Iterable, overload,
+    Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic,
+    Set, AbstractSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsBytes,
+    SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView, ByteString
+)
+from abc import abstractmethod, ABCMeta
+
+# Note that names imported above are not automatically made visible via the
+# implicit builtins import.
+
+_T = TypeVar('_T')
+_T_co = TypeVar('_T_co', covariant=True)
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_S = TypeVar('_S')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+
+staticmethod = object() # Only valid as a decorator.
+classmethod = object() # Only valid as a decorator.
+property = object()
+
+class object:
+    __doc__ = ...  # type: str
+    __class__ = ...  # type: type
+
+    def __init__(self) -> None: ...
+    def __new__(cls) -> Any: ...
+    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __eq__(self, o: object) -> bool: ...
+    def __ne__(self, o: object) -> bool: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __hash__(self) -> int: ...
+
+class type:
+    __name__ = ...  # type: str
+    __qualname__ = ...  # type: str
+    __module__ = ...  # type: str
+    __dict__ = ...  # type: Dict[str, Any]
+
+    def __init__(self, o: object) -> None: ...
+    @staticmethod
+    def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+
+class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+    def __init__(self, x: Union[SupportsInt, str, bytes] = None, base: int = None) -> None: ...
+    def bit_length(self) -> int: ...
+    def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ...
+    @classmethod
+    def from_bytes(cls, bytes: Sequence[int], byteorder: str, *,
+                   signed: bool = ...) -> int: ...  # TODO buffer object argument
+
+    def __add__(self, x: int) -> int: ...
+    def __sub__(self, x: int) -> int: ...
+    def __mul__(self, x: int) -> int: ...
+    def __floordiv__(self, x: int) -> int: ...
+    def __truediv__(self, x: int) -> float: ...
+    def __mod__(self, x: int) -> int: ...
+    def __radd__(self, x: int) -> int: ...
+    def __rsub__(self, x: int) -> int: ...
+    def __rmul__(self, x: int) -> int: ...
+    def __rfloordiv__(self, x: int) -> int: ...
+    def __rtruediv__(self, x: int) -> float: ...
+    def __rmod__(self, x: int) -> int: ...
+    def __pow__(self, x: int) -> Any: ...  # Return type can be int or float, depending on x.
+    def __rpow__(self, x: int) -> Any: ...
+    def __and__(self, n: int) -> int: ...
+    def __or__(self, n: int) -> int: ...
+    def __xor__(self, n: int) -> int: ...
+    def __lshift__(self, n: int) -> int: ...
+    def __rshift__(self, n: int) -> int: ...
+    def __rand__(self, n: int) -> int: ...
+    def __ror__(self, n: int) -> int: ...
+    def __rxor__(self, n: int) -> int: ...
+    def __rlshift__(self, n: int) -> int: ...
+    def __rrshift__(self, n: int) -> int: ...
+    def __neg__(self) -> int: ...
+    def __pos__(self) -> int: ...
+    def __invert__(self) -> int: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: int) -> bool: ...
+    def __le__(self, x: int) -> bool: ...
+    def __gt__(self, x: int) -> bool: ...
+    def __ge__(self, x: int) -> bool: ...
+
+    def __str__(self) -> str: ...
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: return self
+    def __abs__(self) -> int: ...
+    def __hash__(self) -> int: ...
+
+class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+    def __init__(self, x: Union[SupportsFloat, str, bytes]=None) -> None: ...
+    def as_integer_ratio(self) -> Tuple[int, int]: ...
+    def hex(self) -> str: ...
+    def is_integer(self) -> bool: ...
+    @classmethod
+    def fromhex(cls, s: str) -> float: ...
+
+    def __add__(self, x: float) -> float: ...
+    def __sub__(self, x: float) -> float: ...
+    def __mul__(self, x: float) -> float: ...
+    def __floordiv__(self, x: float) -> float: ...
+    def __truediv__(self, x: float) -> float: ...
+    def __mod__(self, x: float) -> float: ...
+    def __pow__(self, x: float) -> float: ...
+    def __radd__(self, x: float) -> float: ...
+    def __rsub__(self, x: float) -> float: ...
+    def __rmul__(self, x: float) -> float: ...
+    def __rfloordiv__(self, x: float) -> float: ...
+    def __rtruediv__(self, x: float) -> float: ...
+    def __rmod__(self, x: float) -> float: ...
+    def __rpow__(self, x: float) -> float: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: float) -> bool: ...
+    def __le__(self, x: float) -> bool: ...
+    def __gt__(self, x: float) -> bool: ...
+    def __ge__(self, x: float) -> bool: ...
+    def __neg__(self) -> float: ...
+    def __pos__(self) -> float: ...
+
+    def __str__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class complex(SupportsAbs[float]):
+    @overload
+    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    @overload
+    def __init__(self, s: str) -> None: ...
+
+    @property
+    def real(self) -> float: ...
+    @property
+    def imag(self) -> float: ...
+
+    def conjugate(self) -> complex: ...
+
+    def __add__(self, x: complex) -> complex: ...
+    def __sub__(self, x: complex) -> complex: ...
+    def __mul__(self, x: complex) -> complex: ...
+    def __pow__(self, x: complex) -> complex: ...
+    def __truediv__(self, x: complex) -> complex: ...
+    def __radd__(self, x: complex) -> complex: ...
+    def __rsub__(self, x: complex) -> complex: ...
+    def __rmul__(self, x: complex) -> complex: ...
+    def __rpow__(self, x: complex) -> complex: ...
+    def __rtruediv__(self, x: complex) -> complex: ...
+
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __neg__(self) -> complex: ...
+    def __pos__(self) -> complex: ...
+
+    def __str__(self) -> str: ...
+    def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class str(Sequence[str]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, o: object) -> None: ...
+    @overload
+    def __init__(self, o: bytes, encoding: str = None, errors: str = 'strict') -> None: ...
+    def capitalize(self) -> str: ...
+    def center(self, width: int, fillchar: str = ' ') -> str: ...
+    def count(self, x: str) -> int: ...
+    def encode(self, encoding: str = 'utf-8', errors: str = 'strict') -> bytes: ...
+    def endswith(self, suffix: Union[str, Tuple[str, ...]], start: int = None,
+                 end: int = None) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def find(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+    def format(self, *args: Any, **kwargs: Any) -> str: ...
+    def format_map(self, map: Mapping[str, Any]) -> str: ...
+    def index(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdecimal(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def isidentifier(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isnumeric(self) -> bool: ...
+    def isprintable(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[str]) -> str: ...
+    def ljust(self, width: int, fillchar: str = ' ') -> str: ...
+    def lower(self) -> str: ...
+    def lstrip(self, chars: str = None) -> str: ...
+    def partition(self, sep: str) -> Tuple[str, str, str]: ...
+    def replace(self, old: str, new: str, count: int = -1) -> str: ...
+    def rfind(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: str = ' ') -> str: ...
+    def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
+    def rsplit(self, sep: str = None, maxsplit: int = -1) -> List[str]: ...
+    def rstrip(self, chars: str = None) -> str: ...
+    def split(self, sep: str = None, maxsplit: int = -1) -> List[str]: ...
+    def splitlines(self, keepends: bool = ...) -> List[str]: ...
+    def startswith(self, prefix: Union[str, Tuple[str, ...]], start: int = None,
+                   end: int = None) -> bool: ...
+    def strip(self, chars: str = None) -> str: ...
+    def swapcase(self) -> str: ...
+    def title(self) -> str: ...
+    def translate(self, table: Dict[int, Any]) -> str: ...
+    def upper(self) -> str: ...
+    def zfill(self, width: int) -> str: ...
+    @staticmethod
+    @overload
+    def maketrans(x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
+    @staticmethod
+    @overload
+    def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
+
+    def __getitem__(self, i: Union[int, slice]) -> str: ...
+    def __add__(self, s: str) -> str: ...
+    def __mul__(self, n: int) -> str: ...
+    def __rmul__(self, n: int) -> str: ...
+    def __mod__(self, *args: Any) -> str: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: str) -> bool: ...
+    def __le__(self, x: str) -> bool: ...
+    def __gt__(self, x: str) -> bool: ...
+    def __ge__(self, x: str) -> bool: ...
+
+    def __len__(self) -> int: ...
+    def __contains__(self, s: object) -> bool: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __str__(self) -> str: return self
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+
+class bytes(ByteString):
+    @overload
+    def __init__(self, ints: Iterable[int]) -> None: ...
+    @overload
+    def __init__(self, string: str, encoding: str,
+                 errors: str = 'strict') -> None: ...
+    @overload
+    def __init__(self, length: int) -> None: ...
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, o: SupportsBytes) -> None: ...
+    def capitalize(self) -> bytes: ...
+    def center(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def count(self, x: bytes) -> int: ...
+    def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
+    def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> bytes: ...
+    def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[bytes]) -> bytes: ...
+    def ljust(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def lower(self) -> bytes: ...
+    def lstrip(self, chars: bytes = None) -> bytes: ...
+    def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
+    def replace(self, old: bytes, new: bytes, count: int = -1) -> bytes: ...
+    def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
+    def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytes]: ...
+    def rstrip(self, chars: bytes = None) -> bytes: ...
+    def split(self, sep: bytes = None, maxsplit: int = -1) -> List[bytes]: ...
+    def splitlines(self, keepends: bool = ...) -> List[bytes]: ...
+    def startswith(self, prefix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
+    def strip(self, chars: bytes = None) -> bytes: ...
+    def swapcase(self) -> bytes: ...
+    def title(self) -> bytes: ...
+    def translate(self, table: bytes) -> bytes: ...
+    def upper(self) -> bytes: ...
+    def zfill(self, width: int) -> bytes: ...
+    @classmethod
+    def fromhex(cls, s: str) -> bytes: ...
+    @classmethod
+    def maketrans(cls, frm: bytes, to: bytes) -> bytes: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> bytes: ...
+    def __add__(self, s: bytes) -> bytes: ...
+    def __mul__(self, n: int) -> bytes: ...
+    def __rmul__(self, n: int) -> bytes: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: bytes) -> bool: ...
+    def __le__(self, x: bytes) -> bool: ...
+    def __gt__(self, x: bytes) -> bool: ...
+    def __ge__(self, x: bytes) -> bool: ...
+
+class bytearray(MutableSequence[int], ByteString):
+    @overload
+    def __init__(self, ints: Iterable[int]) -> None: ...
+    @overload
+    def __init__(self, string: str, encoding: str, errors: str = 'strict') -> None: ...
+    @overload
+    def __init__(self, length: int) -> None: ...
+    @overload
+    def __init__(self) -> None: ...
+    def capitalize(self) -> bytearray: ...
+    def center(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def count(self, x: bytes) -> int: ...
+    def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
+    def endswith(self, suffix: bytes) -> bool: ...
+    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
+    def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def insert(self, index: int, object: int) -> None: ...
+    def isalnum(self) -> bool: ...
+    def isalpha(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, iterable: Iterable[bytes]) -> bytearray: ...
+    def ljust(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def lower(self) -> bytearray: ...
+    def lstrip(self, chars: bytes = None) -> bytearray: ...
+    def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def replace(self, old: bytes, new: bytes, count: int = -1) -> bytearray: ...
+    def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    def rjust(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
+    def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytearray]: ...
+    def rstrip(self, chars: bytes = None) -> bytearray: ...
+    def split(self, sep: bytes = None, maxsplit: int = -1) -> List[bytearray]: ...
+    def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
+    def startswith(self, prefix: bytes) -> bool: ...
+    def strip(self, chars: bytes = None) -> bytearray: ...
+    def swapcase(self) -> bytearray: ...
+    def title(self) -> bytearray: ...
+    def translate(self, table: bytes) -> bytearray: ...
+    def upper(self) -> bytearray: ...
+    def zfill(self, width: int) -> bytearray: ...
+    @classmethod
+    def fromhex(cls, s: str) -> bytearray: ...
+    @classmethod
+    def maketrans(cls, frm: bytes, to: bytes) -> bytes: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[int]: ...
+    def __str__(self) -> str: ...
+    def __repr__(self) -> str: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> bytearray: ...
+    @overload
+    def __setitem__(self, i: int, x: int) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, x: Sequence[int]) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    def __add__(self, s: bytes) -> bytearray: ...
+    # TODO: Mypy complains about __add__ and __iadd__ having different signatures.
+    def __iadd__(self, s: Iterable[int]) -> bytearray: ... # type: ignore
+    def __mul__(self, n: int) -> bytearray: ...
+    def __rmul__(self, n: int) -> bytearray: ...
+    def __imul__(self, n: int) -> bytearray: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __eq__(self, x: object) -> bool: ...
+    def __ne__(self, x: object) -> bool: ...
+    def __lt__(self, x: bytes) -> bool: ...
+    def __le__(self, x: bytes) -> bool: ...
+    def __gt__(self, x: bytes) -> bool: ...
+    def __ge__(self, x: bytes) -> bool: ...
+
+class memoryview():
+    # TODO arg can be any obj supporting the buffer protocol
+    def __init__(self, bytearray) -> None: ...
+
+class bool(int, SupportsInt, SupportsFloat):
+    def __init__(self, o: object = ...) -> None: ...
+
+class slice:
+    start = 0
+    step = 0
+    stop = 0
+    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+
+class tuple(Sequence[_T_co], Generic[_T_co]):
+    def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, x: object) -> bool: ...
+    @overload
+    def __getitem__(self, x: int) -> _T_co: ...
+    @overload
+    def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ...
+    def __iter__(self) -> Iterator[_T_co]: ...
+    def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __le__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ...
+    def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ...
+    def __mul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
+    def count(self, x: Any) -> int: ...
+    def index(self, x: Any) -> int: ...
+
+class function:
+    # TODO not defined in builtins!
+    __name__ = ...  # type: str
+    __qualname__ = ...  # type: str
+    __module__ = ...  # type: str
+    __code__ = ... # type: Any
+
+class list(MutableSequence[_T], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    def clear(self) -> None: ...
+    def copy(self) -> List[_T]: ...
+    def append(self, object: _T) -> None: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def pop(self, index: int = -1) -> _T: ...
+    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def count(self, object: _T) -> int: ...
+    def insert(self, index: int, object: _T) -> None: ...
+    def remove(self, object: _T) -> None: ...
+    def reverse(self) -> None: ...
+    def sort(self, *, key: Callable[[_T], Any] = None, reverse: bool = ...) -> None: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> _T: ...
+    @overload
+    def __getitem__(self, s: slice) -> List[_T]: ...
+    @overload
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    def __add__(self, x: List[_T]) -> List[_T]: ...
+    def __iadd__(self, x: Iterable[_T]) -> List[_T]: ...
+    def __mul__(self, n: int) -> List[_T]: ...
+    def __rmul__(self, n: int) -> List[_T]: ...
+    def __imul__(self, n: int) -> List[_T]: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __gt__(self, x: List[_T]) -> bool: ...
+    def __ge__(self, x: List[_T]) -> bool: ...
+    def __lt__(self, x: List[_T]) -> bool: ...
+    def __le__(self, x: List[_T]) -> bool: ...
+
+class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...  # TODO keyword args
+    def clear(self) -> None: ...
+    def copy(self) -> Dict[_KT, _VT]: ...
+    def get(self, k: _KT, default: _VT = None) -> _VT: ...
+    def pop(self, k: _KT, default: _VT = None) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = None) -> _VT: ...
+    def update(self, m: Union[Mapping[_KT, _VT],
+                              Iterable[Tuple[_KT, _VT]]]) -> None: ...
+    def keys(self) -> KeysView[_KT]: ...
+    def values(self) -> ValuesView[_VT]: ...
+    def items(self) -> ItemsView[_KT, _VT]: ...
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    @staticmethod
+    @overload
+    def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
+    def __len__(self) -> int: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __str__(self) -> str: ...
+
+class set(MutableSet[_T], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def add(self, element: _T) -> None: ...
+    def clear(self) -> None: ...
+    def copy(self) -> set[_T]: ...
+    def difference(self, s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def discard(self, element: _T) -> None: ...
+    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def pop(self) -> _T: ...
+    def remove(self, element: _T) -> None: ...
+    def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+    def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
+    def union(self, s: Iterable[_T]) -> set[_T]: ...
+    def update(self, s: Iterable[_T]) -> None: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    # TODO more set operations
+
+class frozenset(AbstractSet[_T], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def copy(self) -> frozenset[_T]: ...
+    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
+    def issubset(self, s: AbstractSet[Any]) -> bool: ...
+    def issuperset(self, s: AbstractSet[Any]) -> bool: ...
+    def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+
+class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
+    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
+    def __next__(self) -> Tuple[int, _T]: ...
+    # TODO __getattribute__
+
+class range(Sequence[int]):
+    @overload
+    def __init__(self, stop: int) -> None: ...
+    @overload
+    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def count(self, value: int) -> int: ...
+    def index(self, value: int, start: int = 0, stop: int = None) -> int: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[int]: ...
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> range: ...
+    def __repr__(self) -> str: ...
+    def __reversed__(self) -> Iterator[int]: ...
+
+class module:
+    # TODO not defined in builtins!
+    __name__ = ...  # type: str
+    __file__ = ...  # type: str
+    __dict__ = ...  # type: Dict[str, Any]
+
+True = ...  # type: bool
+False = ...  # type: bool
+__debug__ = False
+
+NotImplemented = ...  # type: Any
+
+def abs(n: SupportsAbs[_T]) -> _T: ...
+def all(i: Iterable) -> bool: ...
+def any(i: Iterable) -> bool: ...
+def ascii(o: object) -> str: ...
+def bin(number: int) -> str: ...
+def callable(o: object) -> bool: ...
+def chr(code: int) -> str: ...
+def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = 0,
+            dont_inherit: int = 0) -> Any: ...
+def copyright() -> None: ...
+def credits() -> None: ...
+def delattr(o: Any, name: str) -> None: ...
+def dir(o: object = None) -> List[str]: ...
+_N = TypeVar('_N', int, float)
+def divmod(a: _N, b: _N) -> Tuple[_N, _N]: ...
+def eval(source: str, globals: Dict[str, Any] = None,
+         locals: Mapping[str, Any] = None) -> Any: ...  # TODO code object as source
+def exec(object: str, globals: Dict[str, Any] = None,
+         locals: Mapping[str, Any] = None) -> Any: ...  # TODO code object as source
+def exit(code: int = None) -> None: ...
+def filter(function: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ...
+def format(o: object, format_spec: str = '') -> str: ...
+def getattr(o: Any, name: str, default: Any = None) -> Any: ...
+def globals() -> Dict[str, Any]: ...
+def hasattr(o: Any, name: str) -> bool: ...
+def hash(o: object) -> int: ...
+def help(*args: Any, **kwds: Any) -> None: ...
+def hex(i: int) -> str: ...  # TODO __index__
+def id(o: object) -> int: ...
+def input(prompt: str = None) -> str: ...
+@overload
+def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
+@overload
+def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
+def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: type) -> bool: ...
+# TODO support this
+#def issubclass(type cld, classinfo: Sequence[type]) -> bool: ...
+def len(o: Sized) -> int: ...
+def license() -> None: ...
+def locals() -> Dict[str, Any]: ...
+@overload
+def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ...
+@overload
+def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> Iterator[_S]: ...  # TODO more than two iterables
+@overload
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None, default:_T = None) -> _T: ...
+# TODO memoryview
+@overload
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+@overload
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None, default:_T = None) -> _T: ...
+@overload
+def next(i: Iterator[_T]) -> _T: ...
+@overload
+def next(i: Iterator[_T], default: _T) -> _T: ...
+def oct(i: int) -> str: ...  # TODO __index__
+def open(file: Union[str, bytes, int], mode: str = 'r', buffering: int = -1, encoding: str = None,
+         errors: str = None, newline: str = None, closefd: bool = ...) -> IO[Any]: ...
+def ord(c: Union[str, bytes, bytearray]) -> int: ...
+# TODO: in Python 3.2, print() does not support flush
+def print(*values: Any, sep: str = ' ', end: str = '\n', file: IO[str] = None, flush: bool = False) -> None: ...
+@overload
+def pow(x: int, y: int) -> Any: ...  # The return type can be int or float, depending on y
+@overload
+def pow(x: int, y: int, z: int) -> Any: ...
+@overload
+def pow(x: float, y: float) -> float: ...
+@overload
+def pow(x: float, y: float, z: float) -> float: ...
+def quit(code: int = None) -> None: ...
+@overload
+def reversed(object: Reversible[_T]) -> Iterator[_T]: ...
+@overload
+def reversed(object: Sequence[_T]) -> Iterator[_T]: ...
+def repr(o: object) -> str: ...
+@overload
+def round(number: float) -> int: ...
+@overload
+def round(number: float, ndigits: int) -> float: ...  # Always return a float if given ndigits.
+@overload
+def round(number: SupportsRound[_T]) -> _T: ...
+@overload
+def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
+def setattr(object: Any, name: str, value: Any) -> None: ...
+def sorted(iterable: Iterable[_T], *, key: Callable[[_T], Any] = None,
+           reverse: bool = False) -> List[_T]: ...
+def sum(iterable: Iterable[_T], start: _T = None) -> _T: ...
+def vars(object: Any = None) -> Dict[str, Any]: ...
+@overload
+def zip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+        iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+        iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2,
+                                               _T3, _T4]]: ... # TODO more than four iterables
+def __import__(name: str, globals: Dict[str, Any] = {}, locals: Dict[str, Any] = {},
+               fromlist: List[str] = [], level: int = -1) -> Any: ...
+
+# Ellipsis
+
+# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
+# not exposed anywhere under that name, we make it private here.
+class ellipsis: ...
+Ellipsis = ...  # type: ellipsis
+
+# Exceptions
+
+class BaseException:
+    args = ...  # type: Any
+    message = ...  # type: str
+    def __init__(self, *args: Any) -> None: ...
+    def with_traceback(self, tb: Any) -> BaseException: ...
+
+class GeneratorExit(BaseException): ...
+class KeyboardInterrupt(BaseException): ...
+class SystemExit(BaseException):
+    code = 0
+class Exception(BaseException): ...
+class ArithmeticError(Exception): ...
+class EnvironmentError(Exception):
+    errno = 0
+    strerror = ...  # type: str
+    # TODO can this be bytes?
+    filename = ...  # type: str
+class LookupError(Exception): ...
+class RuntimeError(Exception): ...
+class ValueError(Exception): ...
+class AssertionError(Exception): ...
+class AttributeError(Exception): ...
+class BufferError(Exception): ...
+class EOFError(Exception): ...
+class FloatingPointError(ArithmeticError): ...
+class IOError(EnvironmentError): ...
+class ImportError(Exception): ...
+class IndexError(LookupError): ...
+class KeyError(LookupError): ...
+class MemoryError(Exception): ...
+class NameError(Exception): ...
+class NotImplementedError(RuntimeError): ...
+class OSError(EnvironmentError): ...
+class BlockingIOError(OSError):
+    characters_written = 0
+class ChildProcessError(OSError): ...
+class ConnectionError(OSError): ...
+class BrokenPipeError(ConnectionError): ...
+class ConnectionAbortedError(ConnectionError): ...
+class ConnectionRefusedError(ConnectionError): ...
+class ConnectionResetError(ConnectionError): ...
+class FileExistsError(OSError): ...
+class FileNotFoundError(OSError): ...
+class InterruptedError(OSError): ...
+class IsADirectoryError(OSError): ...
+class NotADirectoryError(OSError): ...
+class PermissionError(OSError): ...
+class ProcessLookupError(OSError): ...
+class TimeoutError(OSError): ...
+class WindowsError(OSError): ...
+class OverflowError(ArithmeticError): ...
+class ReferenceError(Exception): ...
+class StopIteration(Exception): ...
+class SyntaxError(Exception): ...
+class IndentationError(SyntaxError): ...
+class TabError(IndentationError): ...
+class SystemError(Exception): ...
+class TypeError(Exception): ...
+class UnboundLocalError(NameError): ...
+class UnicodeError(ValueError): ...
+class UnicodeDecodeError(UnicodeError):
+    encoding = ... # type: str
+    object = ... # type: bytes
+    start = ... # type: int
+    end = ... # type: int
+    reason = ... # type: str
+    def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int,
+                 __reason: str) -> None: ...
+class UnicodeEncodeError(UnicodeError): ...
+class UnicodeTranslateError(UnicodeError): ...
+class ZeroDivisionError(ArithmeticError): ...
+
+class Warning(Exception): ...
+class UserWarning(Warning): ...
+class DeprecationWarning(Warning): ...
+class SyntaxWarning(Warning): ...
+class RuntimeWarning(Warning): ...
+class FutureWarning(Warning): ...
+class PendingDeprecationWarning(Warning): ...
+class ImportWarning(Warning): ...
+class UnicodeWarning(Warning): ...
+class BytesWarning(Warning): ...
+class ResourceWarning(Warning): ...
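
As an aside on how these builtins stubs are consumed: the overloaded signatures
above (round(), pow(), zip(), ...) are what let the checker derive a precise
return type from the argument types. A minimal, hypothetical snippet of user
code that exercises them:

    x = round(2.675)        # matches round(number: float) -> int
    y = round(2.675, 2)     # matches round(number: float, ndigits: int) -> float
    p = pow(2.0, 0.5)       # float/float overload -> float
    q = pow(2, 10)          # int/int overload is declared to return Any
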
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3/bz2.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/calendar.pyi b/typeshed/stdlib/3/calendar.pyi
new file mode 100644
index 0000000..c0bfc73
--- /dev/null
+++ b/typeshed/stdlib/3/calendar.pyi
@@ -0,0 +1,15 @@
+# Stubs for calendar
+
+# NOTE: These are incomplete!
+
+from typing import overload, Tuple
+
+# TODO actually, any number of items larger than 5 is fine
+@overload
+def timegm(t: Tuple[int, int, int, int, int, int]) -> int: ...
+@overload
+def timegm(t: Tuple[int, int, int, int, int, int, int]) -> int: ...
+@overload
+def timegm(t: Tuple[int, int, int, int, int, int, int, int]) -> int: ...
+@overload
+def timegm(t: Tuple[int, int, int, int, int, int, int, int, int]) -> int: ...
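
For reference, timegm() is the inverse of time.gmtime(): it reads a UTC time
tuple and returns seconds since the epoch; the overloads above only pin down
the common tuple lengths. A small usage sketch:

    import calendar, time

    now = calendar.timegm(time.gmtime())                    # current UTC time as an int
    zero = calendar.timegm((1970, 1, 1, 0, 0, 0, 3, 1, 0))  # -> 0
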
diff --git a/typeshed/stdlib/3/cgi.pyi b/typeshed/stdlib/3/cgi.pyi
new file mode 100644
index 0000000..f251f31
--- /dev/null
+++ b/typeshed/stdlib/3/cgi.pyi
@@ -0,0 +1 @@
+def escape(s: str, quote: bool = ...) -> str: ...
diff --git a/typeshed/stdlib/3/codecs.pyi b/typeshed/stdlib/3/codecs.pyi
new file mode 100644
index 0000000..94b33dd
--- /dev/null
+++ b/typeshed/stdlib/3/codecs.pyi
@@ -0,0 +1,194 @@
+# Better codecs stubs hand-written by o11c.
+# https://docs.python.org/3/library/codecs.html
+from typing import (
+        BinaryIO,
+        Callable,
+        Iterable,
+        Iterator,
+        List,
+        Tuple,
+        Union,
+)
+
+from abc import abstractmethod
+
+
+# TODO: this only satisfies the most common interface, where
+# bytes is the raw form and str is the cooked form.
+# In the long run, both should become template parameters maybe?
+# There *are* bytes->bytes and str->str encodings in the standard library.
+# Python 3.5 supposedly might change something there.
+
+_decoded = str
+_encoded = bytes
+
+# TODO: It is not possible to specify these signatures correctly, because
+# they have an optional positional or keyword argument for errors=.
+_encode_type = Callable[[_decoded], _encoded] # signature of Codec().encode
+_decode_type = Callable[[_encoded], _decoded] # signature of Codec().decode
+_stream_reader_type = Callable[[BinaryIO], 'StreamReader'] # signature of StreamReader __init__
+_stream_writer_type = Callable[[BinaryIO], 'StreamWriter'] # signature of StreamWriter __init__
+_incremental_encoder_type = Callable[[], 'IncrementalEncoder'] # signature of IncrementalEncoder __init__
+_incremental_decode_type = Callable[[], 'IncrementalDecoder'] # signature of IncrementalDecoder __init__
+
+
+def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded:
+    ...
+def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
+    ...
+
+def lookup(encoding: str) -> 'CodecInfo':
+    ...
+class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
+    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decode_type = ..., name: str = ...) -> None:
+        self.encode = encode
+        self.decode = decode
+        self.streamreader = streamreader
+        self.streamwriter = streamwriter
+        self.incrementalencoder = incrementalencoder
+        self.incrementaldecoder = incrementaldecoder
+        self.name = name
+
+def getencoder(encoding: str) -> _encode_type:
+    ...
+def getdecoder(encoding: str) -> _decode_type:
+    ...
+def getincrementalencoder(encoding: str) -> _incremental_encoder_type:
+    ...
+def getincrementaldecoder(encoding: str) -> _incremental_decode_type:
+    ...
+def getreader(encoding: str) -> _stream_reader_type:
+    ...
+def getwriter(encoding: str) -> _stream_writer_type:
+    ...
+
+def register(search_function: Callable[[str], CodecInfo]) -> None:
+    ...
+
+def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter:
+    ...
+
+def EncodedFile(file: BinaryIO, data_encoding: str, file_encoding: str = ..., errors = ...) -> 'StreamRecoder':
+    ...
+
+def iterencode(iterator: Iterable[_decoded], encoding: str, errors: str = ...) -> Iterator[_encoded]:
+    ...
+def iterdecode(iterator: Iterable[_encoded], encoding: str, errors: str = ...) -> Iterator[_decoded]:
+    ...
+
+BOM = b''
+BOM_BE = b''
+BOM_LE = b''
+BOM_UTF8 = b''
+BOM_UTF16 = b''
+BOM_UTF16_BE = b''
+BOM_UTF16_LE = b''
+BOM_UTF32 = b''
+BOM_UTF32_BE = b''
+BOM_UTF32_LE = b''
+
+# It is expected that different actions be taken depending on which of the
+# three subclasses of `UnicodeError` is actually ...ed. However, the Union
+# is still needed for at least one of the cases.
+def register_error(name: str, error_handler: Callable[[UnicodeError], Tuple[Union[str, bytes], int]]) -> None:
+    ...
+def lookup_error(name: str) -> Callable[[UnicodeError], Tuple[Union[str, bytes], int]]:
+    ...
+
+def strict_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def replace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def ignore_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+def backslashreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
+    ...
+
+class Codec:
+    # These are sort of @abstractmethod but sort of not.
+    # The StreamReader and StreamWriter subclasses only implement one.
+    def encode(self, input: _decoded, errors: str = ...) -> Tuple[_encoded, int]:
+        ...
+    def decode(self, input: _encoded, errors: str = ...) -> Tuple[_decoded, int]:
+        ...
+
+class IncrementalEncoder:
+    def __init__(self, errors: str = ...) -> None:
+        self.errors = errors
+    @abstractmethod
+    def encode(self, object: _decoded, final: bool = ...) -> _encoded:
+        ...
+    def reset(self) -> None:
+        ...
+    # documentation says int but str is needed for the subclass.
+    def getstate(self) -> Union[int, _decoded]:
+        ...
+    def setstate(self, state: Union[int, _decoded]) -> None:
+        ...
+
+class IncrementalDecoder:
+    def __init__(self, errors: str = ...) -> None:
+        self.errors = errors
+    @abstractmethod
+    def decode(self, object: _encoded, final: bool = ...) -> _decoded:
+        ...
+    def reset(self) -> None:
+        ...
+    def getstate(self) -> Tuple[_encoded, int]:
+        ...
+    def setstate(self, state: Tuple[_encoded, int]) -> None:
+        ...
+
+# These are not documented but used in encodings/*.py implementations.
+class BufferedIncrementalEncoder(IncrementalEncoder):
+    def __init__(self, errors: str = ...) -> None:
+        IncrementalEncoder.__init__(self, errors)
+        self.buffer = ''
+    @abstractmethod
+    def _buffer_encode(self, input: _decoded, errors: str, final: bool) -> _encoded:
+        ...
+    def encode(self, input: _decoded, final: bool = ...) -> _encoded:
+        ...
+class BufferedIncrementalDecoder(IncrementalDecoder):
+    def __init__(self, errors: str = ...) -> None:
+        IncrementalDecoder.__init__(self, errors)
+        self.buffer = b''
+    @abstractmethod
+    def _buffer_decode(self, input: _encoded, errors: str, final: bool) -> Tuple[_decoded, int]:
+        ...
+    def decode(self, object: _encoded, final: bool = ...) -> _decoded:
+        ...
+
+# TODO: it is not possible to specify the requirement that all other
+# attributes and methods are passed-through from the stream.
+class StreamWriter(Codec):
+    def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+        self.errors = errors
+    def write(self, obj: _decoded) -> None:
+        ...
+    def writelines(self, list: List[str]) -> None:
+        ...
+    def reset(self) -> None:
+        ...
+
+class StreamReader(Codec):
+    def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+        self.errors = errors
+    def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _decoded:
+        ...
+    def readline(self, size: int = ..., keepends: bool = ...) -> _decoded:
+        ...
+    def readlines(self, sizehint: int = ..., keepends: bool = ...) -> List[_decoded]:
+        ...
+    def reset(self) -> None:
+        ...
+
+class StreamReaderWriter:
+    def __init__(self, stream: BinaryIO, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
+        ...
+
+class StreamRecoder(BinaryIO):
+    def __init__(self, stream: BinaryIO, encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
+        ...
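
The register_error()/lookup_error() pair above is what motivates the
Tuple[Union[str, bytes], int] return type: an error handler receives the
UnicodeError subclass and returns a replacement plus the position at which to
resume decoding or encoding. A minimal runtime sketch (the handler name is
made up for illustration):

    import codecs

    def questionmark_errors(exc):
        # replace the offending span with '?' and continue after it
        return ('?', exc.end)

    codecs.register_error('questionmark', questionmark_errors)
    b'caf\xe9'.decode('ascii', errors='questionmark')   # -> 'caf?'
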
diff --git a/typeshed/stdlib/3/collections.pyi b/typeshed/stdlib/3/collections.pyi
new file mode 100644
index 0000000..4939e92
--- /dev/null
+++ b/typeshed/stdlib/3/collections.pyi
@@ -0,0 +1,134 @@
+# Stubs for collections
+
+# Based on http://docs.python.org/3.2/library/collections.html
+
+# TODO more abstract base classes (interfaces in mypy)
+
+# These are not exported.
+from typing import (
+    TypeVar, Iterable, Generic, Iterator, Dict, overload,
+    Mapping, List, Tuple, Callable, Sized,
+    Optional, Union
+)
+# These are exported.
+# TODO reexport more.
+from typing import (
+    MutableMapping as MutableMapping,
+    Sequence as Sequence,
+    MutableSequence as MutableSequence,
+    AbstractSet as Set,
+)
+
+_T = TypeVar('_T')
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+
+
+# namedtuple is special-cased in the type checker; the initializer is ignored.
+namedtuple = object()
+
+class UserDict(MutableMapping): ...
+class UserList(MutableSequence): ...
+class UserString(Sequence): ...
+class MutableString(UserString, MutableSequence): ...
+
+# Technically, deque only derives from MutableSequence in 3.5.
+# But in practice it's not worth losing sleep over.
+class deque(MutableSequence[_T], Generic[_T]):
+    maxlen = 0 # type: Optional[int] # TODO readonly
+    def __init__(self, iterable: Iterable[_T] = ...,
+                 maxlen: int = ...) -> None: ...
+    def append(self, x: _T) -> None: ...
+    def appendleft(self, x: _T) -> None: ...
+    def insert(self, i: int, x: _T) -> None: ...
+    def clear(self) -> None: ...
+    def count(self, x: _T) -> int: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def extendleft(self, iterable: Iterable[_T]) -> None: ...
+    def pop(self, i: int = ...) -> _T: ...
+    def popleft(self) -> _T: ...
+    def remove(self, value: _T) -> None: ...
+    def reverse(self) -> None: ...
+    def rotate(self, n: int) -> None: ...
+
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __str__(self) -> str: ...
+    def __hash__(self) -> int: ...
+
+    # These methods of deque don't really take slices, but we need to
+    # define them as taking a slice to satisfy MutableSequence.
+    @overload
+    def __getitem__(self, index: int) -> _T: ...
+    @overload
+    def __getitem__(self, s: slice) -> Sequence[_T]: raise TypeError
+    @overload
+    def __setitem__(self, i: int, x: _T) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: raise TypeError
+    @overload
+    def __delitem__(self, i: int) -> None: ...
+    @overload
+    def __delitem__(self, s: slice) -> None: raise TypeError
+
+    def __contains__(self, o: object) -> bool: ...
+
+    # TODO __reversed__
+
+
+class Counter(Dict[_T, int], Generic[_T]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, Mapping: Mapping[_T, int]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[_T]) -> None: ...
+    # TODO keyword arguments
+
+    def elements(self) -> Iterator[_T]: ...
+
+    @overload
+    def most_common(self) -> List[_T]: ...
+    @overload
+    def most_common(self, n: int) -> List[_T]: ...
+
+    @overload
+    def subtract(self, Mapping: Mapping[_T, int]) -> None: ...
+    @overload
+    def subtract(self, iterable: Iterable[_T]) -> None: ...
+
+    # The Iterable[Tuple[...]] argument type is not actually desirable
+    # (the tuples will be added as keys, breaking type safety) but
+    # it's included so that the signature is compatible with
+    # Dict.update. Not sure if we should use '# type: ignore' instead
+    # and omit the type from the union.
+    def update(self, m: Union[Mapping[_T, int],
+                              Iterable[Tuple[_T, int]],
+                              Iterable[_T]]) -> None: ...
+
+class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
+    def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
+    def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
+
+
+class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
+    default_factory = ...  # type: Callable[[], _VT]
+
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT],
+                 map: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def __init__(self, default_factory: Callable[[], _VT],
+                 iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    # TODO __init__ keyword args
+
+    def __missing__(self, key: _KT) -> _VT: ...
+    # TODO __reversed__
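
The Counter.update() union discussed in the comment above, and defaultdict's
__missing__ hook, behave like this at runtime (values are illustrative):

    from collections import Counter, defaultdict

    c = Counter('abracadabra')
    c.update(['a', 'b'])        # Iterable[_T]: each element adds 1
    c.update({'z': 3})          # Mapping[_T, int]: adds the given counts

    d = defaultdict(list)       # default_factory is Callable[[], List[int]]
    d['new'].append(1)          # __missing__ calls list() for unknown keys
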
diff --git a/typeshed/stdlib/3/contextlib.pyi b/typeshed/stdlib/3/contextlib.pyi
new file mode 100644
index 0000000..af9f0ab
--- /dev/null
+++ b/typeshed/stdlib/3/contextlib.pyi
@@ -0,0 +1,15 @@
+# Stubs for contextlib
+
+# NOTE: These are incomplete!
+
+from typing import Any, TypeVar, Generic
+
+# TODO more precise type?
+def contextmanager(func: Any) -> Any: ...
+
+_T = TypeVar('_T')
+
+class closing(Generic[_T]):
+    def __init__(self, thing: _T) -> None: ...
+    def __enter__(self) -> _T: ...
+    def __exit__(self, *exc_info) -> None: ...
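
closing() is generic over the wrapped object: __enter__ hands back the same _T
and close() is called on exit, so it suits objects that have close() but no
context-manager support. The classic example:

    from contextlib import closing
    from urllib.request import urlopen

    with closing(urlopen('http://www.python.org')) as page:
        first_line = page.readline()
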
diff --git a/typeshed/stdlib/3/copy.pyi b/typeshed/stdlib/3/copy.pyi
new file mode 100644
index 0000000..237f420
--- /dev/null
+++ b/typeshed/stdlib/3/copy.pyi
@@ -0,0 +1,10 @@
+# Stubs for copy
+
+# NOTE: These are incomplete!
+
+from typing import TypeVar
+
+_T = TypeVar('_T')
+
+def deepcopy(x: _T) -> _T: ...
+def copy(x: _T) -> _T: ...
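
Both functions are typed _T -> _T; they differ only in how much of the object
graph is duplicated. A quick illustration:

    import copy

    outer = [[1, 2], [3, 4]]
    shallow = copy.copy(outer)      # new outer list, inner lists shared
    deep = copy.deepcopy(outer)     # inner lists cloned as well
    assert shallow[0] is outer[0]
    assert deep[0] is not outer[0]
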
diff --git a/typeshed/stdlib/3/csv.pyi b/typeshed/stdlib/3/csv.pyi
new file mode 100644
index 0000000..7b41cc8
--- /dev/null
+++ b/typeshed/stdlib/3/csv.pyi
@@ -0,0 +1,77 @@
+# Stubs for csv (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Iterable
+
+QUOTE_ALL = ...  # type: int
+QUOTE_MINIMAL = ...  # type: int
+QUOTE_NONE = ...  # type: int
+QUOTE_NONNUMERIC = ...  # type: int
+
+class Error(Exception): ...
+
+def writer(csvfile, dialect=..., **fmtparams): ...
+def reader(csvfile, dialect=..., **fmtparams): ...
+def register_dialect(name, dialect=..., **fmtparams): ...
+def unregister_dialect(name): ...
+def get_dialect(name): ...
+def list_dialects(): ...
+def field_size_limit(new_limit=...): ...
+
+class Dialect:
+    delimiter = ...  # type: Any
+    quotechar = ...  # type: Any
+    escapechar = ...  # type: Any
+    doublequote = ...  # type: Any
+    skipinitialspace = ...  # type: Any
+    lineterminator = ...  # type: Any
+    quoting = ...  # type: Any
+    def __init__(self) -> None: ...
+
+class excel(Dialect):
+    delimiter = ...  # type: Any
+    quotechar = ...  # type: Any
+    doublequote = ...  # type: Any
+    skipinitialspace = ...  # type: Any
+    lineterminator = ...  # type: Any
+    quoting = ...  # type: Any
+
+class excel_tab(excel):
+    delimiter = ...  # type: Any
+
+class unix_dialect(Dialect):
+    delimiter = ...  # type: Any
+    quotechar = ...  # type: Any
+    doublequote = ...  # type: Any
+    skipinitialspace = ...  # type: Any
+    lineterminator = ...  # type: Any
+    quoting = ...  # type: Any
+
+class DictReader(Iterable):
+    restkey = ...  # type: Any
+    restval = ...  # type: Any
+    reader = ...  # type: Any
+    dialect = ...  # type: Any
+    line_num = ...  # type: Any
+    fieldnames = ...  # type: Any # Actually a property
+    def __init__(self, f, fieldnames=..., restkey=..., restval=..., dialect=...,
+                 *args, **kwds): ...
+    def __iter__(self): ...
+    def __next__(self): ...
+
+class DictWriter:
+    fieldnames = ...  # type: Any
+    restval = ...  # type: Any
+    extrasaction = ...  # type: Any
+    writer = ...  # type: Any
+    def __init__(self, f, fieldnames, restval=..., extrasaction=..., dialect=..., *args, **kwds) -> None: ...
+    def writeheader(self): ...
+    def writerow(self, rowdict): ...
+    def writerows(self, rowdicts): ...
+
+class Sniffer:
+    preferred = ...  # type: Any
+    def __init__(self) -> None: ...
+    def sniff(self, sample, delimiters=...): ...
+    def has_header(self, sample): ...
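
Since the stub above is dynamically typed, here is how the two dict-based
helpers are normally driven (file names and field names are made up):

    import csv

    with open('people.csv', newline='') as f:
        for row in csv.DictReader(f):          # each row is a dict keyed by the header
            print(row['name'])

    with open('out.csv', 'w', newline='') as f:
        w = csv.DictWriter(f, fieldnames=['name', 'age'])
        w.writeheader()
        w.writerow({'name': 'Ada', 'age': 36})
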
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
new file mode 100644
index 0000000..4d8efb8
--- /dev/null
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -0,0 +1,221 @@
+# Stubs for datetime
+
+# NOTE: These are incomplete!
+
+from typing import Optional, SupportsAbs, Tuple, Union, overload
+
+MINYEAR = 0
+MAXYEAR = 0
+
+class tzinfo:
+    def tzname(self, dt: Optional[datetime]) -> str: ...
+    def utcoffset(self, dt: Optional[datetime]) -> int: ...
+    def dst(self, dt: Optional[datetime]) -> int: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+
+class timezone(tzinfo):
+    utc = ...  # type: tzinfo
+    min = ...  # type: tzinfo
+    max = ...  # type: tzinfo
+
+    def __init__(self, offset: timedelta, name: str = ...) -> None: ...
+    def __hash__(self) -> int: ...
+
+_tzinfo = tzinfo
+_timezone = timezone
+
+class date:
+    min = ...  # type: date
+    max = ...  # type: date
+    resolution = ...  # type: timedelta
+
+    def __init__(self, year: int, month: int = ..., day: int = ...) -> None: ...
+
+    @classmethod
+    def fromtimestamp(cls, t: float) -> date: ...
+    @classmethod
+    def today(cls) -> date: ...
+    @classmethod
+    def fromordinal(cls, n: int) -> date: ...
+
+    @property
+    def year(self) -> int: ...
+    @property
+    def month(self) -> int: ...
+    @property
+    def day(self) -> int: ...
+
+    def ctime(self) -> str: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: str) -> str: ...
+    def isoformat(self) -> str: ...
+    def timetuple(self) -> tuple: ... # TODO return type
+    def toordinal(self) -> int: ...
+    def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ...
+    def __le__(self, other: date) -> bool: ...
+    def __lt__(self, other: date) -> bool: ...
+    def __ge__(self, other: date) -> bool: ...
+    def __gt__(self, other: date) -> bool: ...
+    def __add__(self, other: timedelta) -> date: ...
+    @overload
+    def __sub__(self, other: timedelta) -> date: ...
+    @overload
+    def __sub__(self, other: date) -> timedelta: ...
+    def __hash__(self) -> int: ...
+    def weekday(self) -> int: ...
+    def isoweekday(self) -> int: ...
+    def isocalendar(self) -> Tuple[int, int, int]: ...
+
+class time:
+    min = ...  # type: time
+    max = ...  # type: time
+    resolution = ...  # type: timedelta
+
+    def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ...,
+                 tzinfo: tzinfo = ...) -> None: ...
+
+    @property
+    def hour(self) -> int: ...
+    @property
+    def minute(self) -> int: ...
+    @property
+    def second(self) -> int: ...
+    @property
+    def microsecond(self) -> int: ...
+    @property
+    def tzinfo(self) -> _tzinfo: ...
+
+    def __le__(self, other: time) -> bool: ...
+    def __lt__(self, other: time) -> bool: ...
+    def __ge__(self, other: time) -> bool: ...
+    def __gt__(self, other: time) -> bool: ...
+    def __hash__(self) -> int: ...
+    def isoformat(self) -> str: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: str) -> str: ...
+    def utcoffset(self) -> Optional[int]: ...
+    def tzname(self) -> Optional[str]: ...
+    def dst(self) -> Optional[int]: ...
+    def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
+                microsecond: int = ..., tzinfo: Union[_tzinfo, bool] = ...) -> time: ...
+
+_date = date
+_time = time
+
+class timedelta(SupportsAbs[timedelta]):
+    min = ...  # type: timedelta
+    max = ...  # type: timedelta
+    resolution = ...  # type: timedelta
+
+    def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ...,
+                 milliseconds: float = ..., minutes: float = ..., hours: float = ...,
+                 weeks: float = ...) -> None: ...
+
+    @property
+    def days(self) -> int: ...
+    @property
+    def seconds(self) -> int: ...
+    @property
+    def microseconds(self) -> int: ...
+
+    def total_seconds(self) -> float: ...
+    def __add__(self, other: timedelta) -> timedelta: ...
+    def __radd__(self, other: timedelta) -> timedelta: ...
+    def __sub__(self, other: timedelta) -> timedelta: ...
+    def __rsub__(self, other: timedelta) -> timedelta: ...
+    def __neg__(self) -> timedelta: ...
+    def __pos__(self) -> timedelta: ...
+    def __abs__(self) -> timedelta: ...
+    def __mul__(self, other: float) -> timedelta: ...
+    def __rmul__(self, other: float) -> timedelta: ...
+    @overload
+    def __floordiv__(self, other: timedelta) -> int: ...
+    @overload
+    def __floordiv__(self, other: int) -> timedelta: ...
+    @overload
+    def __truediv__(self, other: timedelta) -> float: ...
+    @overload
+    def __truediv__(self, other: float) -> timedelta: ...
+    def __mod__(self, other: timedelta) -> timedelta: ...
+    def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ...
+    def __le__(self, other: timedelta) -> bool: ...
+    def __lt__(self, other: timedelta) -> bool: ...
+    def __ge__(self, other: timedelta) -> bool: ...
+    def __gt__(self, other: timedelta) -> bool: ...
+    def __hash__(self) -> int: ...
+
+
+class datetime:
+    # TODO: Is a subclass of date, but this would make some types incompatible.
+    min = ...  # type: datetime
+    max = ...  # type: datetime
+    resolution = ...  # type: timedelta
+
+    def __init__(self, year: int, month: int = ..., day: int = ..., hour: int = ...,
+                 minute: int = ..., second: int = ..., microsecond: int = ...,
+                 tzinfo: tzinfo = ...) -> None: ...
+
+    @property
+    def year(self) -> int: ...
+    @property
+    def month(self) -> int: ...
+    @property
+    def day(self) -> int: ...
+    @property
+    def hour(self) -> int: ...
+    @property
+    def minute(self) -> int: ...
+    @property
+    def second(self) -> int: ...
+    @property
+    def microsecond(self) -> int: ...
+    @property
+    def tzinfo(self) -> _tzinfo: ...
+
+    @classmethod
+    def fromtimestamp(cls, t: float, tz: timezone = ...) -> datetime: ...
+    @classmethod
+    def utcfromtimestamp(cls, t: float) -> datetime: ...
+    @classmethod
+    def today(cls) -> datetime: ...
+    @classmethod
+    def fromordinal(cls, n: int) -> datetime: ...
+    @classmethod
+    def now(cls, tz: timezone = ...) -> datetime: ...
+    @classmethod
+    def utcnow(cls) -> datetime: ...
+    @classmethod
+    def combine(cls, date: date, time: time) -> datetime: ...
+    def strftime(self, fmt: str) -> str: ...
+    def __format__(self, fmt: str) -> str: ...
+    def toordinal(self) -> int: ...
+    def timetuple(self) -> tuple: ... # TODO return type
+    def timestamp(self) -> float: ...
+    def utctimetuple(self) -> tuple: ... # TODO return type
+    def date(self) -> _date: ...
+    def time(self) -> _time: ...
+    def timetz(self) -> _time: ...
+    def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ...,
+                minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo:
+                Union[_tzinfo, bool] = ...) -> datetime: ...
+    def astimezone(self, tz: timezone = ...) -> datetime: ...
+    def ctime(self) -> str: ...
+    def isoformat(self, sep: str = ...) -> str: ...
+    @classmethod
+    def strptime(cls, date_string: str, format: str) -> datetime: ...
+    def utcoffset(self) -> Optional[int]: ...
+    def tzname(self) -> Optional[str]: ...
+    def dst(self) -> Optional[int]: ...
+    def __le__(self, other: datetime) -> bool: ...
+    def __lt__(self, other: datetime) -> bool: ...
+    def __ge__(self, other: datetime) -> bool: ...
+    def __gt__(self, other: datetime) -> bool: ...
+    def __add__(self, other: timedelta) -> datetime: ...
+    @overload
+    def __sub__(self, other: datetime) -> timedelta: ...
+    @overload
+    def __sub__(self, other: timedelta) -> datetime: ...
+    def __hash__(self) -> int: ...
+    def weekday(self) -> int: ...
+    def isoweekday(self) -> int: ...
+    def isocalendar(self) -> Tuple[int, int, int]: ...
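
The overloaded __sub__ definitions above encode the usual arithmetic rules:
subtracting two dates (or datetimes) yields a timedelta, while subtracting a
timedelta yields another date/datetime. For example:

    from datetime import date, timedelta

    d = date(2016, 5, 5)
    span = d - date(2016, 1, 1)        # date - date      -> timedelta
    earlier = d - timedelta(days=7)    # date - timedelta -> date
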
diff --git a/typeshed/stdlib/3/decimal.pyi b/typeshed/stdlib/3/decimal.pyi
new file mode 100644
index 0000000..6145b8c
--- /dev/null
+++ b/typeshed/stdlib/3/decimal.pyi
@@ -0,0 +1,255 @@
+# Stubs for decimal (Python 3.4)
+
+from typing import (
+    Any, Union, SupportsInt, SupportsFloat, SupportsAbs, SupportsRound, Sequence,
+    Tuple, NamedTuple, Dict
+)
+
+_Decimal = Union[Decimal, int]
+
+BasicContext = ...  # type: Context
+DefaultContext = ...  # type: Context
+ExtendedContext = ...  # type: Context
+HAVE_THREADS = ...  # type: bool
+MAX_EMAX = ...  # type: int
+MAX_PREC = ...  # type: int
+MIN_EMIN = ...  # type: int
+MIN_ETINY = ...  # type: int
+ROUND_05UP = ...  # type: str
+ROUND_CEILING = ...  # type: str
+ROUND_DOWN = ...  # type: str
+ROUND_FLOOR = ...  # type: str
+ROUND_HALF_DOWN = ...  # type: str
+ROUND_HALF_EVEN = ...  # type: str
+ROUND_HALF_UP = ...  # type: str
+ROUND_UP = ...  # type: str
+
+def getcontext() -> Context: ...
+def localcontext(ctx: Context = ...) -> _ContextManager: ...
+def setcontext(c: Context) -> None: ...
+
+DecimalTuple = NamedTuple('DecimalTuple',
+                          [('sign', int),
+                           ('digits', Sequence[int]), # TODO: Use Tuple[int, ...]
+                           ('exponent', int)])
+
+class _ContextManager:
+    def __enter__(self) -> Context: ...
+    def __exit__(self, t, v, tb) -> None: ...
+
+class Context:
+    Emax = ...  # type: int
+    Emin = ...  # type: int
+    capitals = ...  # type: int
+    clamp = ...  # type: int
+    prec = ...  # type: int
+    rounding = ...  # type: str
+    traps = ...  # type: Dict[type, bool]
+    def __init__(self, prec: int = ..., rounding: str = ..., Emin: int = ..., Emax: int = ...,
+                 capitals: int = ..., clamp: int = ..., flags=..., traps=...,
+                 _ignored_flags=...) -> None: ...
+    def Etiny(self): ...
+    def Etop(self): ...
+    def abs(self, x: _Decimal) -> Decimal: ...
+    def add(self, x: _Decimal, y: _Decimal) -> Decimal: ...
+    def canonical(self, x): ...
+    def clear_flags(self): ...
+    def clear_traps(self): ...
+    def compare(self, x, y): ...
+    def compare_signal(self, x, y): ...
+    def compare_total(self, x, y): ...
+    def compare_total_mag(self, x, y): ...
+    def copy(self): ...
+    def copy_abs(self, x): ...
+    def copy_decimal(self, x): ...
+    def copy_negate(self, x): ...
+    def copy_sign(self, x, y): ...
+    def create_decimal(self, x): ...
+    def create_decimal_from_float(self, f): ...
+    def divide(self, x, y): ...
+    def divide_int(self, x, y): ...
+    def divmod(self, x, y): ...
+    def exp(self, x): ...
+    def fma(self, x, y, z): ...
+    def is_canonical(self, x): ...
+    def is_finite(self, x): ...
+    def is_infinite(self, x): ...
+    def is_nan(self, x): ...
+    def is_normal(self, x): ...
+    def is_qnan(self, x): ...
+    def is_signed(self, x): ...
+    def is_snan(self, x): ...
+    def is_subnormal(self, x): ...
+    def is_zero(self, x): ...
+    def ln(self, x): ...
+    def log10(self, x): ...
+    def logb(self, x): ...
+    def logical_and(self, x, y): ...
+    def logical_invert(self, x): ...
+    def logical_or(self, x, y): ...
+    def logical_xor(self, x, y): ...
+    def max(self, x, y): ...
+    def max_mag(self, x, y): ...
+    def min(self, x, y): ...
+    def min_mag(self, x, y): ...
+    def minus(self, x): ...
+    def multiply(self, x, y): ...
+    def next_minus(self, x): ...
+    def next_plus(self, x): ...
+    def next_toward(self, x): ...
+    def normalize(self, x): ...
+    def number_class(self, x): ...
+    def plus(self, x): ...
+    def power(self, x, y): ...
+    def quantize(self, x, y): ...
+    def radix(self): ...
+    def remainder(self, x, y): ...
+    def remainder_near(self, x, y): ...
+    def rotate(self, x, y): ...
+    def same_quantum(self, x, y): ...
+    def scaleb(self, x, y): ...
+    def shift(self, x, y): ...
+    def sqrt(self, x): ...
+    def subtract(self, x, y): ...
+    def to_eng_string(self, x): ...
+    def to_integral(self, x): ...
+    def to_integral_exact(self, x): ...
+    def to_integral_value(self, x): ...
+    def to_sci_string(self, x): ...
+    def __copy__(self) -> Context: ...
+    def __delattr__(self, name): ...
+    def __reduce__(self): ...
+
+class ConversionSyntax(InvalidOperation): ...
+
+class Decimal(SupportsInt, SupportsFloat, SupportsAbs[Decimal], SupportsRound[int]):
+    # TODO: SupportsCeil, SupportsFloor, SupportsTrunc?
+
+    def __init__(self, value: Union[_Decimal, float, str,
+                                    Tuple[int, Sequence[int], int]] = ...,
+                 context: Context = ...) -> None: ...
+
+    @property
+    def imag(self) -> Decimal: ...
+    @property
+    def real(self) -> Decimal: ...
+
+    def adjusted(self) -> int: ...
+    def as_tuple(self) -> DecimalTuple: ...
+    def canonical(self) -> Decimal: ...
+    def compare(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def compare_signal(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def compare_total(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def compare_total_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def conjugate(self) -> Decimal: ...
+    def copy_abs(self) -> Decimal: ...
+    def copy_negate(self) -> Decimal: ...
+    def copy_sign(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def exp(self, context: Context = ...) -> Decimal: ...
+    def fma(self, other: _Decimal, third: _Decimal, context: Context = ...) -> Decimal: ...
+    @classmethod
+    def from_float(cls, f: float) -> Decimal: ...
+    def is_canonical(self) -> bool: ...
+    def is_finite(self) -> bool: ...
+    def is_infinite(self) -> bool: ...
+    def is_nan(self) -> bool: ...
+    def is_normal(self, context: Context = ...) -> bool: ...
+    def is_qnan(self) -> bool: ...
+    def is_signed(self) -> bool: ...
+    def is_snan(self) -> bool: ...
+    def is_subnormal(self, context: Context = ...) -> bool: ...
+    def is_zero(self) -> bool: ...
+    def ln(self, context: Context = ...) -> Decimal: ...
+    def log10(self, context: Context = ...) -> Decimal: ...
+    def logb(self, context: Context = ...) -> Decimal: ...
+    def logical_and(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def logical_invert(self, context: Context = ...) -> Decimal: ...
+    def logical_or(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def logical_xor(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def max(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def max_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def min(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def min_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def next_minus(self, context: Context = ...) -> Decimal: ...
+    def next_plus(self, context: Context = ...) -> Decimal: ...
+    def next_toward(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def normalize(self, context: Context = ...) -> Decimal: ...
+    def number_class(self, context: Context = ...) -> str: ...
+    def quantize(self, exp: _Decimal, rounding: str = ...,
+                 context: Context = ...) -> Decimal: ...
+    def radix(self) -> Decimal: ...
+    def remainder_near(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def rotate(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def same_quantum(self, other: _Decimal, context: Context = ...) -> bool: ...
+    def scaleb(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def shift(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def sqrt(self, context: Context = ...) -> Decimal: ...
+    def to_eng_string(self, context: Context = ...) -> str: ...
+    def to_integral(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def to_integral_exact(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def to_integral_value(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def __abs__(self) -> Decimal: ...
+    def __add__(self, other: _Decimal) -> Decimal: ...
+    def __bool__(self) -> bool: ...
+    def __ceil__(self) -> int: ...
+    def __complex__(self) -> complex: ...
+    def __copy__(self) -> Decimal: ...
+    def __deepcopy__(self) -> Decimal: ...
+    def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __float__(self) -> float: ...
+    def __floor__(self) -> int: ...
+    def __floordiv__(self, other: _Decimal) -> Decimal: ...
+    def __format__(self, specifier, context=..., _localeconv=...) -> str: ...
+    def __ge__(self, other: _Decimal) -> bool: ...
+    def __gt__(self, other: _Decimal) -> bool: ...
+    def __hash__(self) -> int: ...
+    def __int__(self) -> int: ...
+    def __le__(self, other: _Decimal) -> bool: ...
+    def __lt__(self, other: _Decimal) -> bool: ...
+    def __mod__(self, other: _Decimal) -> Decimal: ...
+    def __mul__(self, other: _Decimal) -> Decimal: ...
+    def __ne__(self, other: object) -> bool: ...
+    def __neg__(self) -> Decimal: ...
+    def __pos__(self) -> Decimal: ...
+    def __pow__(self, other: _Decimal) -> Decimal: ...
+    def __radd__(self, other: int) -> Decimal: ...
+    def __rdivmod__(self, other: int) -> Tuple[Decimal, Decimal]: ...
+    def __reduce__(self): ...
+    def __rfloordiv__(self, other: int) -> Decimal: ...
+    def __rmod__(self, other: int) -> Decimal: ...
+    def __rmul__(self, other: int) -> Decimal: ...
+    def __round__(self, n=...) -> int: ...
+    def __rpow__(self, other: int) -> Decimal: ...
+    def __rsub__(self, other: int) -> Decimal: ...
+    def __rtruediv__(self, other: int) -> Decimal: ...
+    def __sizeof__(self) -> int: ...
+    def __sub__(self, other: _Decimal) -> Decimal: ...
+    def __truediv__(self, other: _Decimal) -> Decimal: ...
+    def __trunc__(self) -> int: ...
+
+class DecimalException(ArithmeticError): ...
+
+class Clamped(DecimalException): ...
+
+class DivisionByZero(DecimalException, ZeroDivisionError): ...
+
+class DivisionImpossible(InvalidOperation): ...
+
+class DivisionUndefined(InvalidOperation, ZeroDivisionError): ...
+
+class FloatOperation(DecimalException, TypeError): ...
+
+class Inexact(DecimalException): ...
+
+class InvalidContext(InvalidOperation): ...
+
+class InvalidOperation(DecimalException): ...
+
+class Overflow(Inexact, Rounded): ...
+
+class Rounded(DecimalException): ...
+
+class Subnormal(DecimalException): ...
+
+class Underflow(Inexact, Rounded, Subnormal): ...
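(For orientation only, a minimal usage sketch of the decimal API typed above; this is illustrative and not part of the imported stub. It uses only standard-library decimal names:)

    from decimal import Decimal, getcontext, localcontext, ROUND_HALF_UP

    getcontext().prec = 28                             # getcontext() -> Context
    total = Decimal("1.10") + Decimal("2.20")          # Decimal.__add__ accepts Decimal or int
    with localcontext() as ctx:                        # localcontext() -> _ContextManager; __enter__ -> Context
        ctx.rounding = ROUND_HALF_UP
        cents = (total / 3).quantize(Decimal("0.01"))  # quantize(...) -> Decimal
    print(total, cents)                                # 3.30 1.10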
diff --git a/typeshed/stdlib/3/difflib.pyi b/typeshed/stdlib/3/difflib.pyi
new file mode 100644
index 0000000..98da327
--- /dev/null
+++ b/typeshed/stdlib/3/difflib.pyi
@@ -0,0 +1,61 @@
+# Stubs for difflib
+
+# Based on https://docs.python.org/3.2/library/difflib.html
+
+from typing import (
+    TypeVar, Callable, Iterable, List, NamedTuple, Sequence, Tuple, Generic
+)
+
+_T = TypeVar('_T')
+
+class SequenceMatcher(Generic[_T]):
+    def __init__(self, isjunk: Callable[[_T], bool] = ...,
+                 a: Sequence[_T] = ..., b: Sequence[_T] = ...,
+                 autojunk: bool = ...) -> None: ...
+    def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ...
+    def set_seq1(self, a: Sequence[_T]) -> None: ...
+    def set_seq2(self, b: Sequence[_T]) -> None: ...
+    def find_longest_match(self, alo: int, ahi: int, blo: int,
+                           bhi: int) -> Tuple[int, int, int]: ...
+    def get_matching_blocks(self) -> List[Tuple[int, int, int]]: ...
+    def get_opcodes(self) -> List[Tuple[str, int, int, int, int]]: ...
+    def get_grouped_opcodes(self, n: int = ...
+                            ) -> Iterable[Tuple[str, int, int, int, int]]: ...
+    def ratio(self) -> float: ...
+    def quick_ratio(self) -> float: ...
+    def real_quick_ratio(self) -> float: ...
+
+def get_close_matches(word: Sequence[_T], possibilities: List[Sequence[_T]],
+                      n: int = ..., cutoff: float = ...) -> List[Sequence[_T]]: ...
+
+class Differ:
+    def __init__(self, linejunk: Callable[[str], bool] = ...,
+                 charjunk: Callable[[str], bool] = ...) -> None: ...
+    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterable[str]: ...
+
+def IS_LINE_JUNK(line: str) -> bool: ...
+def IS_CHARACTER_JUNK(ch: str) -> bool: ...
+def unified_diff(a: Sequence[str], b: Sequence[str], fromfile: str = ...,
+                 tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
+                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+def context_diff(a: Sequence[str], b: Sequence[str], fromfile: str=...,
+                 tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
+                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+def ndiff(a: Sequence[str], b: Sequence[str],
+          linejunk: Callable[[str], bool] = ...,
+          charjunk: Callable[[str], bool] = ...
+          ) -> Iterable[str]: ...
+
+class HtmlDiff(object):
+    def __init__(self, tabsize: int = ..., wrapcolumn: int = ...,
+                 linejunk: Callable[[str], bool] = ...,
+                 charjunk: Callable[[str], bool] = ...
+                 ) -> None: ...
+    def make_file(self, fromlines: Sequence[str], tolines: Sequence[str],
+                  fromdesc: str = ..., todesc: str = ..., context: bool = ...,
+                  numlines: int = ...) -> str: ...
+    def make_table(self, fromlines: Sequence[str], tolines: Sequence[str],
+                   fromdesc: str = ..., todesc: str = ..., context: bool = ...,
+                   numlines: int = ...) -> str: ...
+
+def restore(delta: Iterable[str], which: int) -> Iterable[str]: ...
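(Illustrative only -- a small example exercising the difflib interface stubbed above, using the standard-library module:)

    from difflib import SequenceMatcher, unified_diff

    a = ["one\n", "two\n", "three\n"]
    b = ["one\n", "2\n", "three\n"]
    sm = SequenceMatcher(a=a, b=b)      # SequenceMatcher is Generic[_T]; here _T is str
    print(sm.ratio())                   # similarity ratio as a float
    print("".join(unified_diff(a, b, fromfile="a.txt", tofile="b.txt")))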
diff --git a/typeshed/stdlib/3/distutils/__init__.pyi b/typeshed/stdlib/3/distutils/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/distutils/errors.pyi b/typeshed/stdlib/3/distutils/errors.pyi
new file mode 100644
index 0000000..e5578f9
--- /dev/null
+++ b/typeshed/stdlib/3/distutils/errors.pyi
@@ -0,0 +1,4 @@
+import typing
+
+class DistutilsError(Exception): ...
+class DistutilsExecError(DistutilsError): ...
diff --git a/typeshed/stdlib/3/distutils/spawn.pyi b/typeshed/stdlib/3/distutils/spawn.pyi
new file mode 100644
index 0000000..ac6eaf3
--- /dev/null
+++ b/typeshed/stdlib/3/distutils/spawn.pyi
@@ -0,0 +1,6 @@
+from typing import List
+
+# Note: in the stdlib implementation these boolean flags default to the ints 0/1.
+def spawn(cmd: List[str], search_path: bool = ..., verbose: bool = ...,
+          dry_run: bool = ...) -> None: ...
+def find_executable(executable: str, path: str = ...) -> str: ...
diff --git a/typeshed/stdlib/3/doctest.pyi b/typeshed/stdlib/3/doctest.pyi
new file mode 100644
index 0000000..5cbdc1f
--- /dev/null
+++ b/typeshed/stdlib/3/doctest.pyi
@@ -0,0 +1,9 @@
+# Stubs for doctest
+
+# NOTE: These are incomplete!
+
+from typing import Any, Tuple
+
+# TODO arguments missing
+def testmod(module: Any = ..., *, name: str = ..., globs: Any = ...,
+            verbose: bool = ...) -> Tuple[int, int]: ...
diff --git a/typeshed/stdlib/3/email/__init__.pyi b/typeshed/stdlib/3/email/__init__.pyi
new file mode 100644
index 0000000..6cf5a07
--- /dev/null
+++ b/typeshed/stdlib/3/email/__init__.pyi
@@ -0,0 +1,23 @@
+# Stubs for email (Python 3.4)
+
+from typing import IO, Any
+
+def message_from_string(s: str, *args, **kwargs): ...
+def message_from_bytes(s: bytes, *args, **kwargs): ...
+def message_from_file(fp: IO[str], *args, **kwargs): ...
+def message_from_binary_file(fp: IO[bytes], *args, **kwargs): ...
+
+# Names in __all__ with no definition:
+#   base64mime
+#   charset
+#   encoders
+#   errors
+#   feedparser
+#   generator
+#   header
+#   iterators
+#   message
+#   mime
+#   parser
+#   quoprimime
+#   utils
diff --git a/typeshed/stdlib/3/email/_header_value_parser.pyi b/typeshed/stdlib/3/email/_header_value_parser.pyi
new file mode 100644
index 0000000..9101d67
--- /dev/null
+++ b/typeshed/stdlib/3/email/_header_value_parser.pyi
@@ -0,0 +1,397 @@
+# Stubs for email._header_value_parser (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+WSP = ...  # type: Any
+CFWS_LEADER = ...  # type: Any
+SPECIALS = ...  # type: Any
+ATOM_ENDS = ...  # type: Any
+DOT_ATOM_ENDS = ...  # type: Any
+PHRASE_ENDS = ...  # type: Any
+TSPECIALS = ...  # type: Any
+TOKEN_ENDS = ...  # type: Any
+ASPECIALS = ...  # type: Any
+ATTRIBUTE_ENDS = ...  # type: Any
+EXTENDED_ATTRIBUTE_ENDS = ...  # type: Any
+
+def quote_string(value): ...
+
+class _Folded:
+    maxlen = ...  # type: Any
+    policy = ...  # type: Any
+    lastlen = ...  # type: Any
+    stickyspace = ...  # type: Any
+    firstline = ...  # type: Any
+    done = ...  # type: Any
+    current = ...  # type: Any
+    def __init__(self, maxlen, policy) -> None: ...
+    def newline(self): ...
+    def finalize(self): ...
+    def append(self, stoken): ...
+    def append_if_fits(self, token, stoken=...): ...
+
+class TokenList(list):
+    token_type = ...  # type: Any
+    defects = ...  # type: Any
+    def __init__(self, *args, **kw) -> None: ...
+    @property
+    def value(self): ...
+    @property
+    def all_defects(self): ...
+    @property
+    def parts(self): ...
+    def startswith_fws(self): ...
+    def pop_leading_fws(self): ...
+    def pop_trailing_ws(self): ...
+    @property
+    def has_fws(self): ...
+    def has_leading_comment(self): ...
+    @property
+    def comments(self): ...
+    def fold(self, policy): ...
+    def as_encoded_word(self, charset): ...
+    def cte_encode(self, charset, policy): ...
+    def pprint(self, indent=...): ...
+    def ppstr(self, indent=...): ...
+
+class WhiteSpaceTokenList(TokenList):
+    @property
+    def value(self): ...
+    @property
+    def comments(self): ...
+
+class UnstructuredTokenList(TokenList):
+    token_type = ...  # type: Any
+    def cte_encode(self, charset, policy): ...
+
+class Phrase(TokenList):
+    token_type = ...  # type: Any
+    def cte_encode(self, charset, policy): ...
+
+class Word(TokenList):
+    token_type = ...  # type: Any
+
+class CFWSList(WhiteSpaceTokenList):
+    token_type = ...  # type: Any
+    def has_leading_comment(self): ...
+
+class Atom(TokenList):
+    token_type = ...  # type: Any
+
+class Token(TokenList):
+    token_type = ...  # type: Any
+
+class EncodedWord(TokenList):
+    token_type = ...  # type: Any
+    cte = ...  # type: Any
+    charset = ...  # type: Any
+    lang = ...  # type: Any
+    @property
+    def encoded(self): ...
+
+class QuotedString(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def content(self): ...
+    @property
+    def quoted_value(self): ...
+    @property
+    def stripped_value(self): ...
+
+class BareQuotedString(QuotedString):
+    token_type = ...  # type: Any
+    @property
+    def value(self): ...
+
+class Comment(WhiteSpaceTokenList):
+    token_type = ...  # type: Any
+    def quote(self, value): ...
+    @property
+    def content(self): ...
+    @property
+    def comments(self): ...
+
+class AddressList(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def addresses(self): ...
+    @property
+    def mailboxes(self): ...
+    @property
+    def all_mailboxes(self): ...
+
+class Address(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def display_name(self): ...
+    @property
+    def mailboxes(self): ...
+    @property
+    def all_mailboxes(self): ...
+
+class MailboxList(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def mailboxes(self): ...
+    @property
+    def all_mailboxes(self): ...
+
+class GroupList(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def mailboxes(self): ...
+    @property
+    def all_mailboxes(self): ...
+
+class Group(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def mailboxes(self): ...
+    @property
+    def all_mailboxes(self): ...
+    @property
+    def display_name(self): ...
+
+class NameAddr(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def display_name(self): ...
+    @property
+    def local_part(self): ...
+    @property
+    def domain(self): ...
+    @property
+    def route(self): ...
+    @property
+    def addr_spec(self): ...
+
+class AngleAddr(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def local_part(self): ...
+    @property
+    def domain(self): ...
+    @property
+    def route(self): ...
+    @property
+    def addr_spec(self): ...
+
+class ObsRoute(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def domains(self): ...
+
+class Mailbox(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def display_name(self): ...
+    @property
+    def local_part(self): ...
+    @property
+    def domain(self): ...
+    @property
+    def route(self): ...
+    @property
+    def addr_spec(self): ...
+
+class InvalidMailbox(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def display_name(self): ...
+    local_part = ...  # type: Any
+
+class Domain(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def domain(self): ...
+
+class DotAtom(TokenList):
+    token_type = ...  # type: Any
+
+class DotAtomText(TokenList):
+    token_type = ...  # type: Any
+
+class AddrSpec(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def local_part(self): ...
+    @property
+    def domain(self): ...
+    @property
+    def value(self): ...
+    @property
+    def addr_spec(self): ...
+
+class ObsLocalPart(TokenList):
+    token_type = ...  # type: Any
+
+class DisplayName(Phrase):
+    token_type = ...  # type: Any
+    @property
+    def display_name(self): ...
+    @property
+    def value(self): ...
+
+class LocalPart(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def value(self): ...
+    @property
+    def local_part(self): ...
+
+class DomainLiteral(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def domain(self): ...
+    @property
+    def ip(self): ...
+
+class MIMEVersion(TokenList):
+    token_type = ...  # type: Any
+    major = ...  # type: Any
+    minor = ...  # type: Any
+
+class Parameter(TokenList):
+    token_type = ...  # type: Any
+    sectioned = ...  # type: Any
+    extended = ...  # type: Any
+    charset = ...  # type: Any
+    @property
+    def section_number(self): ...
+    @property
+    def param_value(self): ...
+
+class InvalidParameter(Parameter):
+    token_type = ...  # type: Any
+
+class Attribute(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def stripped_value(self): ...
+
+class Section(TokenList):
+    token_type = ...  # type: Any
+    number = ...  # type: Any
+
+class Value(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def stripped_value(self): ...
+
+class MimeParameters(TokenList):
+    token_type = ...  # type: Any
+    @property
+    def params(self): ...
+
+class ParameterizedHeaderValue(TokenList):
+    @property
+    def params(self): ...
+    @property
+    def parts(self): ...
+
+class ContentType(ParameterizedHeaderValue):
+    token_type = ...  # type: Any
+    maintype = ...  # type: Any
+    subtype = ...  # type: Any
+
+class ContentDisposition(ParameterizedHeaderValue):
+    token_type = ...  # type: Any
+    content_disposition = ...  # type: Any
+
+class ContentTransferEncoding(TokenList):
+    token_type = ...  # type: Any
+    cte = ...  # type: Any
+
+class HeaderLabel(TokenList):
+    token_type = ...  # type: Any
+
+class Header(TokenList):
+    token_type = ...  # type: Any
+
+class Terminal(str):
+    token_type = ...  # type: Any
+    defects = ...  # type: Any
+    def __new__(cls, value, token_type): ...
+    @property
+    def all_defects(self): ...
+    def cte_encode(self, charset, policy): ...
+    def pop_trailing_ws(self): ...
+    def pop_leading_fws(self): ...
+    @property
+    def comments(self): ...
+    def has_leading_comment(self): ...
+    def __getnewargs__(self): ...
+
+class WhiteSpaceTerminal(Terminal):
+    @property
+    def value(self): ...
+    def startswith_fws(self): ...
+    has_fws = ...  # type: Any
+
+class ValueTerminal(Terminal):
+    @property
+    def value(self): ...
+    def startswith_fws(self): ...
+    has_fws = ...  # type: Any
+    def as_encoded_word(self, charset): ...
+
+class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
+    @property
+    def value(self): ...
+    @property
+    def encoded(self): ...
+    has_fws = ...  # type: Any
+
+DOT = ...  # type: Any
+ListSeparator = ...  # type: Any
+RouteComponentMarker = ...  # type: Any
+
+def get_fws(value): ...
+def get_encoded_word(value): ...
+def get_unstructured(value): ...
+def get_qp_ctext(value): ...
+def get_qcontent(value): ...
+def get_atext(value): ...
+def get_bare_quoted_string(value): ...
+def get_comment(value): ...
+def get_cfws(value): ...
+def get_quoted_string(value): ...
+def get_atom(value): ...
+def get_dot_atom_text(value): ...
+def get_dot_atom(value): ...
+def get_word(value): ...
+def get_phrase(value): ...
+def get_local_part(value): ...
+def get_obs_local_part(value): ...
+def get_dtext(value): ...
+def get_domain_literal(value): ...
+def get_domain(value): ...
+def get_addr_spec(value): ...
+def get_obs_route(value): ...
+def get_angle_addr(value): ...
+def get_display_name(value): ...
+def get_name_addr(value): ...
+def get_mailbox(value): ...
+def get_invalid_mailbox(value, endchars): ...
+def get_mailbox_list(value): ...
+def get_group_list(value): ...
+def get_group(value): ...
+def get_address(value): ...
+def get_address_list(value): ...
+def parse_mime_version(value): ...
+def get_invalid_parameter(value): ...
+def get_ttext(value): ...
+def get_token(value): ...
+def get_attrtext(value): ...
+def get_attribute(value): ...
+def get_extended_attrtext(value): ...
+def get_extended_attribute(value): ...
+def get_section(value): ...
+def get_value(value): ...
+def get_parameter(value): ...
+def parse_mime_parameters(value): ...
+def parse_content_type_header(value): ...
+def parse_content_disposition_header(value): ...
+def parse_content_transfer_encoding_header(value): ...
diff --git a/typeshed/stdlib/3/email/_parseaddr.pyi b/typeshed/stdlib/3/email/_parseaddr.pyi
new file mode 100644
index 0000000..f5b34d4
--- /dev/null
+++ b/typeshed/stdlib/3/email/_parseaddr.pyi
@@ -0,0 +1,44 @@
+# Stubs for email._parseaddr (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def parsedate_tz(data): ...
+def parsedate(data): ...
+def mktime_tz(data): ...
+def quote(str): ...
+
+class AddrlistClass:
+    specials = ...  # type: Any
+    pos = ...  # type: Any
+    LWS = ...  # type: Any
+    CR = ...  # type: Any
+    FWS = ...  # type: Any
+    atomends = ...  # type: Any
+    phraseends = ...  # type: Any
+    field = ...  # type: Any
+    commentlist = ...  # type: Any
+    def __init__(self, field) -> None: ...
+    def gotonext(self): ...
+    def getaddrlist(self): ...
+    def getaddress(self): ...
+    def getrouteaddr(self): ...
+    def getaddrspec(self): ...
+    def getdomain(self): ...
+    def getdelimited(self, beginchar, endchars, allowcomments=...): ...
+    def getquote(self): ...
+    def getcomment(self): ...
+    def getdomainliteral(self): ...
+    def getatom(self, atomends=...): ...
+    def getphraselist(self): ...
+
+class AddressList(AddrlistClass):
+    addresslist = ...  # type: Any
+    def __init__(self, field) -> None: ...
+    def __len__(self): ...
+    def __add__(self, other): ...
+    def __iadd__(self, other): ...
+    def __sub__(self, other): ...
+    def __isub__(self, other): ...
+    def __getitem__(self, index): ...
diff --git a/typeshed/stdlib/3/email/_policybase.pyi b/typeshed/stdlib/3/email/_policybase.pyi
new file mode 100644
index 0000000..49af14c
--- /dev/null
+++ b/typeshed/stdlib/3/email/_policybase.pyi
@@ -0,0 +1,34 @@
+# Stubs for email._policybase (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _PolicyBase:
+    def __init__(self, **kw) -> None: ...
+    def clone(self, **kw): ...
+    def __setattr__(self, name, value): ...
+    def __add__(self, other): ...
+
+class Policy(_PolicyBase):
+    raise_on_defect = ...  # type: Any
+    linesep = ...  # type: Any
+    cte_type = ...  # type: Any
+    max_line_length = ...  # type: Any
+    def handle_defect(self, obj, defect): ...
+    def register_defect(self, obj, defect): ...
+    def header_max_count(self, name): ...
+    def header_source_parse(self, sourcelines): ...
+    def header_store_parse(self, name, value): ...
+    def header_fetch_parse(self, name, value): ...
+    def fold(self, name, value): ...
+    def fold_binary(self, name, value): ...
+
+class Compat32(Policy):
+    def header_source_parse(self, sourcelines): ...
+    def header_store_parse(self, name, value): ...
+    def header_fetch_parse(self, name, value): ...
+    def fold(self, name, value): ...
+    def fold_binary(self, name, value): ...
+
+compat32 = ...  # type: Any
diff --git a/typeshed/stdlib/3/email/base64mime.pyi b/typeshed/stdlib/3/email/base64mime.pyi
new file mode 100644
index 0000000..e94af9a
--- /dev/null
+++ b/typeshed/stdlib/3/email/base64mime.pyi
@@ -0,0 +1,13 @@
+# Stubs for email.base64mime (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def header_length(bytearray): ...
+def header_encode(header_bytes, charset=...): ...
+def body_encode(s, maxlinelen=..., eol=...): ...
+def decode(string): ...
+
+body_decode = ...  # type: Any
+decodestring = ...  # type: Any
diff --git a/typeshed/stdlib/3/email/charset.pyi b/typeshed/stdlib/3/email/charset.pyi
new file mode 100644
index 0000000..07f7ccb
--- /dev/null
+++ b/typeshed/stdlib/3/email/charset.pyi
@@ -0,0 +1,25 @@
+# Stubs for email.charset (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def add_charset(charset, header_enc=..., body_enc=..., output_charset=...): ...
+def add_alias(alias, canonical): ...
+def add_codec(charset, codecname): ...
+
+class Charset:
+    input_charset = ...  # type: Any
+    header_encoding = ...  # type: Any
+    body_encoding = ...  # type: Any
+    output_charset = ...  # type: Any
+    input_codec = ...  # type: Any
+    output_codec = ...  # type: Any
+    def __init__(self, input_charset=...) -> None: ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def get_body_encoding(self): ...
+    def get_output_charset(self): ...
+    def header_encode(self, string): ...
+    def header_encode_lines(self, string, maxlengths): ...
+    def body_encode(self, string): ...
diff --git a/typeshed/stdlib/3/email/contentmanager.pyi b/typeshed/stdlib/3/email/contentmanager.pyi
new file mode 100644
index 0000000..d738689
--- /dev/null
+++ b/typeshed/stdlib/3/email/contentmanager.pyi
@@ -0,0 +1,27 @@
+# Stubs for email.contentmanager (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class ContentManager:
+    get_handlers = ...  # type: Any
+    set_handlers = ...  # type: Any
+    def __init__(self) -> None: ...
+    def add_get_handler(self, key, handler): ...
+    def get_content(self, msg, *args, **kw): ...
+    def add_set_handler(self, typekey, handler): ...
+    def set_content(self, msg, obj, *args, **kw): ...
+
+raw_data_manager = ...  # type: Any
+
+def get_text_content(msg, errors=...): ...
+def get_non_text_content(msg): ...
+def get_message_content(msg): ...
+def get_and_fixup_unknown_message_content(msg): ...
+def set_text_content(msg, string, subtype=..., charset=..., cte=..., disposition=...,
+                     filename=..., cid=..., params=..., headers=...): ...
+def set_message_content(msg, message, subtype=..., cte=..., disposition=..., filename=...,
+                        cid=..., params=..., headers=...): ...
+def set_bytes_content(msg, data, maintype, subtype, cte=..., disposition=..., filename=...,
+                      cid=..., params=..., headers=...): ...
diff --git a/typeshed/stdlib/3/email/encoders.pyi b/typeshed/stdlib/3/email/encoders.pyi
new file mode 100644
index 0000000..f9f111a
--- /dev/null
+++ b/typeshed/stdlib/3/email/encoders.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.encoders (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def encode_base64(msg): ...
+def encode_quopri(msg): ...
+def encode_7or8bit(msg): ...
+def encode_noop(msg): ...
diff --git a/typeshed/stdlib/3/email/errors.pyi b/typeshed/stdlib/3/email/errors.pyi
new file mode 100644
index 0000000..2f95126
--- /dev/null
+++ b/typeshed/stdlib/3/email/errors.pyi
@@ -0,0 +1,44 @@
+# Stubs for email.errors (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class MessageError(Exception): ...
+class MessageParseError(MessageError): ...
+class HeaderParseError(MessageParseError): ...
+class BoundaryError(MessageParseError): ...
+class MultipartConversionError(MessageError, TypeError): ...
+class CharsetError(MessageError): ...
+
+class MessageDefect(ValueError):
+    line = ...  # type: Any
+    def __init__(self, line=...) -> None: ...
+
+class NoBoundaryInMultipartDefect(MessageDefect): ...
+class StartBoundaryNotFoundDefect(MessageDefect): ...
+class CloseBoundaryNotFoundDefect(MessageDefect): ...
+class FirstHeaderLineIsContinuationDefect(MessageDefect): ...
+class MisplacedEnvelopeHeaderDefect(MessageDefect): ...
+class MissingHeaderBodySeparatorDefect(MessageDefect): ...
+
+MalformedHeaderDefect = ...  # type: Any
+
+class MultipartInvariantViolationDefect(MessageDefect): ...
+class InvalidMultipartContentTransferEncodingDefect(MessageDefect): ...
+class UndecodableBytesDefect(MessageDefect): ...
+class InvalidBase64PaddingDefect(MessageDefect): ...
+class InvalidBase64CharactersDefect(MessageDefect): ...
+
+class HeaderDefect(MessageDefect):
+    def __init__(self, *args, **kw) -> None: ...
+
+class InvalidHeaderDefect(HeaderDefect): ...
+class HeaderMissingRequiredValue(HeaderDefect): ...
+
+class NonPrintableDefect(HeaderDefect):
+    non_printables = ...  # type: Any
+    def __init__(self, non_printables) -> None: ...
+
+class ObsoleteHeaderDefect(HeaderDefect): ...
+class NonASCIILocalPartDefect(HeaderDefect): ...
diff --git a/typeshed/stdlib/3/email/feedparser.pyi b/typeshed/stdlib/3/email/feedparser.pyi
new file mode 100644
index 0000000..135a9f8
--- /dev/null
+++ b/typeshed/stdlib/3/email/feedparser.pyi
@@ -0,0 +1,26 @@
+# Stubs for email.feedparser (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class BufferedSubFile:
+    def __init__(self) -> None: ...
+    def push_eof_matcher(self, pred): ...
+    def pop_eof_matcher(self): ...
+    def close(self): ...
+    def readline(self): ...
+    def unreadline(self, line): ...
+    def push(self, data): ...
+    def pushlines(self, lines): ...
+    def __iter__(self): ...
+    def __next__(self): ...
+
+class FeedParser:
+    policy = ...  # type: Any
+    def __init__(self, _factory=..., *, policy=...) -> None: ...
+    def feed(self, data): ...
+    def close(self): ...
+
+class BytesFeedParser(FeedParser):
+    def feed(self, data): ...
diff --git a/typeshed/stdlib/3/email/generator.pyi b/typeshed/stdlib/3/email/generator.pyi
new file mode 100644
index 0000000..a8ac7b4
--- /dev/null
+++ b/typeshed/stdlib/3/email/generator.pyi
@@ -0,0 +1,19 @@
+# Stubs for email.generator (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Generator:
+    maxheaderlen = ...  # type: Any
+    policy = ...  # type: Any
+    def __init__(self, outfp, mangle_from_=..., maxheaderlen=..., *, policy=...) -> None: ...
+    def write(self, s): ...
+    def flatten(self, msg, unixfrom=..., linesep=...): ...
+    def clone(self, fp): ...
+
+class BytesGenerator(Generator):
+    def write(self, s): ...
+
+class DecodedGenerator(Generator):
+    def __init__(self, outfp, mangle_from_=..., maxheaderlen=..., fmt=...) -> None: ...
diff --git a/typeshed/stdlib/3/email/header.pyi b/typeshed/stdlib/3/email/header.pyi
new file mode 100644
index 0000000..7665017
--- /dev/null
+++ b/typeshed/stdlib/3/email/header.pyi
@@ -0,0 +1,29 @@
+# Stubs for email.header (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def decode_header(header): ...
+def make_header(decoded_seq, maxlinelen=..., header_name=..., continuation_ws=...): ...
+
+class Header:
+    def __init__(self, s=..., charset=..., maxlinelen=..., header_name=...,
+                 continuation_ws=..., errors=...) -> None: ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def append(self, s, charset=..., errors=...): ...
+    def encode(self, splitchars=..., maxlinelen=..., linesep=...): ...
+
+class _ValueFormatter:
+    def __init__(self, headerlen, maxlen, continuation_ws, splitchars) -> None: ...
+    def newline(self): ...
+    def add_transition(self): ...
+    def feed(self, fws, string, charset): ...
+
+class _Accumulator(list):
+    def __init__(self, initial_size=...) -> None: ...
+    def push(self, fws, string): ...
+    def pop_from(self, i=...): ...
+    def __len__(self): ...
+    def reset(self, startval=...): ...
+    def is_onlyws(self): ...
+    def part_count(self): ...
diff --git a/typeshed/stdlib/3/email/headerregistry.pyi b/typeshed/stdlib/3/email/headerregistry.pyi
new file mode 100644
index 0000000..41123fa
--- /dev/null
+++ b/typeshed/stdlib/3/email/headerregistry.pyi
@@ -0,0 +1,133 @@
+# Stubs for email.headerregistry (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Address:
+    def __init__(self, display_name=..., username=..., domain=..., addr_spec=...) -> None: ...
+    @property
+    def display_name(self): ...
+    @property
+    def username(self): ...
+    @property
+    def domain(self): ...
+    @property
+    def addr_spec(self): ...
+    def __eq__(self, other): ...
+
+class Group:
+    def __init__(self, display_name=..., addresses=...) -> None: ...
+    @property
+    def display_name(self): ...
+    @property
+    def addresses(self): ...
+    def __eq__(self, other): ...
+
+class BaseHeader(str):
+    def __new__(cls, name, value): ...
+    def init(self, name, parse_tree, defects): ...
+    @property
+    def name(self): ...
+    @property
+    def defects(self): ...
+    def __reduce__(self): ...
+    def fold(self, policy): ...
+
+class UnstructuredHeader:
+    max_count = ...  # type: Any
+    value_parser = ...  # type: Any
+    @classmethod
+    def parse(cls, value, kwds): ...
+
+class UniqueUnstructuredHeader(UnstructuredHeader):
+    max_count = ...  # type: Any
+
+class DateHeader:
+    max_count = ...  # type: Any
+    value_parser = ...  # type: Any
+    @classmethod
+    def parse(cls, value, kwds): ...
+    def init(self, *args, **kw): ...
+    @property
+    def datetime(self): ...
+
+class UniqueDateHeader(DateHeader):
+    max_count = ...  # type: Any
+
+class AddressHeader:
+    max_count = ...  # type: Any
+    @staticmethod
+    def value_parser(value): ...
+    @classmethod
+    def parse(cls, value, kwds): ...
+    def init(self, *args, **kw): ...
+    @property
+    def groups(self): ...
+    @property
+    def addresses(self): ...
+
+class UniqueAddressHeader(AddressHeader):
+    max_count = ...  # type: Any
+
+class SingleAddressHeader(AddressHeader):
+    @property
+    def address(self): ...
+
+class UniqueSingleAddressHeader(SingleAddressHeader):
+    max_count = ...  # type: Any
+
+class MIMEVersionHeader:
+    max_count = ...  # type: Any
+    value_parser = ...  # type: Any
+    @classmethod
+    def parse(cls, value, kwds): ...
+    def init(self, *args, **kw): ...
+    @property
+    def major(self): ...
+    @property
+    def minor(self): ...
+    @property
+    def version(self): ...
+
+class ParameterizedMIMEHeader:
+    max_count = ...  # type: Any
+    @classmethod
+    def parse(cls, value, kwds): ...
+    def init(self, *args, **kw): ...
+    @property
+    def params(self): ...
+
+class ContentTypeHeader(ParameterizedMIMEHeader):
+    value_parser = ...  # type: Any
+    def init(self, *args, **kw): ...
+    @property
+    def maintype(self): ...
+    @property
+    def subtype(self): ...
+    @property
+    def content_type(self): ...
+
+class ContentDispositionHeader(ParameterizedMIMEHeader):
+    value_parser = ...  # type: Any
+    def init(self, *args, **kw): ...
+    @property
+    def content_disposition(self): ...
+
+class ContentTransferEncodingHeader:
+    max_count = ...  # type: Any
+    value_parser = ...  # type: Any
+    @classmethod
+    def parse(cls, value, kwds): ...
+    def init(self, *args, **kw): ...
+    @property
+    def cte(self): ...
+
+class HeaderRegistry:
+    registry = ...  # type: Any
+    base_class = ...  # type: Any
+    default_class = ...  # type: Any
+    def __init__(self, base_class=..., default_class=..., use_default_map=...) -> None: ...
+    def map_to_type(self, name, cls): ...
+    def __getitem__(self, name): ...
+    def __call__(self, name, value): ...
diff --git a/typeshed/stdlib/3/email/iterators.pyi b/typeshed/stdlib/3/email/iterators.pyi
new file mode 100644
index 0000000..c1bfb2f
--- /dev/null
+++ b/typeshed/stdlib/3/email/iterators.pyi
@@ -0,0 +1,7 @@
+# Stubs for email.iterators (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def walk(self): ...
+def body_line_iterator(msg, decode=...): ...
+def typed_subpart_iterator(msg, maintype=..., subtype=...): ...
diff --git a/typeshed/stdlib/3/email/message.pyi b/typeshed/stdlib/3/email/message.pyi
new file mode 100644
index 0000000..57d9631
--- /dev/null
+++ b/typeshed/stdlib/3/email/message.pyi
@@ -0,0 +1,74 @@
+# Stubs for email.message (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Message:
+    policy = ...  # type: Any
+    preamble = ...  # type: Any
+    defects = ...  # type: Any
+    def __init__(self, policy=...) -> None: ...
+    def as_string(self, unixfrom=..., maxheaderlen=..., policy=...): ...
+    def __bytes__(self): ...
+    def as_bytes(self, unixfrom=..., policy=...): ...
+    def is_multipart(self): ...
+    def set_unixfrom(self, unixfrom): ...
+    def get_unixfrom(self): ...
+    def attach(self, payload): ...
+    def get_payload(self, i=..., decode=...): ...
+    def set_payload(self, payload, charset=...): ...
+    def set_charset(self, charset): ...
+    def get_charset(self): ...
+    def __len__(self): ...
+    def __getitem__(self, name): ...
+    def __setitem__(self, name, val): ...
+    def __delitem__(self, name): ...
+    def __contains__(self, name): ...
+    def __iter__(self): ...
+    def keys(self): ...
+    def values(self): ...
+    def items(self): ...
+    def get(self, name, failobj=...): ...
+    def set_raw(self, name, value): ...
+    def raw_items(self): ...
+    def get_all(self, name, failobj=...): ...
+    def add_header(self, _name, _value, **_params): ...
+    def replace_header(self, _name, _value): ...
+    def get_content_type(self): ...
+    def get_content_maintype(self): ...
+    def get_content_subtype(self): ...
+    def get_default_type(self): ...
+    def set_default_type(self, ctype): ...
+    def get_params(self, failobj=..., header=..., unquote=...): ...
+    def get_param(self, param, failobj=..., header=..., unquote=...): ...
+    def set_param(self, param, value, header=..., requote=..., charset=..., language=...,
+                  replace=...): ...
+    def del_param(self, param, header=..., requote=...): ...
+    def set_type(self, type, header=..., requote=...): ...
+    def get_filename(self, failobj=...): ...
+    def get_boundary(self, failobj=...): ...
+    def set_boundary(self, boundary): ...
+    def get_content_charset(self, failobj=...): ...
+    def get_charsets(self, failobj=...): ...
+
+class MIMEPart(Message):
+    def __init__(self, policy=...) -> None: ...
+    @property
+    def is_attachment(self): ...
+    def get_body(self, preferencelist=...): ...
+    def iter_attachments(self): ...
+    def iter_parts(self): ...
+    def get_content(self, *args, content_manager=..., **kw): ...
+    def set_content(self, *args, content_manager=..., **kw): ...
+    def make_related(self, boundary=...): ...
+    def make_alternative(self, boundary=...): ...
+    def make_mixed(self, boundary=...): ...
+    def add_related(self, *args, **kw): ...
+    def add_alternative(self, *args, **kw): ...
+    def add_attachment(self, *args, **kw): ...
+    def clear(self): ...
+    def clear_content(self): ...
+
+class EmailMessage(MIMEPart):
+    def set_content(self, *args, **kw): ...
diff --git a/typeshed/stdlib/3/email/mime/__init__.pyi b/typeshed/stdlib/3/email/mime/__init__.pyi
new file mode 100644
index 0000000..6c1b10c
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for email.mime (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/stdlib/3/email/mime/application.pyi b/typeshed/stdlib/3/email/mime/application.pyi
new file mode 100644
index 0000000..bccbcb8
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/application.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.application (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEApplication(MIMENonMultipart):
+    def __init__(self, _data, _subtype=..., _encoder=..., **_params) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/audio.pyi b/typeshed/stdlib/3/email/mime/audio.pyi
new file mode 100644
index 0000000..b153cda
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/audio.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.audio (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEAudio(MIMENonMultipart):
+    def __init__(self, _audiodata, _subtype=..., _encoder=..., **_params) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/base.pyi b/typeshed/stdlib/3/email/mime/base.pyi
new file mode 100644
index 0000000..3fe87d5
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/base.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.base (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email import message
+
+class MIMEBase(message.Message):
+    def __init__(self, _maintype, _subtype, **_params) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/image.pyi b/typeshed/stdlib/3/email/mime/image.pyi
new file mode 100644
index 0000000..629c61f
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/image.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.image (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEImage(MIMENonMultipart):
+    def __init__(self, _imagedata, _subtype=..., _encoder=..., **_params) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/message.pyi b/typeshed/stdlib/3/email/mime/message.pyi
new file mode 100644
index 0000000..d4ff7d5
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/message.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.message (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEMessage(MIMENonMultipart):
+    def __init__(self, _msg, _subtype=...) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/multipart.pyi b/typeshed/stdlib/3/email/mime/multipart.pyi
new file mode 100644
index 0000000..f998d94
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/multipart.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.multipart (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.base import MIMEBase
+
+class MIMEMultipart(MIMEBase):
+    def __init__(self, _subtype=..., boundary=..., _subparts=..., **_params) -> None: ...
diff --git a/typeshed/stdlib/3/email/mime/nonmultipart.pyi b/typeshed/stdlib/3/email/mime/nonmultipart.pyi
new file mode 100644
index 0000000..4e17cf9
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/nonmultipart.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.nonmultipart (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.base import MIMEBase
+
+class MIMENonMultipart(MIMEBase):
+    def attach(self, payload): ...
diff --git a/typeshed/stdlib/3/email/mime/text.pyi b/typeshed/stdlib/3/email/mime/text.pyi
new file mode 100644
index 0000000..71c86ac
--- /dev/null
+++ b/typeshed/stdlib/3/email/mime/text.pyi
@@ -0,0 +1,8 @@
+# Stubs for email.mime.text (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email.mime.nonmultipart import MIMENonMultipart
+
+class MIMEText(MIMENonMultipart):
+    def __init__(self, _text, _subtype=..., _charset=...) -> None: ...
diff --git a/typeshed/stdlib/3/email/parser.pyi b/typeshed/stdlib/3/email/parser.pyi
new file mode 100644
index 0000000..b8fc6ab
--- /dev/null
+++ b/typeshed/stdlib/3/email/parser.pyi
@@ -0,0 +1,29 @@
+# Stubs for email.parser (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import email.feedparser
+
+FeedParser = email.feedparser.FeedParser
+BytesFeedParser = email.feedparser.BytesFeedParser
+
+class Parser:
+    policy = ...  # type: Any
+    def __init__(self, _class=..., *, policy=...) -> None: ...
+    def parse(self, fp, headersonly=...): ...
+    def parsestr(self, text, headersonly=...): ...
+
+class HeaderParser(Parser):
+    def parse(self, fp, headersonly=...): ...
+    def parsestr(self, text, headersonly=...): ...
+
+class BytesParser:
+    parser = ...  # type: Any
+    def __init__(self, *args, **kw) -> None: ...
+    def parse(self, fp, headersonly=...): ...
+    def parsebytes(self, text, headersonly=...): ...
+
+class BytesHeaderParser(BytesParser):
+    def parse(self, fp, headersonly=...): ...
+    def parsebytes(self, text, headersonly=...): ...
diff --git a/typeshed/stdlib/3/email/policy.pyi b/typeshed/stdlib/3/email/policy.pyi
new file mode 100644
index 0000000..26c5dc3
--- /dev/null
+++ b/typeshed/stdlib/3/email/policy.pyi
@@ -0,0 +1,26 @@
+# Stubs for email.policy (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import email._policybase
+
+Policy = email._policybase.Policy
+Compat32 = email._policybase.Compat32
+
+class EmailPolicy(Policy):
+    refold_source = ...  # type: Any
+    header_factory = ...  # type: Any
+    content_manager = ...  # type: Any
+    def __init__(self, **kw) -> None: ...
+    def header_max_count(self, name): ...
+    def header_source_parse(self, sourcelines): ...
+    def header_store_parse(self, name, value): ...
+    def header_fetch_parse(self, name, value): ...
+    def fold(self, name, value): ...
+    def fold_binary(self, name, value): ...
+
+default = ...  # type: Any
+strict = ...  # type: Any
+SMTP = ...  # type: Any
+HTTP = ...  # type: Any
diff --git a/typeshed/stdlib/3/email/quoprimime.pyi b/typeshed/stdlib/3/email/quoprimime.pyi
new file mode 100644
index 0000000..558e8ff
--- /dev/null
+++ b/typeshed/stdlib/3/email/quoprimime.pyi
@@ -0,0 +1,18 @@
+# Stubs for email.quoprimime (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def header_length(bytearray): ...
+def body_length(bytearray): ...
+def unquote(s): ...
+def quote(c): ...
+def header_encode(header_bytes, charset=...): ...
+def body_encode(body, maxlinelen=..., eol=...): ...
+def decode(encoded, eol=...): ...
+
+body_decode = ...  # type: Any
+decodestring = ...  # type: Any
+
+def header_decode(s): ...
diff --git a/typeshed/stdlib/3/email/utils.pyi b/typeshed/stdlib/3/email/utils.pyi
new file mode 100644
index 0000000..6a2ac27
--- /dev/null
+++ b/typeshed/stdlib/3/email/utils.pyi
@@ -0,0 +1,22 @@
+# Stubs for email.utils (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+import email._parseaddr
+
+mktime_tz = email._parseaddr.mktime_tz
+parsedate = email._parseaddr.parsedate
+parsedate_tz = email._parseaddr.parsedate_tz
+
+def formataddr(pair, charset=...): ...
+def getaddresses(fieldvalues): ...
+def formatdate(timeval=..., localtime=..., usegmt=...): ...
+def format_datetime(dt, usegmt=...): ...
+def make_msgid(idstring=..., domain=...): ...
+def parsedate_to_datetime(data): ...
+def parseaddr(addr): ...
+def unquote(str): ...
+def decode_rfc2231(s): ...
+def encode_rfc2231(s, charset=..., language=...): ...
+def decode_params(params): ...
+def collapse_rfc2231_value(value, errors=..., fallback_charset=...): ...
diff --git a/typeshed/stdlib/3/encodings/__init__.pyi b/typeshed/stdlib/3/encodings/__init__.pyi
new file mode 100644
index 0000000..2ae6c0a
--- /dev/null
+++ b/typeshed/stdlib/3/encodings/__init__.pyi
@@ -0,0 +1,6 @@
+import codecs
+
+import typing
+
+def search_function(encoding: str) -> codecs.CodecInfo:
+    ...
diff --git a/typeshed/stdlib/3/encodings/utf_8.pyi b/typeshed/stdlib/3/encodings/utf_8.pyi
new file mode 100644
index 0000000..3be496a
--- /dev/null
+++ b/typeshed/stdlib/3/encodings/utf_8.pyi
@@ -0,0 +1,14 @@
+import codecs
+
+class IncrementalEncoder(codecs.IncrementalEncoder):
+    pass
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+    pass
+class StreamWriter(codecs.StreamWriter):
+    pass
+class StreamReader(codecs.StreamReader):
+    pass
+
+def getregentry() -> codecs.CodecInfo: pass
+def encode(input: str, errors: str = ...) -> bytes: pass
+def decode(input: bytes, errors: str = ...) -> str: pass
diff --git a/typeshed/stdlib/3/errno.pyi b/typeshed/stdlib/3/errno.pyi
new file mode 100644
index 0000000..e1f2ee3
--- /dev/null
+++ b/typeshed/stdlib/3/errno.pyi
@@ -0,0 +1,132 @@
+# Stubs for errno
+
+# Based on http://docs.python.org/3.2/library/errno.html
+
+from typing import Dict
+
+errorcode = ...  # type: Dict[int, str]
+
+# TODO some of the names below are platform specific
+
+EPERM = 0
+ENOENT = 0
+ESRCH = 0
+EINTR = 0
+EIO = 0
+ENXIO = 0
+E2BIG = 0
+ENOEXEC = 0
+EBADF = 0
+ECHILD = 0
+EAGAIN = 0
+ENOMEM = 0
+EACCES = 0
+EFAULT = 0
+ENOTBLK = 0
+EBUSY = 0
+EEXIST = 0
+EXDEV = 0
+ENODEV = 0
+ENOTDIR = 0
+EISDIR = 0
+EINVAL = 0
+ENFILE = 0
+EMFILE = 0
+ENOTTY = 0
+ETXTBSY = 0
+EFBIG = 0
+ENOSPC = 0
+ESPIPE = 0
+EROFS = 0
+EMLINK = 0
+EPIPE = 0
+EDOM = 0
+ERANGE = 0
+EDEADLK = 0
+ENAMETOOLONG = 0
+ENOLCK = 0
+ENOSYS = 0
+ENOTEMPTY = 0
+ELOOP = 0
+EWOULDBLOCK = 0
+ENOMSG = 0
+EIDRM = 0
+ECHRNG = 0
+EL2NSYNC = 0
+EL3HLT = 0
+EL3RST = 0
+ELNRNG = 0
+EUNATCH = 0
+ENOCSI = 0
+EL2HLT = 0
+EBADE = 0
+EBADR = 0
+EXFULL = 0
+ENOANO = 0
+EBADRQC = 0
+EBADSLT = 0
+EDEADLOCK = 0
+EBFONT = 0
+ENOSTR = 0
+ENODATA = 0
+ETIME = 0
+ENOSR = 0
+ENONET = 0
+ENOPKG = 0
+EREMOTE = 0
+ENOLINK = 0
+EADV = 0
+ESRMNT = 0
+ECOMM = 0
+EPROTO = 0
+EMULTIHOP = 0
+EDOTDOT = 0
+EBADMSG = 0
+EOVERFLOW = 0
+ENOTUNIQ = 0
+EBADFD = 0
+EREMCHG = 0
+ELIBACC = 0
+ELIBBAD = 0
+ELIBSCN = 0
+ELIBMAX = 0
+ELIBEXEC = 0
+EILSEQ = 0
+ERESTART = 0
+ESTRPIPE = 0
+EUSERS = 0
+ENOTSOCK = 0
+EDESTADDRREQ = 0
+EMSGSIZE = 0
+EPROTOTYPE = 0
+ENOPROTOOPT = 0
+EPROTONOSUPPORT = 0
+ESOCKTNOSUPPORT = 0
+EOPNOTSUPP = 0
+EPFNOSUPPORT = 0
+EAFNOSUPPORT = 0
+EADDRINUSE = 0
+EADDRNOTAVAIL = 0
+ENETDOWN = 0
+ENETUNREACH = 0
+ENETRESET = 0
+ECONNABORTED = 0
+ECONNRESET = 0
+ENOBUFS = 0
+EISCONN = 0
+ENOTCONN = 0
+ESHUTDOWN = 0
+ETOOMANYREFS = 0
+ETIMEDOUT = 0
+ECONNREFUSED = 0
+EHOSTDOWN = 0
+EHOSTUNREACH = 0
+EALREADY = 0
+EINPROGRESS = 0
+ESTALE = 0
+EUCLEAN = 0
+ENOTNAM = 0
+ENAVAIL = 0
+EISNAM = 0
+EREMOTEIO = 0
+EDQUOT = 0
diff --git a/typeshed/stdlib/3/fcntl.pyi b/typeshed/stdlib/3/fcntl.pyi
new file mode 100644
index 0000000..4db5947
--- /dev/null
+++ b/typeshed/stdlib/3/fcntl.pyi
@@ -0,0 +1,11 @@
+# Stubs for fcntl
+
+# NOTE: These are incomplete!
+
+import typing
+
+FD_CLOEXEC = 0
+F_GETFD = 0
+F_SETFD = 0
+
+def fcntl(fd: int, op: int, arg: int = ...) -> int: ...
diff --git a/typeshed/stdlib/3/fnmatch.pyi b/typeshed/stdlib/3/fnmatch.pyi
new file mode 100644
index 0000000..4f99b4a
--- /dev/null
+++ b/typeshed/stdlib/3/fnmatch.pyi
@@ -0,0 +1,11 @@
+# Stubs for fnmatch
+
+# Based on http://docs.python.org/3.2/library/fnmatch.html and
+# python-lib/fnmatch.py
+
+from typing import Iterable, List, AnyStr
+
+def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ...
+def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ...
+def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]: ...
+def translate(pat: str) -> str: ...
diff --git a/typeshed/stdlib/3/functools.pyi b/typeshed/stdlib/3/functools.pyi
new file mode 100644
index 0000000..e8563b5
--- /dev/null
+++ b/typeshed/stdlib/3/functools.pyi
@@ -0,0 +1,45 @@
+# Stubs for functools (Python 3)
+
+# NOTE: These are incomplete!
+
+from abc import ABCMeta, abstractmethod
+from typing import Any, Callable, Generic, Dict, Iterator, Optional, Sequence, Tuple, TypeVar, NamedTuple
+from collections import namedtuple
+
+_AnyCallable = Callable[..., Any]
+
+_T = TypeVar("_T")
+def reduce(function: Callable[[_T, _T], _T],
+           sequence: Iterator[_T], initial: Optional[_T] = ...) -> _T: ...
+
+
+class CacheInfo(NamedTuple('CacheInfo', [
+    ('hits', int), ('misses', int), ('maxsize', int), ('currsize', int)])):
+    pass
+
+class _lru_cache_wrapper(Generic[_T]):
+    __wrapped__ = ... # type: Callable[..., _T]
+    def __call__(self, *args: Any, **kwargs: Any) -> _T: ...
+    def cache_info(self) -> CacheInfo: ...
+
+class lru_cache():
+    def __init__(self, maxsize: int = ..., typed: bool = ...) -> None:
+        pass
+    def __call__(self, f: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ...
+
+
+WRAPPER_ASSIGNMENTS = ... # type: Sequence[str]
+WRAPPER_UPDATES = ... # type: Sequence[str]
+
+def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ...,
+                   updated: Sequence[str] = ...) -> None: ...
+def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ...
+def total_ordering(cls: type) -> type: ...
+def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ...
+
+class partial(Generic[_T]):
+    func = ...  # type: Callable[..., _T]
+    args = ...  # type: Tuple[Any, ...]
+    keywords = ...  # type: Dict[str, Any]
+    def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> _T: ...
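(Illustrative only -- how the reduce/lru_cache signatures above are meant to be used; this sketch assumes the standard-library functools module, not the stub itself:)

    from functools import lru_cache, reduce

    @lru_cache(maxsize=128)              # lru_cache(...)(f) -> _lru_cache_wrapper[int]
    def fib(n: int) -> int:
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    total = reduce(lambda x, y: x + y, [fib(i) for i in range(10)], 0)
    print(total, fib.cache_info())       # cache_info() -> CacheInfo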
diff --git a/typeshed/stdlib/3/gc.pyi b/typeshed/stdlib/3/gc.pyi
new file mode 100644
index 0000000..7f45cdb
--- /dev/null
+++ b/typeshed/stdlib/3/gc.pyi
@@ -0,0 +1,10 @@
+# Stubs for gc
+
+# NOTE: These are incomplete!
+
+import typing
+
+def collect(generation: int = ...) -> int: ...
+def disable() -> None: ...
+def enable() -> None: ...
+def isenabled() -> bool: ...
diff --git a/typeshed/stdlib/3/getopt.pyi b/typeshed/stdlib/3/getopt.pyi
new file mode 100644
index 0000000..169c4e1
--- /dev/null
+++ b/typeshed/stdlib/3/getopt.pyi
@@ -0,0 +1,19 @@
+# Stubs for getopt
+
+# Based on http://docs.python.org/3.2/library/getopt.html
+
+from typing import List, Tuple
+
+def getopt(args: List[str], shortopts: str,
+           longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]],
+                                               List[str]]: ...
+
+def gnu_getopt(args: List[str], shortopts: str,
+               longopts: List[str] = ...) -> Tuple[List[Tuple[str, str]],
+                                                   List[str]]: ...
+
+class GetoptError(Exception):
+    msg = ...  # type: str
+    opt = ...  # type: str
+
+error = GetoptError
diff --git a/typeshed/stdlib/3/getpass.pyi b/typeshed/stdlib/3/getpass.pyi
new file mode 100644
index 0000000..5938d61
--- /dev/null
+++ b/typeshed/stdlib/3/getpass.pyi
@@ -0,0 +1,5 @@
+# Stubs for getpass
+
+# NOTE: These are incomplete!
+
+def getuser() -> str: ...
diff --git a/typeshed/stdlib/3/gettext.pyi b/typeshed/stdlib/3/gettext.pyi
new file mode 100644
index 0000000..339f8a9
--- /dev/null
+++ b/typeshed/stdlib/3/gettext.pyi
@@ -0,0 +1,39 @@
+# Stubs for gettext (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class NullTranslations:
+    def __init__(self, fp=...) -> None: ...
+    def add_fallback(self, fallback): ...
+    def gettext(self, message): ...
+    def lgettext(self, message): ...
+    def ngettext(self, msgid1, msgid2, n): ...
+    def lngettext(self, msgid1, msgid2, n): ...
+    def info(self): ...
+    def charset(self): ...
+    def output_charset(self): ...
+    def set_output_charset(self, charset): ...
+    def install(self, names=...): ...
+
+class GNUTranslations(NullTranslations):
+    LE_MAGIC = ...  # type: Any
+    BE_MAGIC = ...  # type: Any
+    def lgettext(self, message): ...
+    def lngettext(self, msgid1, msgid2, n): ...
+    def gettext(self, message): ...
+    def ngettext(self, msgid1, msgid2, n): ...
+
+def find(domain, localedir=..., languages=..., all=...): ...
+def translation(domain, localedir=..., languages=..., class_=..., fallback=...,
+                codeset=...): ...
+def install(domain, localedir=..., codeset=..., names=...): ...
+def textdomain(domain=...): ...
+def bindtextdomain(domain, localedir=...): ...
+def dgettext(domain, message): ...
+def dngettext(domain, msgid1, msgid2, n): ...
+def gettext(message): ...
+def ngettext(msgid1, msgid2, n): ...
+
+Catalog = ...  # type: Any
diff --git a/typeshed/stdlib/3/glob.pyi b/typeshed/stdlib/3/glob.pyi
new file mode 100644
index 0000000..71ab366
--- /dev/null
+++ b/typeshed/stdlib/3/glob.pyi
@@ -0,0 +1,8 @@
+# Stubs for glob
+
+# Based on http://docs.python.org/3.2/library/glob.html
+
+from typing import List, Iterator, AnyStr
+
+def glob(pathname: AnyStr) -> List[AnyStr]: ...
+def iglob(pathname: AnyStr) -> Iterator[AnyStr]: ...
diff --git a/typeshed/stdlib/3/grp.pyi b/typeshed/stdlib/3/grp.pyi
new file mode 100644
index 0000000..ad14951
--- /dev/null
+++ b/typeshed/stdlib/3/grp.pyi
@@ -0,0 +1,13 @@
+from typing import List
+
+# TODO group database entry object type
+
+class struct_group:
+    gr_name = ...  # type: str
+    gr_passwd = ...  # type: str
+    gr_gid = 0
+    gr_mem = ...  # type: List[str]
+
+def getgrgid(gid: int) -> struct_group: ...
+def getgrnam(name: str) -> struct_group: ...
+def getgrall() -> List[struct_group]: ...
diff --git a/typeshed/stdlib/3/hashlib.pyi b/typeshed/stdlib/3/hashlib.pyi
new file mode 100644
index 0000000..ee8b235
--- /dev/null
+++ b/typeshed/stdlib/3/hashlib.pyi
@@ -0,0 +1,25 @@
+# Stubs for hashlib
+
+# NOTE: These are incomplete!
+
+from abc import abstractmethod, ABCMeta
+import typing
+
+class Hash(metaclass=ABCMeta):
+    @abstractmethod
+    def update(self, arg: bytes) -> None: ...
+    @abstractmethod
+    def digest(self) -> bytes: ...
+    @abstractmethod
+    def hexdigest(self) -> str: ...
+    @abstractmethod
+    def copy(self) -> 'Hash': ...
+
+def md5(arg: bytes = ...) -> Hash: ...
+def sha1(arg: bytes = ...) -> Hash: ...
+def sha224(arg: bytes = ...) -> Hash: ...
+def sha256(arg: bytes = ...) -> Hash: ...
+def sha384(arg: bytes = ...) -> Hash: ...
+def sha512(arg: bytes = ...) -> Hash: ...
+
+def new(name: str, data: bytes = ...) -> Hash: ...
diff --git a/typeshed/stdlib/3/heapq.pyi b/typeshed/stdlib/3/heapq.pyi
new file mode 100644
index 0000000..0894f98
--- /dev/null
+++ b/typeshed/stdlib/3/heapq.pyi
@@ -0,0 +1,18 @@
+# Stubs for heapq
+
+# Based on http://docs.python.org/3.2/library/heapq.html
+
+from typing import TypeVar, List, Iterable, Any, Callable
+
+_T = TypeVar('_T')
+
+def heappush(heap: List[_T], item: _T) -> None: ...
+def heappop(heap: List[_T]) -> _T: ...
+def heappushpop(heap: List[_T], item: _T) -> _T: ...
+def heapify(x: List[_T]) -> None: ...
+def heapreplace(heap: List[_T], item: _T) -> _T: ...
+def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...
+def nlargest(n: int, iterable: Iterable[_T],
+             key: Callable[[_T], Any] = ...) -> List[_T]: ...
+def nsmallest(n: int, iterable: Iterable[_T],
+              key: Callable[[_T], Any] = ...) -> List[_T]: ...
diff --git a/typeshed/stdlib/3/html/__init__.pyi b/typeshed/stdlib/3/html/__init__.pyi
new file mode 100644
index 0000000..c8577ea
--- /dev/null
+++ b/typeshed/stdlib/3/html/__init__.pyi
@@ -0,0 +1,4 @@
+from typing import AnyStr
+
+def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ...
+def unescape(s: AnyStr) -> AnyStr: ...
diff --git a/typeshed/stdlib/3/html/entities.pyi b/typeshed/stdlib/3/html/entities.pyi
new file mode 100644
index 0000000..7cc90b4
--- /dev/null
+++ b/typeshed/stdlib/3/html/entities.pyi
@@ -0,0 +1,10 @@
+# Stubs for html.entities (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+name2codepoint = ... # type: Any
+html5 = ... # type: Any
+codepoint2name = ... # type: Any
+entitydefs = ... # type: Any
diff --git a/typeshed/stdlib/3/html/parser.pyi b/typeshed/stdlib/3/html/parser.pyi
new file mode 100644
index 0000000..4328f3f
--- /dev/null
+++ b/typeshed/stdlib/3/html/parser.pyi
@@ -0,0 +1,28 @@
+from typing import AnyStr, List, Tuple
+from _markupbase import ParserBase
+
+class HTMLParser(ParserBase):
+    def __init__(self, *args, convert_charrefs: bool = ...) -> None: ...
+    def feed(self, feed: AnyStr) -> None: ...
+    def close(self) -> None: ...
+    def reset(self) -> None: ...
+
+    def get_starttag_text(self) -> AnyStr: ...
+    def set_cdata_mode(self, elem: AnyStr) -> None: ...
+    def clear_cdata_mode(self) -> None: ...
+
+    def handle_startendtag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ...
+    def handle_starttag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ...
+    def handle_endtag(self, tag: AnyStr): ...
+    def handle_charref(self, name: AnyStr): ...
+    def handle_entityref(self, name: AnyStr): ...
+    def handle_data(self, data: AnyStr): ...
+    def handle_comment(self, data: AnyStr): ...
+    def handle_decl(self, decl: AnyStr): ...
+    def handle_pi(self, data: AnyStr): ...
+
+    def unknown_decl(self, data: AnyStr): ...
+
+    def unescape(self, s: AnyStr) -> AnyStr: ...
+
+class HTMLParseError(Exception): ...
diff --git a/typeshed/stdlib/3/http/__init__.pyi b/typeshed/stdlib/3/http/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/http/client.pyi b/typeshed/stdlib/3/http/client.pyi
new file mode 100644
index 0000000..1c83fd4
--- /dev/null
+++ b/typeshed/stdlib/3/http/client.pyi
@@ -0,0 +1,101 @@
+# Stubs for http.client (Python 3.4)
+
+from typing import Any, Dict
+import email.message
+import io
+
+responses = ...  # type: Dict[int, str]
+
+class HTTPMessage(email.message.Message):
+    def getallmatchingheaders(self, name): ...
+
+class HTTPResponse(io.RawIOBase):
+    fp = ...  # type: Any
+    debuglevel = ...  # type: Any
+    headers = ...  # type: Any
+    version = ...  # type: Any
+    status = ...  # type: Any
+    reason = ...  # type: Any
+    chunked = ...  # type: Any
+    chunk_left = ...  # type: Any
+    length = ...  # type: Any
+    will_close = ...  # type: Any
+    def __init__(self, sock, debuglevel=..., method=..., url=...) -> None: ...
+    code = ...  # type: Any
+    def begin(self): ...
+    def close(self): ...
+    def flush(self): ...
+    def readable(self): ...
+    def isclosed(self): ...
+    def read(self, amt=...): ...
+    def readinto(self, b): ...
+    def fileno(self): ...
+    def getheader(self, name, default=...): ...
+    def getheaders(self): ...
+    def __iter__(self): ...
+    def info(self): ...
+    def geturl(self): ...
+    def getcode(self): ...
+
+class HTTPConnection:
+    response_class = ...  # type: Any
+    default_port = ...  # type: Any
+    auto_open = ...  # type: Any
+    debuglevel = ...  # type: Any
+    mss = ...  # type: Any
+    timeout = ...  # type: Any
+    source_address = ...  # type: Any
+    sock = ...  # type: Any
+    def __init__(self, host, port=..., timeout=..., source_address=...) -> None: ...
+    def set_tunnel(self, host, port=..., headers=...): ...
+    def set_debuglevel(self, level): ...
+    def connect(self): ...
+    def close(self): ...
+    def send(self, data): ...
+    def putrequest(self, method, url, skip_host=..., skip_accept_encoding=...): ...
+    def putheader(self, header, *values): ...
+    def endheaders(self, message_body=...): ...
+    def request(self, method, url, body=..., headers=...): ...
+    def getresponse(self): ...
+
+class HTTPSConnection(HTTPConnection):
+    default_port = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    def __init__(self, host, port=..., key_file=..., cert_file=..., timeout=...,
+                 source_address=..., *, context=..., check_hostname=...): ...
+    sock = ...  # type: Any
+    def connect(self): ...
+
+class HTTPException(Exception): ...
+class NotConnected(HTTPException): ...
+class InvalidURL(HTTPException): ...
+
+class UnknownProtocol(HTTPException):
+    args = ...  # type: Any
+    version = ...  # type: Any
+    def __init__(self, version) -> None: ...
+
+class UnknownTransferEncoding(HTTPException): ...
+class UnimplementedFileMode(HTTPException): ...
+
+class IncompleteRead(HTTPException):
+    args = ...  # type: Any
+    partial = ...  # type: Any
+    expected = ...  # type: Any
+    def __init__(self, partial, expected=...) -> None: ...
+
+class ImproperConnectionState(HTTPException): ...
+class CannotSendRequest(ImproperConnectionState): ...
+class CannotSendHeader(ImproperConnectionState): ...
+class ResponseNotReady(ImproperConnectionState): ...
+
+class BadStatusLine(HTTPException):
+    args = ...  # type: Any
+    line = ...  # type: Any
+    def __init__(self, line) -> None: ...
+
+class LineTooLong(HTTPException):
+    def __init__(self, line_type) -> None: ...
+
+error = HTTPException
diff --git a/typeshed/stdlib/3/http/cookiejar.pyi b/typeshed/stdlib/3/http/cookiejar.pyi
new file mode 100644
index 0000000..8e56a33
--- /dev/null
+++ b/typeshed/stdlib/3/http/cookiejar.pyi
@@ -0,0 +1,121 @@
+# Stubs for http.cookiejar (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Cookie:
+    version = ...  # type: Any
+    name = ...  # type: Any
+    value = ...  # type: Any
+    port = ...  # type: Any
+    port_specified = ...  # type: Any
+    domain = ...  # type: Any
+    domain_specified = ...  # type: Any
+    domain_initial_dot = ...  # type: Any
+    path = ...  # type: Any
+    path_specified = ...  # type: Any
+    secure = ...  # type: Any
+    expires = ...  # type: Any
+    discard = ...  # type: Any
+    comment = ...  # type: Any
+    comment_url = ...  # type: Any
+    rfc2109 = ...  # type: Any
+    def __init__(self, version, name, value, port, port_specified, domain, domain_specified,
+                 domain_initial_dot, path, path_specified, secure, expires, discard, comment,
+                 comment_url, rest, rfc2109=...): ...
+    def has_nonstandard_attr(self, name): ...
+    def get_nonstandard_attr(self, name, default=...): ...
+    def set_nonstandard_attr(self, name, value): ...
+    def is_expired(self, now=...): ...
+
+class CookiePolicy:
+    def set_ok(self, cookie, request): ...
+    def return_ok(self, cookie, request): ...
+    def domain_return_ok(self, domain, request): ...
+    def path_return_ok(self, path, request): ...
+
+class DefaultCookiePolicy(CookiePolicy):
+    DomainStrictNoDots = ...  # type: Any
+    DomainStrictNonDomain = ...  # type: Any
+    DomainRFC2965Match = ...  # type: Any
+    DomainLiberal = ...  # type: Any
+    DomainStrict = ...  # type: Any
+    netscape = ...  # type: Any
+    rfc2965 = ...  # type: Any
+    rfc2109_as_netscape = ...  # type: Any
+    hide_cookie2 = ...  # type: Any
+    strict_domain = ...  # type: Any
+    strict_rfc2965_unverifiable = ...  # type: Any
+    strict_ns_unverifiable = ...  # type: Any
+    strict_ns_domain = ...  # type: Any
+    strict_ns_set_initial_dollar = ...  # type: Any
+    strict_ns_set_path = ...  # type: Any
+    def __init__(self, blocked_domains=..., allowed_domains=..., netscape=..., rfc2965=...,
+                 rfc2109_as_netscape=..., hide_cookie2=..., strict_domain=...,
+                 strict_rfc2965_unverifiable=..., strict_ns_unverifiable=...,
+                 strict_ns_domain=..., strict_ns_set_initial_dollar=...,
+                 strict_ns_set_path=...): ...
+    def blocked_domains(self): ...
+    def set_blocked_domains(self, blocked_domains): ...
+    def is_blocked(self, domain): ...
+    def allowed_domains(self): ...
+    def set_allowed_domains(self, allowed_domains): ...
+    def is_not_allowed(self, domain): ...
+    def set_ok(self, cookie, request): ...
+    def set_ok_version(self, cookie, request): ...
+    def set_ok_verifiability(self, cookie, request): ...
+    def set_ok_name(self, cookie, request): ...
+    def set_ok_path(self, cookie, request): ...
+    def set_ok_domain(self, cookie, request): ...
+    def set_ok_port(self, cookie, request): ...
+    def return_ok(self, cookie, request): ...
+    def return_ok_version(self, cookie, request): ...
+    def return_ok_verifiability(self, cookie, request): ...
+    def return_ok_secure(self, cookie, request): ...
+    def return_ok_expires(self, cookie, request): ...
+    def return_ok_port(self, cookie, request): ...
+    def return_ok_domain(self, cookie, request): ...
+    def domain_return_ok(self, domain, request): ...
+    def path_return_ok(self, path, request): ...
+
+class Absent: ...
+
+class CookieJar:
+    non_word_re = ...  # type: Any
+    quote_re = ...  # type: Any
+    strict_domain_re = ...  # type: Any
+    domain_re = ...  # type: Any
+    dots_re = ...  # type: Any
+    magic_re = ...  # type: Any
+    def __init__(self, policy=...) -> None: ...
+    def set_policy(self, policy): ...
+    def add_cookie_header(self, request): ...
+    def make_cookies(self, response, request): ...
+    def set_cookie_if_ok(self, cookie, request): ...
+    def set_cookie(self, cookie): ...
+    def extract_cookies(self, response, request): ...
+    def clear(self, domain=..., path=..., name=...): ...
+    def clear_session_cookies(self): ...
+    def clear_expired_cookies(self): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+
+class LoadError(OSError): ...
+
+class FileCookieJar(CookieJar):
+    filename = ...  # type: Any
+    delayload = ...  # type: Any
+    def __init__(self, filename=..., delayload=..., policy=...) -> None: ...
+    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+    def load(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+    def revert(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+
+class LWPCookieJar(FileCookieJar):
+    def as_lwp_str(self, ignore_discard=..., ignore_expires=...): ...
+    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+
+class MozillaCookieJar(FileCookieJar):
+    magic_re = ...  # type: Any
+    header = ...  # type: Any
+    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
diff --git a/typeshed/stdlib/3/imp.pyi b/typeshed/stdlib/3/imp.pyi
new file mode 100644
index 0000000..678b0c1
--- /dev/null
+++ b/typeshed/stdlib/3/imp.pyi
@@ -0,0 +1,10 @@
+# Stubs for imp
+
+# NOTE: These are incomplete!
+
+from typing import TypeVar
+
+_T = TypeVar('_T')
+
+def cache_from_source(path: str, debug_override: bool = ...) -> str: ...
+def reload(module: _T) -> _T: ... # TODO imprecise signature
diff --git a/typeshed/stdlib/3/importlib.pyi b/typeshed/stdlib/3/importlib.pyi
new file mode 100644
index 0000000..12cc79e
--- /dev/null
+++ b/typeshed/stdlib/3/importlib.pyi
@@ -0,0 +1,9 @@
+# Stubs for importlib
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+# TODO more precise type?
+def import_module(name: str, package: str = ...) -> Any: ...
+def invalidate_caches() -> None: ...
diff --git a/typeshed/stdlib/3/inspect.pyi b/typeshed/stdlib/3/inspect.pyi
new file mode 100644
index 0000000..abbc7f6
--- /dev/null
+++ b/typeshed/stdlib/3/inspect.pyi
@@ -0,0 +1,44 @@
+# Stubs for inspect
+
+from typing import Any, Tuple, List, Dict, Callable, NamedTuple
+from types import FrameType
+
+_object = object
+
+def getmembers(obj: object, predicate: Callable[[Any], bool] = ...) -> List[Tuple[str, object]]: ...
+
+def isclass(obj: object) -> bool: ...
+
+# namedtuple('Attribute', 'name kind defining_class object')
+class Attribute(tuple):
+    name = ...  # type: str
+    kind = ...  # type: str
+    defining_class = ...  # type: type
+    object = ...  # type: _object
+
+def classify_class_attrs(cls: type) -> List[Attribute]: ...
+
+def cleandoc(doc: str) -> str: ...
+
+def getsourcelines(obj: object) -> Tuple[List[str], int]: ...
+
+ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
+                                 ('varargs', str),
+                                 ('keywords', str),
+                                 ('defaults', tuple),
+                                 ])
+
+def getargspec(func: object) -> ArgSpec: ...
+
+FullArgSpec = NamedTuple('FullArgSpec', [('args', List[str]),
+                                         ('varargs', str),
+                                         ('varkw', str),
+                                         ('defaults', tuple),
+                                         ('kwonlyargs', List[str]),
+                                         ('kwonlydefaults', Dict[str, Any]),
+                                         ('annotations', Dict[str, Any]),
+                                         ])
+
+def getfullargspec(func: object) -> FullArgSpec: ...
+
+def stack() -> List[Tuple[FrameType, str, int, str, List[str], int]]: ...
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
new file mode 100644
index 0000000..ca410f0
--- /dev/null
+++ b/typeshed/stdlib/3/io.pyi
@@ -0,0 +1,150 @@
+# Stubs for io
+
+# Based on http://docs.python.org/3.2/library/io.html
+
+from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any
+import builtins
+import codecs
+import _io
+
+DEFAULT_BUFFER_SIZE = 0  # type: int
+SEEK_SET = ...  # type: int
+SEEK_CUR = ...  # type: int
+SEEK_END = ...  # type: int
+
+open = builtins.open
+
+class BlockingIOError(OSError): ...
+class UnsupportedOperation(ValueError, OSError): ...
+
+class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
+    newlines = ...  # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+    def decode(self, input, final=...): ...
+    def getstate(self): ...
+    def reset(self): ...
+    def setstate(self, state): ...
+
+class IOBase(_io._IOBase): ...
+class RawIOBase(_io._RawIOBase, IOBase): ...
+class BufferedIOBase(_io._BufferedIOBase, IOBase): ...
+class TextIOBase(_io._TextIOBase, IOBase): ...
+
+class FileIO(_io._RawIOBase):
+    closefd = ...  # type: Any
+    mode = ...  # type: Any
+    def __init__(self, name, mode=..., closefd=..., opener=...) -> None: ...
+    def readinto(self, b): ...
+    def write(self, b): ...
+
+class BufferedReader(_io._BufferedIOBase):
+    mode = ...  # type: Any
+    name = ...  # type: Any
+    raw = ...  # type: Any
+    def __init__(self, raw, buffer_size=...) -> None: ...
+    def peek(self, size: int = ...): ...
+
+class BufferedWriter(_io._BufferedIOBase):
+    mode = ...  # type: Any
+    name = ...  # type: Any
+    raw = ...  # type: Any
+    def __init__(self, raw, buffer_size=...) -> None: ...
+
+class BufferedRWPair(_io._BufferedIOBase):
+    def __init__(self, reader, writer, buffer_size=...) -> None: ...
+    def peek(self, size: int = ...): ...
+
+class BufferedRandom(_io._BufferedIOBase):
+    mode = ...  # type: Any
+    name = ...  # type: Any
+    raw = ...  # type: Any
+    def __init__(self, raw, buffer_size=...) -> None: ...
+    def peek(self, size: int = ...): ...
+
+class BytesIO(BinaryIO):
+    def __init__(self, initial_bytes: bytes = ...) -> None: ...
+    # TODO getbuffer
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> bytes: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> bytes: ...
+    def readlines(self, hint: int = ...) -> List[bytes]: ...
+    def seek(self, offset: int, whence: int = ...) -> int: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    @overload
+    def write(self, s: bytes) -> int: ...
+    @overload
+    def write(self, s: bytearray) -> int: ...
+    def writelines(self, lines: Iterable[bytes]) -> None: ...
+    def getvalue(self) -> bytes: ...
+    def read1(self) -> bytes: ...
+
+    def __iter__(self) -> Iterator[bytes]: ...
+    def __enter__(self) -> 'BytesIO': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class StringIO(TextIO):
+    def __init__(self, initial_value: str = ...,
+                 newline: str = ...) -> None: ...
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> str: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> int: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def write(self, s: str) -> int: ...
+    def writelines(self, lines: Iterable[str]) -> None: ...
+    def getvalue(self) -> str: ...
+
+    def __iter__(self) -> Iterator[str]: ...
+    def __enter__(self) -> 'StringIO': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class TextIOWrapper(TextIO):
+    # TODO: This is actually a subclass of _io._TextIOBase.
+    # write_through is undocumented but used by subprocess
+    def __init__(self, buffer: IO[bytes], encoding: str = ...,
+                 errors: str = ..., newline: str = ...,
+                 line_buffering: bool = ...,
+                 write_through: bool = ...) -> None: ...
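+    # Illustrative note (not part of upstream typeshed): write_through=True
+    # makes write() hand data straight to the underlying binary buffer rather
+    # than holding it in the wrapper's own buffer, which is why subprocess
+    # passes it when wrapping pipes in text mode.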
+    # TODO see comments in BinaryIO for missing functionality
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    def fileno(self) -> int: ...
+    def flush(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def read(self, n: int = ...) -> str: ...
+    def readable(self) -> bool: ...
+    def readline(self, limit: int = ...) -> str: ...
+    def readlines(self, hint: int = ...) -> List[str]: ...
+    def seek(self, offset: int, whence: int = ...) -> int: ...
+    def seekable(self) -> bool: ...
+    def tell(self) -> int: ...
+    def truncate(self, size: int = ...) -> int: ...
+    def writable(self) -> bool: ...
+    def write(self, s: str) -> int: ...
+    def writelines(self, lines: Iterable[str]) -> None: ...
+
+    def __iter__(self) -> Iterator[str]: ...
+    def __enter__(self) -> 'TextIOWrapper': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
new file mode 100644
index 0000000..cb219b4
--- /dev/null
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -0,0 +1,57 @@
+# Stubs for itertools
+
+# Based on http://docs.python.org/3.2/library/itertools.html
+
+from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
+                    Union, Sequence)
+
+_T = TypeVar('_T')
+_S = TypeVar('_S')
+
+def count(start: int = ...,
+          step: int = ...) -> Iterator[int]: ... # more general types?
+def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ...
+
+@overload
+def repeat(object: _T) -> Iterator[_T]: ...
+@overload
+def repeat(object: _T, times: int) -> Iterator[_T]: ...
+
+def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ...
+def chain(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+# TODO chain.from_iterable
+def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ...
+def dropwhile(predicate: Callable[[_T], Any],
+              iterable: Iterable[_T]) -> Iterator[_T]: ...
+def filterfalse(predicate: Callable[[_T], Any],
+                iterable: Iterable[_T]) -> Iterator[_T]: ...
+
+@overload
+def groupby(iterable: Iterable[_T]) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
+@overload
+def groupby(iterable: Iterable[_T],
+            key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ...
+
+@overload
+def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
+@overload
+def islice(iterable: Iterable[_T], start: int, stop: int,
+           step: int = ...) -> Iterator[_T]: ...
+
+def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
+def takewhile(predicate: Callable[[_T], Any],
+              iterable: Iterable[_T]) -> Iterator[_T]: ...
+def tee(iterable: Iterable[Any], n: int = ...) -> Iterator[Any]: ...
+def zip_longest(*p: Iterable[Any],
+                fillvalue: Any = ...) -> Iterator[Any]: ...
+
+# TODO: Return type should be Iterator[Tuple[..]], but unknown tuple shape.
+#       Iterator[Sequence[_T]] loses this type information.
+def product(*p: Iterable[_T], repeat: int = ...) -> Iterator[Sequence[_T]]: ...
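+# Illustrative note (not part of upstream typeshed): product('ab', repeat=2)
+# yields ('a', 'a'), ('a', 'b'), ('b', 'a'), ('b', 'b'); the tuple shape
+# depends on the inputs, which is why Sequence[_T] is used above.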
+
+def permutations(iterable: Iterable[_T],
+                 r: Union[int, None] = ...) -> Iterator[Sequence[_T]]: ...
+def combinations(iterable: Iterable[_T],
+                 r: int) -> Iterable[Sequence[_T]]: ...
+def combinations_with_replacement(iterable: Iterable[_T],
+                                  r: int) -> Iterable[Sequence[_T]]: ...
diff --git a/typeshed/stdlib/3/json.pyi b/typeshed/stdlib/3/json.pyi
new file mode 100644
index 0000000..aec903f
--- /dev/null
+++ b/typeshed/stdlib/3/json.pyi
@@ -0,0 +1,51 @@
+from typing import Any, IO, Optional, Tuple, Callable, Dict, List
+
+class JSONDecodeError(ValueError):
+    def dumps(self, obj: Any) -> str: ...
+    def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
+    def loads(self, s: str) -> Any: ...
+    def load(self, fp: IO[str]) -> Any: ...
+
+def dumps(obj: Any,
+    skipkeys: bool = ...,
+    ensure_ascii: bool = ...,
+    check_circular: bool = ...,
+    allow_nan: bool = ...,
+    cls: Any = ...,
+    indent: Optional[int] = ...,
+    separators: Optional[Tuple[str, str]] = ...,
+    default: Optional[Callable[[Any], Any]] = ...,
+    sort_keys: bool = ...,
+    **kwds: Any) -> str: ...
+
+def dump(obj: Any,
+    fp: IO[str],
+    skipkeys: bool = ...,
+    ensure_ascii: bool = ...,
+    check_circular: bool = ...,
+    allow_nan: bool = ...,
+    cls: Any = ...,
+    indent: Optional[int] = ...,
+    separators: Optional[Tuple[str, str]] = ...,
+    default: Optional[Callable[[Any], Any]] = ...,
+    sort_keys: bool = ...,
+    **kwds: Any) -> None: ...
+
+def loads(s: str,
+    encoding: Any = ..., # ignored and deprecated
+    cls: Any = ...,
+    object_hook: Callable[[Dict], Any] = ...,
+    parse_float: Optional[Callable[[str], Any]] = ...,
+    parse_int: Optional[Callable[[str], Any]] = ...,
+    parse_constant: Optional[Callable[[str], Any]] = ...,
+    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+    **kwds: Any) -> Any: ...
+
+def load(fp: IO[str],
+    cls: Any = ...,
+    object_hook: Callable[[Dict], Any] = ...,
+    parse_float: Optional[Callable[[str], Any]] = ...,
+    parse_int: Optional[Callable[[str], Any]] = ...,
+    parse_constant: Optional[Callable[[str], Any]] = ...,
+    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+    **kwds: Any) -> Any: ...
diff --git a/typeshed/stdlib/3/linecache.pyi b/typeshed/stdlib/3/linecache.pyi
new file mode 100644
index 0000000..70967db
--- /dev/null
+++ b/typeshed/stdlib/3/linecache.pyi
@@ -0,0 +1,5 @@
+from typing import Any, List
+
+def getline(filename: str, lineno: int, module_globals: Any = ...) -> str: ...
+def clearcache() -> None: ...
+def getlines(filename: str, module_globals: Any = ...) -> List[str]: ...
diff --git a/typeshed/stdlib/3/locale.pyi b/typeshed/stdlib/3/locale.pyi
new file mode 100644
index 0000000..a76d137
--- /dev/null
+++ b/typeshed/stdlib/3/locale.pyi
@@ -0,0 +1,17 @@
+# Stubs for locale (Python 3.4)
+#
+# NOTE: This stub is based on a stub automatically generated by stubgen.
+
+from _locale import *
+
+def format(percent, value, grouping=..., monetary=..., *additional): ...
+def format_string(f, val, grouping=...): ...
+def currency(val, symbol=..., grouping=..., international=...): ...
+def str(val): ...
+def atof(string, func=...): ...
+def atoi(str): ...
+def normalize(localename): ...
+def getdefaultlocale(envvars=...): ...
+def getlocale(category=...): ...
+def resetlocale(category=...): ...
+def getpreferredencoding(do_setlocale=...): ...
diff --git a/typeshed/stdlib/3/logging/__init__.pyi b/typeshed/stdlib/3/logging/__init__.pyi
new file mode 100644
index 0000000..4a58102
--- /dev/null
+++ b/typeshed/stdlib/3/logging/__init__.pyi
@@ -0,0 +1,239 @@
+# Stubs for logging (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+CRITICAL = ...  # type: Any
+FATAL = ...  # type: Any
+ERROR = ...  # type: Any
+WARNING = ...  # type: Any
+WARN = ...  # type: Any
+INFO = ...  # type: Any
+DEBUG = ...  # type: Any
+NOTSET = ...  # type: Any
+
+def getLevelName(level): ...
+def addLevelName(level, levelName): ...
+
+class LogRecord:
+    name = ...  # type: Any
+    msg = ...  # type: Any
+    args = ...  # type: Any
+    levelname = ...  # type: Any
+    levelno = ...  # type: Any
+    pathname = ...  # type: Any
+    filename = ...  # type: Any
+    module = ...  # type: Any
+    exc_info = ...  # type: Any
+    exc_text = ...  # type: Any
+    stack_info = ...  # type: Any
+    lineno = ...  # type: Any
+    funcName = ...  # type: Any
+    created = ...  # type: Any
+    msecs = ...  # type: Any
+    relativeCreated = ...  # type: Any
+    thread = ...  # type: Any
+    threadName = ...  # type: Any
+    processName = ...  # type: Any
+    process = ...  # type: Any
+    def __init__(self, name, level, pathname, lineno, msg, args, exc_info, func=..., sinfo=...,
+                 **kwargs): ...
+    def getMessage(self): ...
+
+def setLogRecordFactory(factory): ...
+def getLogRecordFactory(): ...
+def makeLogRecord(dict): ...
+
+class PercentStyle:
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def __init__(self, fmt) -> None: ...
+    def usesTime(self): ...
+    def format(self, record): ...
+
+class StrFormatStyle(PercentStyle):
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def format(self, record): ...
+
+class StringTemplateStyle(PercentStyle):
+    default_format = ...  # type: Any
+    asctime_format = ...  # type: Any
+    asctime_search = ...  # type: Any
+    def __init__(self, fmt) -> None: ...
+    def usesTime(self): ...
+    def format(self, record): ...
+
+BASIC_FORMAT = ...  # type: Any
+
+class Formatter:
+    converter = ...  # type: Any
+    datefmt = ...  # type: Any
+    def __init__(self, fmt=..., datefmt=..., style=...) -> None: ...
+    default_time_format = ...  # type: Any
+    default_msec_format = ...  # type: Any
+    def formatTime(self, record, datefmt=...): ...
+    def formatException(self, ei): ...
+    def usesTime(self): ...
+    def formatMessage(self, record): ...
+    def formatStack(self, stack_info): ...
+    def format(self, record): ...
+
+class BufferingFormatter:
+    linefmt = ...  # type: Any
+    def __init__(self, linefmt=...) -> None: ...
+    def formatHeader(self, records): ...
+    def formatFooter(self, records): ...
+    def format(self, records): ...
+
+class Filter:
+    name = ...  # type: Any
+    nlen = ...  # type: Any
+    def __init__(self, name=...) -> None: ...
+    def filter(self, record): ...
+
+class Filterer:
+    filters = ...  # type: Any
+    def __init__(self) -> None: ...
+    def addFilter(self, filter): ...
+    def removeFilter(self, filter): ...
+    def filter(self, record): ...
+
+class Handler(Filterer):
+    level = ...  # type: Any
+    formatter = ...  # type: Any
+    def __init__(self, level=...) -> None: ...
+    def get_name(self): ...
+    def set_name(self, name): ...
+    name = ...  # type: Any
+    lock = ...  # type: Any
+    def createLock(self): ...
+    def acquire(self): ...
+    def release(self): ...
+    def setLevel(self, level): ...
+    def format(self, record): ...
+    def emit(self, record): ...
+    def handle(self, record): ...
+    def setFormatter(self, fmt): ...
+    def flush(self): ...
+    def close(self): ...
+    def handleError(self, record): ...
+
+class StreamHandler(Handler):
+    terminator = ...  # type: Any
+    stream = ...  # type: Any
+    def __init__(self, stream=...) -> None: ...
+    def flush(self): ...
+    def emit(self, record): ...
+
+class FileHandler(StreamHandler):
+    baseFilename = ...  # type: Any
+    mode = ...  # type: Any
+    encoding = ...  # type: Any
+    delay = ...  # type: Any
+    stream = ...  # type: Any
+    def __init__(self, filename, mode=..., encoding=..., delay=...) -> None: ...
+    def close(self): ...
+    def emit(self, record): ...
+
+class _StderrHandler(StreamHandler):
+    def __init__(self, level=...) -> None: ...
+
+lastResort = ...  # type: Any
+
+class PlaceHolder:
+    loggerMap = ...  # type: Any
+    def __init__(self, alogger) -> None: ...
+    def append(self, alogger): ...
+
+def setLoggerClass(klass): ...
+def getLoggerClass(): ...
+
+class Manager:
+    root = ...  # type: Any
+    disable = ...  # type: Any
+    emittedNoHandlerWarning = ...  # type: Any
+    loggerDict = ...  # type: Any
+    loggerClass = ...  # type: Any
+    logRecordFactory = ...  # type: Any
+    def __init__(self, rootnode) -> None: ...
+    def getLogger(self, name): ...
+    def setLoggerClass(self, klass): ...
+    def setLogRecordFactory(self, factory): ...
+
+class Logger(Filterer):
+    name = ...  # type: Any
+    level = ...  # type: Any
+    parent = ...  # type: Any
+    propagate = ...  # type: Any
+    handlers = ...  # type: Any
+    disabled = ...  # type: Any
+    def __init__(self, name, level=...) -> None: ...
+    def setLevel(self, level): ...
+    def debug(self, msg, *args, **kwargs): ...
+    def info(self, msg, *args, **kwargs): ...
+    def warning(self, msg, *args, **kwargs): ...
+    def warn(self, msg, *args, **kwargs): ...
+    def error(self, msg, *args, **kwargs): ...
+    def exception(self, msg, *args, **kwargs): ...
+    def critical(self, msg, *args, **kwargs): ...
+    fatal = ...  # type: Any
+    def log(self, level, msg, *args, **kwargs): ...
+    def findCaller(self, stack_info=...): ...
+    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=..., extra=...,
+                   sinfo=...): ...
+    def handle(self, record): ...
+    def addHandler(self, hdlr): ...
+    def removeHandler(self, hdlr): ...
+    def hasHandlers(self): ...
+    def callHandlers(self, record): ...
+    def getEffectiveLevel(self): ...
+    def isEnabledFor(self, level): ...
+    def getChild(self, suffix): ...
+
+class RootLogger(Logger):
+    def __init__(self, level) -> None: ...
+
+class LoggerAdapter:
+    logger = ...  # type: Any
+    extra = ...  # type: Any
+    def __init__(self, logger, extra) -> None: ...
+    def process(self, msg, kwargs): ...
+    def debug(self, msg, *args, **kwargs): ...
+    def info(self, msg, *args, **kwargs): ...
+    def warning(self, msg, *args, **kwargs): ...
+    def warn(self, msg, *args, **kwargs): ...
+    def error(self, msg, *args, **kwargs): ...
+    def exception(self, msg, *args, **kwargs): ...
+    def critical(self, msg, *args, **kwargs): ...
+    def log(self, level, msg, *args, **kwargs): ...
+    def isEnabledFor(self, level): ...
+    def setLevel(self, level): ...
+    def getEffectiveLevel(self): ...
+    def hasHandlers(self): ...
+
+def basicConfig(**kwargs): ...
+def getLogger(name=...): ...
+def critical(msg, *args, **kwargs): ...
+
+fatal = ...  # type: Any
+
+def error(msg, *args, **kwargs): ...
+def exception(msg, *args, **kwargs): ...
+def warning(msg, *args, **kwargs): ...
+def warn(msg, *args, **kwargs): ...
+def info(msg, *args, **kwargs): ...
+def debug(msg, *args, **kwargs): ...
+def log(level, msg, *args, **kwargs): ...
+def disable(level): ...
+
+class NullHandler(Handler):
+    def handle(self, record): ...
+    def emit(self, record): ...
+    lock = ...  # type: Any
+    def createLock(self): ...
+
+def captureWarnings(capture): ...
diff --git a/typeshed/stdlib/3/logging/handlers.pyi b/typeshed/stdlib/3/logging/handlers.pyi
new file mode 100644
index 0000000..458044e
--- /dev/null
+++ b/typeshed/stdlib/3/logging/handlers.pyi
@@ -0,0 +1,200 @@
+# Stubs for logging.handlers (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import logging
+
+threading = ...  # type: Any
+DEFAULT_TCP_LOGGING_PORT = ...  # type: Any
+DEFAULT_UDP_LOGGING_PORT = ...  # type: Any
+DEFAULT_HTTP_LOGGING_PORT = ...  # type: Any
+DEFAULT_SOAP_LOGGING_PORT = ...  # type: Any
+SYSLOG_UDP_PORT = ...  # type: Any
+SYSLOG_TCP_PORT = ...  # type: Any
+
+class BaseRotatingHandler(logging.FileHandler):
+    mode = ...  # type: Any
+    encoding = ...  # type: Any
+    namer = ...  # type: Any
+    rotator = ...  # type: Any
+    def __init__(self, filename, mode, encoding=..., delay=...) -> None: ...
+    def emit(self, record): ...
+    def rotation_filename(self, default_name): ...
+    def rotate(self, source, dest): ...
+
+class RotatingFileHandler(BaseRotatingHandler):
+    maxBytes = ...  # type: Any
+    backupCount = ...  # type: Any
+    def __init__(self, filename, mode=..., maxBytes=..., backupCount=..., encoding=...,
+                 delay=...): ...
+    stream = ...  # type: Any
+    def doRollover(self): ...
+    def shouldRollover(self, record): ...
+
+class TimedRotatingFileHandler(BaseRotatingHandler):
+    when = ...  # type: Any
+    backupCount = ...  # type: Any
+    utc = ...  # type: Any
+    atTime = ...  # type: Any
+    interval = ...  # type: Any
+    suffix = ...  # type: Any
+    extMatch = ...  # type: Any
+    dayOfWeek = ...  # type: Any
+    rolloverAt = ...  # type: Any
+    def __init__(self, filename, when=..., interval=..., backupCount=..., encoding=..., delay=...,
+                 utc=..., atTime=...): ...
+    def computeRollover(self, currentTime): ...
+    def shouldRollover(self, record): ...
+    def getFilesToDelete(self): ...
+    stream = ...  # type: Any
+    def doRollover(self): ...
+
+class WatchedFileHandler(logging.FileHandler):
+    def __init__(self, filename, mode=..., encoding=..., delay=...) -> None: ...
+    stream = ...  # type: Any
+    def emit(self, record): ...
+
+class SocketHandler(logging.Handler):
+    host = ...  # type: Any
+    port = ...  # type: Any
+    address = ...  # type: Any
+    sock = ...  # type: Any
+    closeOnError = ...  # type: Any
+    retryTime = ...  # type: Any
+    retryStart = ...  # type: Any
+    retryMax = ...  # type: Any
+    retryFactor = ...  # type: Any
+    def __init__(self, host, port) -> None: ...
+    def makeSocket(self, timeout=...): ...
+    retryPeriod = ...  # type: Any
+    def createSocket(self): ...
+    def send(self, s): ...
+    def makePickle(self, record): ...
+    def handleError(self, record): ...
+    def emit(self, record): ...
+    def close(self): ...
+
+class DatagramHandler(SocketHandler):
+    closeOnError = ...  # type: Any
+    def __init__(self, host, port) -> None: ...
+    def makeSocket(self, timeout=...): ... # TODO: Actually does not have the timeout argument.
+    def send(self, s): ...
+
+class SysLogHandler(logging.Handler):
+    LOG_EMERG = ...  # type: Any
+    LOG_ALERT = ...  # type: Any
+    LOG_CRIT = ...  # type: Any
+    LOG_ERR = ...  # type: Any
+    LOG_WARNING = ...  # type: Any
+    LOG_NOTICE = ...  # type: Any
+    LOG_INFO = ...  # type: Any
+    LOG_DEBUG = ...  # type: Any
+    LOG_KERN = ...  # type: Any
+    LOG_USER = ...  # type: Any
+    LOG_MAIL = ...  # type: Any
+    LOG_DAEMON = ...  # type: Any
+    LOG_AUTH = ...  # type: Any
+    LOG_SYSLOG = ...  # type: Any
+    LOG_LPR = ...  # type: Any
+    LOG_NEWS = ...  # type: Any
+    LOG_UUCP = ...  # type: Any
+    LOG_CRON = ...  # type: Any
+    LOG_AUTHPRIV = ...  # type: Any
+    LOG_FTP = ...  # type: Any
+    LOG_LOCAL0 = ...  # type: Any
+    LOG_LOCAL1 = ...  # type: Any
+    LOG_LOCAL2 = ...  # type: Any
+    LOG_LOCAL3 = ...  # type: Any
+    LOG_LOCAL4 = ...  # type: Any
+    LOG_LOCAL5 = ...  # type: Any
+    LOG_LOCAL6 = ...  # type: Any
+    LOG_LOCAL7 = ...  # type: Any
+    priority_names = ...  # type: Any
+    facility_names = ...  # type: Any
+    priority_map = ...  # type: Any
+    address = ...  # type: Any
+    facility = ...  # type: Any
+    socktype = ...  # type: Any
+    unixsocket = ...  # type: Any
+    socket = ...  # type: Any
+    formatter = ...  # type: Any
+    def __init__(self, address=..., facility=..., socktype=...) -> None: ...
+    def encodePriority(self, facility, priority): ...
+    def close(self): ...
+    def mapPriority(self, levelName): ...
+    ident = ...  # type: Any
+    append_nul = ...  # type: Any
+    def emit(self, record): ...
+
+class SMTPHandler(logging.Handler):
+    username = ...  # type: Any
+    fromaddr = ...  # type: Any
+    toaddrs = ...  # type: Any
+    subject = ...  # type: Any
+    secure = ...  # type: Any
+    timeout = ...  # type: Any
+    def __init__(self, mailhost, fromaddr, toaddrs, subject, credentials=..., secure=...,
+                 timeout=...): ...
+    def getSubject(self, record): ...
+    def emit(self, record): ...
+
+class NTEventLogHandler(logging.Handler):
+    appname = ...  # type: Any
+    dllname = ...  # type: Any
+    logtype = ...  # type: Any
+    deftype = ...  # type: Any
+    typemap = ...  # type: Any
+    def __init__(self, appname, dllname=..., logtype=...) -> None: ...
+    def getMessageID(self, record): ...
+    def getEventCategory(self, record): ...
+    def getEventType(self, record): ...
+    def emit(self, record): ...
+    def close(self): ...
+
+class HTTPHandler(logging.Handler):
+    host = ...  # type: Any
+    url = ...  # type: Any
+    method = ...  # type: Any
+    secure = ...  # type: Any
+    credentials = ...  # type: Any
+    def __init__(self, host, url, method=..., secure=..., credentials=...) -> None: ...
+    def mapLogRecord(self, record): ...
+    def emit(self, record): ...
+
+class BufferingHandler(logging.Handler):
+    capacity = ...  # type: Any
+    buffer = ...  # type: Any
+    def __init__(self, capacity) -> None: ...
+    def shouldFlush(self, record): ...
+    def emit(self, record): ...
+    def flush(self): ...
+    def close(self): ...
+
+class MemoryHandler(BufferingHandler):
+    flushLevel = ...  # type: Any
+    target = ...  # type: Any
+    def __init__(self, capacity, flushLevel=..., target=...) -> None: ...
+    def shouldFlush(self, record): ...
+    def setTarget(self, target): ...
+    buffer = ...  # type: Any
+    def flush(self): ...
+    def close(self): ...
+
+class QueueHandler(logging.Handler):
+    queue = ...  # type: Any
+    def __init__(self, queue) -> None: ...
+    def enqueue(self, record): ...
+    def prepare(self, record): ...
+    def emit(self, record): ...
+
+class QueueListener:
+    queue = ...  # type: Any
+    handlers = ...  # type: Any
+    def __init__(self, queue, *handlers) -> None: ...
+    def dequeue(self, block): ...
+    def start(self): ...
+    def prepare(self, record): ...
+    def handle(self, record): ...
+    def enqueue_sentinel(self): ...
+    def stop(self): ...
diff --git a/typeshed/stdlib/3/msvcrt.pyi b/typeshed/stdlib/3/msvcrt.pyi
new file mode 100644
index 0000000..bcab64c
--- /dev/null
+++ b/typeshed/stdlib/3/msvcrt.pyi
@@ -0,0 +1,8 @@
+# Stubs for msvcrt
+
+# NOTE: These are incomplete!
+
+from typing import overload, BinaryIO, TextIO
+
+def get_osfhandle(file: int) -> int: ...
+def open_osfhandle(handle: int, flags: int) -> int: ...
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
new file mode 100644
index 0000000..32cf8f7
--- /dev/null
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -0,0 +1,12 @@
+# Stubs for multiprocessing
+
+from typing import Any
+
+class Lock(): ...
+class Process(): ...
+
+class Queue():
+    def get(self, block: bool = ..., timeout: float = ...) -> Any: ...
+
+class Value():
+    def __init__(self, typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ...
diff --git a/typeshed/stdlib/3/multiprocessing/managers.pyi b/typeshed/stdlib/3/multiprocessing/managers.pyi
new file mode 100644
index 0000000..c4f6845
--- /dev/null
+++ b/typeshed/stdlib/3/multiprocessing/managers.pyi
@@ -0,0 +1,8 @@
+# Stubs for multiprocessing.managers
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+class BaseManager():
+    def register(self, typeid: str, callable: Any = ...) -> None: ...
diff --git a/typeshed/stdlib/3/multiprocessing/pool.pyi b/typeshed/stdlib/3/multiprocessing/pool.pyi
new file mode 100644
index 0000000..2f3075a
--- /dev/null
+++ b/typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -0,0 +1,6 @@
+# Stubs for multiprocessing.pool
+
+# NOTE: These are incomplete!
+
+class ThreadPool():
+    def __init__(self, processes: int = ...) -> None: ...
diff --git a/typeshed/stdlib/3/numbers.pyi b/typeshed/stdlib/3/numbers.pyi
new file mode 100644
index 0000000..8bea0b0
--- /dev/null
+++ b/typeshed/stdlib/3/numbers.pyi
@@ -0,0 +1,80 @@
+# Stubs for numbers (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Number:
+    __hash__ = ... # type: Any
+
+class Complex(Number):
+    def __complex__(self): ...
+    def __bool__(self): ...
+    @property
+    def real(self): ...
+    @property
+    def imag(self): ...
+    def __add__(self, other): ...
+    def __radd__(self, other): ...
+    def __neg__(self): ...
+    def __pos__(self): ...
+    def __sub__(self, other): ...
+    def __rsub__(self, other): ...
+    def __mul__(self, other): ...
+    def __rmul__(self, other): ...
+    def __truediv__(self, other): ...
+    def __rtruediv__(self, other): ...
+    def __pow__(self, exponent): ...
+    def __rpow__(self, base): ...
+    def __abs__(self): ...
+    def conjugate(self): ...
+    def __eq__(self, other): ...
+
+class Real(Complex):
+    def __float__(self): ...
+    def __trunc__(self): ...
+    def __floor__(self): ...
+    def __ceil__(self): ...
+    def __round__(self, ndigits=None): ...
+    def __divmod__(self, other): ...
+    def __rdivmod__(self, other): ...
+    def __floordiv__(self, other): ...
+    def __rfloordiv__(self, other): ...
+    def __mod__(self, other): ...
+    def __rmod__(self, other): ...
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+    def __complex__(self): ...
+    @property
+    def real(self): ...
+    @property
+    def imag(self): ...
+    def conjugate(self): ...
+
+class Rational(Real):
+    @property
+    def numerator(self): ...
+    @property
+    def denominator(self): ...
+    def __float__(self): ...
+
+class Integral(Rational):
+    def __int__(self): ...
+    def __index__(self): ...
+    def __pow__(self, exponent, modulus=None): ...
+    def __lshift__(self, other): ...
+    def __rlshift__(self, other): ...
+    def __rshift__(self, other): ...
+    def __rrshift__(self, other): ...
+    def __and__(self, other): ...
+    def __rand__(self, other): ...
+    def __xor__(self, other): ...
+    def __rxor__(self, other): ...
+    def __or__(self, other): ...
+    def __ror__(self, other): ...
+    def __invert__(self): ...
+    def __float__(self): ...
+    @property
+    def numerator(self): ...
+    @property
+    def denominator(self): ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
new file mode 100644
index 0000000..0abf313
--- /dev/null
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -0,0 +1,340 @@
+# Stubs for os
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.html
+
+from typing import (
+    Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
+    Optional, Generic, Set
+)
+from builtins import OSError as error
+import os.path as path
+
+# ----- os variables -----
+
+supports_bytes_environ = False  # TODO: True when bytes implemented?
+
+SEEK_SET = 0 # type: int
+SEEK_CUR = 1 # type: int
+SEEK_END = 2 # type: int
+
+O_RDONLY = 0
+O_WRONLY = 0
+O_RDWR = 0
+O_APPEND = 0
+O_CREAT = 0
+O_EXCL = 0
+O_TRUNC = 0
+O_DSYNC = 0    # Unix only
+O_RSYNC = 0    # Unix only
+O_SYNC = 0     # Unix only
+O_NDELAY = 0   # Unix only
+O_NONBLOCK = 0 # Unix only
+O_NOCTTY = 0   # Unix only
+O_SHLOCK = 0   # Unix only
+O_EXLOCK = 0   # Unix only
+O_BINARY = 0     # Windows only
+O_NOINHERIT = 0  # Windows only
+O_SHORT_LIVED = 0  # Windows only
+O_TEMPORARY = 0  # Windows only
+O_RANDOM = 0     # Windows only
+O_SEQUENTIAL = 0 # Windows only
+O_TEXT = 0       # Windows only
+O_ASYNC = 0      # Gnu extension if in C library
+O_DIRECT = 0     # Gnu extension if in C library
+O_DIRECTORY = 0  # Gnu extension if in C library
+O_NOFOLLOW = 0   # Gnu extension if in C library
+O_NOATIME = 0    # Gnu extension if in C library
+
+curdir = ...  # type: str
+pardir = ...  # type: str
+sep = ...  # type: str
+altsep = ...  # type: str
+extsep = ...  # type: str
+pathsep = ...  # type: str
+defpath = ...  # type: str
+linesep = ...  # type: str
+devnull = ...  # type: str
+
+F_OK = 0
+R_OK = 0
+W_OK = 0
+X_OK = 0
+
+class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]):
+    def copy(self) -> _Environ[AnyStr]: ...
+
+environ = ...  # type: _Environ[str]
+environb = ...  # type: _Environ[bytes]
+
+confstr_names = ...  # type: Dict[str, int]  # Unix only
+pathconf_names = ...  # type: Dict[str, int] # Unix only
+sysconf_names = ...  # type: Dict[str, int]  # Unix only
+
+EX_OK = 0        # Unix only
+EX_USAGE = 0     # Unix only
+EX_DATAERR = 0   # Unix only
+EX_NOINPUT = 0   # Unix only
+EX_NOUSER = 0    # Unix only
+EX_NOHOST = 0    # Unix only
+EX_UNAVAILABLE = 0  # Unix only
+EX_SOFTWARE = 0  # Unix only
+EX_OSERR = 0     # Unix only
+EX_OSFILE = 0    # Unix only
+EX_CANTCREAT = 0 # Unix only
+EX_IOERR = 0     # Unix only
+EX_TEMPFAIL = 0  # Unix only
+EX_PROTOCOL = 0  # Unix only
+EX_NOPERM = 0    # Unix only
+EX_CONFIG = 0    # Unix only
+EX_NOTFOUND = 0  # Unix only
+
+P_NOWAIT = 0
+P_NOWAITO = 0
+P_WAIT = 0
+#P_DETACH = 0  # Windows only
+#P_OVERLAY = 0  # Windows only
+
+# wait()/waitpid() options
+WNOHANG = 0  # Unix only
+#WCONTINUED = 0  # some Unix systems
+#WUNTRACED = 0  # Unix only
+
+TMP_MAX = 0  # Undocumented, but used by tempfile
+
+# ----- os classes (structures) -----
+class stat_result:
+    # For backward compatibility, the return value of stat() is also
+    # accessible as a tuple of at least 10 integers giving the most important
+    # (and portable) members of the stat structure, in the order st_mode,
+    # st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime,
+    # st_ctime. More items may be added at the end by some implementations.
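+    # Illustrative example (not part of upstream typeshed): positional access
+    # such as os.stat('.')[0:3] returns the same values as st_mode, st_ino
+    # and st_dev on the result object.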
+
+    st_mode = 0 # protection bits,
+    st_ino = 0 # inode number,
+    st_dev = 0 # device,
+    st_nlink = 0 # number of hard links,
+    st_uid = 0 # user id of owner,
+    st_gid = 0 # group id of owner,
+    st_size = 0 # size of file, in bytes,
+    st_atime = 0.0 # time of most recent access,
+    st_mtime = 0.0 # time of most recent content modification,
+    st_ctime = 0.0 # platform dependent (time of most recent metadata change
+                   # on  Unix, or the time of creation on Windows)
+
+    def __init__(self, tuple) -> None: ...
+
+    # On some Unix systems (such as Linux), the following attributes may also
+    # be available:
+    st_blocks = 0 # number of blocks allocated for file
+    st_blksize = 0 # filesystem blocksize
+    st_rdev = 0 # type of device if an inode device
+    st_flags = 0 # user defined flags for file
+
+    # On other Unix systems (such as FreeBSD), the following attributes may be
+    # available (but may be only filled out if root tries to use them):
+    st_gen = 0 # file generation number
+    st_birthtime = 0 # time of file creation
+
+    # On Mac OS systems, the following attributes may also be available:
+    st_rsize = 0
+    st_creator = 0
+    st_type = 0
+
+class statvfs_result:  # Unix only
+    f_bsize = 0
+    f_frsize = 0
+    f_blocks = 0
+    f_bfree = 0
+    f_bavail = 0
+    f_files = 0
+    f_ffree = 0
+    f_favail = 0
+    f_flag = 0
+    f_namemax = 0
+
+# ----- os function stubs -----
+name = ...  # type: str
+def fsencode(filename: str) -> bytes: ...
+def fsdecode(filename: bytes) -> str: ...
+def get_exec_path(env=...) -> List[str]: ...
+# NOTE: get_exec_path(): returns List[bytes] when env not None
+def ctermid() -> str: ...  # Unix only
+def getegid() -> int: ...  # Unix only
+def geteuid() -> int: ...  # Unix only
+def getgid() -> int: ...   # Unix only
+def getgroups() -> List[int]: ...  # Unix only, behaves differently on Mac
+def initgroups(username: str, gid: int) -> None: ...  # Unix only
+def getlogin() -> str: ...
+def getpgid(pid: int) -> int: ...  # Unix only
+def getpgrp() -> int: ...  # Unix only
+def getpid() -> int: ...
+def getppid() -> int: ...
+def getresuid() -> Tuple[int, int, int]: ...  # Unix only
+def getresgid() -> Tuple[int, int, int]: ...  # Unix only
+def getuid() -> int: ...  # Unix only
+def getenv(key: str, default: str = ...) -> str: ...
+def getenvb(key: bytes, default: bytes = ...) -> bytes: ...
+# TODO mixed str/bytes putenv arguments
+def putenv(key: AnyStr, value: AnyStr) -> None: ...
+def setegid(egid: int) -> None: ...  # Unix only
+def seteuid(euid: int) -> None: ...  # Unix only
+def setgid(gid: int) -> None: ...  # Unix only
+def setgroups(groups: List[int]) -> None: ...  # Unix only
+def setpgrp() -> int: ...  # Unix only
+def setpgid(pid: int, pgrp: int) -> int: ...  # Unix only
+def setregid(rgid: int, egid: int) -> None: ...  # Unix only
+def setresgid(rgid: int, egid: int, sgid: int) -> None: ...  # Unix only
+def setresuid(ruid: int, euid: int, suid: int) -> None: ...  # Unix only
+def setreuid(ruid: int, euid: int) -> None: ...  # Unix only
+def getsid(pid: int) -> int: ...  # Unix only
+def setsid() -> int: ...  # Unix only
+def setuid(uid: int) -> None: ...  # Unix only
+def strerror(code: int) -> str: ...
+def umask(mask: int) -> int: ...
+def uname() -> Tuple[str, str, str, str, str]: ...  # Unix only
+def unsetenv(key: AnyStr) -> None: ...
+# Return IO or TextIO
+def fdopen(fd: int, mode: str = ..., encoding: str = ..., errors: str = ...,
+           newline: str = ..., closefd: bool = ...) -> Any: ...
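+# Illustrative note (not part of upstream typeshed): fdopen(fd) yields a text
+# stream while fdopen(fd, 'rb') yields a binary one, hence the Any return.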
+def close(fd: int) -> None: ...
+def closerange(fd_low: int, fd_high: int) -> None: ...
+def device_encoding(fd: int) -> Optional[str]: ...
+def dup(fd: int) -> int: ...
+def dup2(fd: int, fd2: int) -> None: ...
+def fchmod(fd: int, mode: int) -> None: ...  # Unix only
+def fchown(fd: int, uid: int, gid: int) -> None: ...  # Unix only
+def fdatasync(fd: int) -> None: ...  # Unix only, not Mac
+def fpathconf(fd: int, name: str) -> int: ...  # Unix only
+def fstat(fd: int) -> stat_result: ...
+def fstatvfs(fd: int) -> statvfs_result: ...  # Unix only
+def fsync(fd: int) -> None: ...
+def ftruncate(fd: int, length: int) -> None: ...  # Unix only
+def isatty(fd: int) -> bool: ...  # Unix only
+def lseek(fd: int, pos: int, how: int) -> int: ...
+def open(file: AnyStr, flags: int, mode: int = ...) -> int: ...
+def openpty() -> Tuple[int, int]: ...  # some flavors of Unix
+def pipe() -> Tuple[int, int]: ...
+def read(fd: int, n: int) -> bytes: ...
+def tcgetpgrp(fd: int) -> int: ...  # Unix only
+def tcsetpgrp(fd: int, pg: int) -> None: ...  # Unix only
+def ttyname(fd: int) -> str: ...  # Unix only
+def write(fd: int, string: bytes) -> int: ...
+def access(path: AnyStr, mode: int) -> bool: ...
+def chdir(path: AnyStr) -> None: ...
+def fchdir(fd: int) -> None: ...
+def getcwd() -> str: ...
+def getcwdb() -> bytes: ...
+def chflags(path: str, flags: int) -> None: ...  # Unix only
+def chroot(path: str) -> None: ...  # Unix only
+def chmod(path: AnyStr, mode: int) -> None: ...
+def chown(path: AnyStr, uid: int, gid: int) -> None: ...  # Unix only
+def lchflags(path: str, flags: int) -> None: ...  # Unix only
+def lchmod(path: str, mode: int) -> None: ...  # Unix only
+def lchown(path: str, uid: int, gid: int) -> None: ...  # Unix only
+def link(src: AnyStr, link_name: AnyStr) -> None: ...
+
+@overload
+def listdir(path: str = ...) -> List[str]: ...
+@overload
+def listdir(path: bytes) -> List[bytes]: ...
+
+def lstat(path: AnyStr) -> stat_result: ...
+def mkfifo(path: AnyStr, mode: int = ...) -> None: ...  # Unix only
+def mknod(filename: AnyStr, mode: int = ..., device: int = ...) -> None: ...
+def major(device: int) -> int: ...
+def minor(device: int) -> int: ...
+def makedev(major: int, minor: int) -> int: ...
+def mkdir(path: AnyStr, mode: int = ...) -> None: ...
+def makedirs(path: AnyStr, mode: int = ...,
+             exist_ok: bool = ...) -> None: ...
+def pathconf(path: str, name: str) -> int: ...  # Unix only
+def readlink(path: AnyStr) -> AnyStr: ...
+def remove(path: AnyStr) -> None: ...
+def removedirs(path: AnyStr) -> None: ...
+def rename(src: AnyStr, dst: AnyStr) -> None: ...
+def renames(old: AnyStr, new: AnyStr) -> None: ...
+def rmdir(path: AnyStr) -> None: ...
+def stat(path: AnyStr) -> stat_result: ...
+def stat_float_times(newvalue: Union[bool, None] = ...) -> bool: ...
+def statvfs(path: str) -> statvfs_result: ... # Unix only
+def symlink(source: AnyStr, link_name: AnyStr,
+            target_is_directory: bool = ...) -> None:
+    ...  # final argument in Windows only
+def unlink(path: AnyStr) -> None: ...
+def utime(path: AnyStr, times: Union[Tuple[int, int], Tuple[float, float]] = ...) -> None: ...
+
+# TODO onerror: function from OSError to void
+def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
+         followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
+                                                    List[AnyStr]]]: ...
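+# Illustrative usage sketch (assumes an existing directory '/tmp'): each yielded
+# tuple is (dirpath, dirnames, filenames), all of the same str/bytes type as 'top'.
+#     for dirpath, dirnames, filenames in walk('/tmp'):
+#         print(dirpath, len(dirnames), len(filenames))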
+
+def abort() -> None: ...
+def execl(path: AnyStr, arg0: AnyStr, *args: AnyStr) -> None: ...
+def execle(path: AnyStr, arg0: AnyStr,
+           *args: Any) -> None: ... # Imprecise signature
+def execlp(path: AnyStr, arg0: AnyStr, *args: AnyStr) -> None: ...
+def execlpe(path: AnyStr, arg0: AnyStr,
+            *args: Any) -> None: ... # Imprecise signature
+def execv(path: AnyStr, args: Union[Tuple[AnyStr, ...], List[AnyStr]]) -> None: ...
+def execve(path: AnyStr, args: Union[Tuple[AnyStr, ...], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
+def execvp(file: AnyStr, args: Union[Tuple[AnyStr, ...], List[AnyStr]]) -> None: ...
+def execvpe(file: AnyStr, args: Union[Tuple[AnyStr, ...], List[AnyStr]],
+            env: Mapping[str, str]) -> None: ...
+def _exit(n: int) -> None: ...
+def fork() -> int: ...  # Unix only
+def forkpty() -> Tuple[int, int]: ...  # some flavors of Unix
+def kill(pid: int, sig: int) -> None: ...
+def killpg(pgid: int, sig: int) -> None: ...  # Unix only
+def nice(increment: int) -> int: ...  # Unix only
+def plock(op: int) -> None: ...  # Unix only; TODO: confirm that op is an int
+
+from io import TextIOWrapper as _TextIOWrapper
+class popen(_TextIOWrapper):
+    # TODO 'b' modes or bytes command not accepted?
+    def __init__(self, command: str, mode: str = ...,
+                 bufsize: int = ...) -> None: ...
+    def close(self) -> Any: ... # may return int
+
+def spawnl(mode: int, path: AnyStr, arg0: AnyStr, *args: AnyStr) -> int: ...
+def spawnle(mode: int, path: AnyStr, arg0: AnyStr,
+            *args: Any) -> int: ... # Imprecise sig
+def spawnlp(mode: int, file: AnyStr, arg0: AnyStr,
+            *args: AnyStr) -> int: ...  # Unix only TODO
+def spawnlpe(mode: int, file: AnyStr, arg0: AnyStr, *args: Any) -> int:
+    ... # Imprecise signature; Unix only TODO
+def spawnv(mode: int, path: AnyStr, args: List[AnyStr]) -> int: ...
+def spawnve(mode: int, path: AnyStr, args: List[AnyStr],
+            env: Mapping[str, str]) -> int: ...
+def spawnvp(mode: int, file: AnyStr, args: List[AnyStr]) -> int: ...  # Unix only
+def spawnvpe(mode: int, file: AnyStr, args: List[AnyStr],
+             env: Mapping[str, str]) -> int:
+    ...  # Unix only
+def startfile(path: str, operation: Union[str, None] = ...) -> None: ...  # Windows only
+def system(command: AnyStr) -> int: ...
+def times() -> Tuple[float, float, float, float, float]: ...
+def wait() -> Tuple[int, int]: ...  # Unix only
+def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
+def wait3(options: Union[int, None] = ...) -> Tuple[int, int, Any]: ...  # Unix only
+def wait4(pid: int, options: int) -> Tuple[int, int, Any]:
+    ...  # Unix only
+def WCOREDUMP(status: int) -> bool: ...  # Unix only
+def WIFCONTINUED(status: int) -> bool: ...  # Unix only
+def WIFSTOPPED(status: int) -> bool: ...  # Unix only
+def WIFSIGNALED(status: int) -> bool: ...  # Unix only
+def WIFEXITED(status: int) -> bool: ...  # Unix only
+def WEXITSTATUS(status: int) -> int: ...  # Unix only
+def WSTOPSIG(status: int) -> int: ...  # Unix only
+def WTERMSIG(status: int) -> int: ...  # Unix only
+def confstr(name: str) -> str: ...  # Unix only
+def getloadavg() -> Tuple[float, float, float]: ...  # Unix only
+def sysconf(name: str) -> int: ...  # Unix only
+def urandom(n: int) -> bytes: ...
+
+def sched_getaffinity(id: int) -> Set[int]: ...
+class waitresult:
+    si_pid = 0
+def waitid(idtype: int, id: int, options: int) -> waitresult: ...
+P_ALL = 0
+WEXITED = 0
+WNOWAIT = 0
diff --git a/typeshed/stdlib/3/os/path.pyi b/typeshed/stdlib/3/os/path.pyi
new file mode 100644
index 0000000..305e059
--- /dev/null
+++ b/typeshed/stdlib/3/os/path.pyi
@@ -0,0 +1,61 @@
+# Stubs for os.path
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.path.html
+
+from typing import overload, List, Any, AnyStr, Tuple, BinaryIO, TextIO
+
+# ----- os.path variables -----
+supports_unicode_filenames = False
+# aliases (also in os)
+curdir = ...  # type: str
+pardir = ...  # type: str
+sep = ...  # type: str
+altsep = ...  # type: str
+extsep = ...  # type: str
+pathsep = ...  # type: str
+defpath = ...  # type: str
+devnull = ...  # type: str
+
+# ----- os.path function stubs -----
+def abspath(path: AnyStr) -> AnyStr: ...
+def basename(path: AnyStr) -> AnyStr: ...
+
+# NOTE: Empty List[bytes] results in '' (str) => fall back to Any return type.
+def commonprefix(list: List[AnyStr]) -> Any: ...
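+# Illustrative sketch of why the return type falls back to Any:
+#     commonprefix([b'/usr/lib', b'/usr/local'])  # b'/usr/l' (bytes)
+#     commonprefix([])                            # '' (str), even for bytes callers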
+def dirname(path: AnyStr) -> AnyStr: ...
+def exists(path: AnyStr) -> bool: ...
+def lexists(path: AnyStr) -> bool: ...
+def expanduser(path: AnyStr) -> AnyStr: ...
+def expandvars(path: AnyStr) -> AnyStr: ...
+
+
+# These return float if os.stat_float_times() == True
+def getatime(path: AnyStr) -> Any: ...
+def getmtime(path: AnyStr) -> Any: ...
+def getctime(path: AnyStr) -> Any: ...
+
+def getsize(path: AnyStr) -> int: ...
+def isabs(path: AnyStr) -> bool: ...
+def isfile(path: AnyStr) -> bool: ...
+def isdir(path: AnyStr) -> bool: ...
+def islink(path: AnyStr) -> bool: ...
+def ismount(path: AnyStr) -> bool: ...
+
+def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ...
+
+def normcase(path: AnyStr) -> AnyStr: ...
+def normpath(path: AnyStr) -> AnyStr: ...
+def realpath(path: AnyStr) -> AnyStr: ...
+def relpath(path: AnyStr, start: AnyStr = ...) -> AnyStr: ...
+
+def samefile(path1: AnyStr, path2: AnyStr) -> bool: ...
+def sameopenfile(fp1: int, fp2: int) -> bool: ...
+#def samestat(stat1: stat_result,
+#             stat2: stat_result) -> bool: ...  # Unix only
+
+def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+
+#def splitunc(path: str) -> Tuple[str, str]: ...  # Windows only, deprecated
diff --git a/typeshed/stdlib/3/pickle.pyi b/typeshed/stdlib/3/pickle.pyi
new file mode 100644
index 0000000..b3dcd07
--- /dev/null
+++ b/typeshed/stdlib/3/pickle.pyi
@@ -0,0 +1,12 @@
+# Stubs for pickle
+
+# NOTE: These are incomplete!
+
+from typing import Any, IO
+
+def dumps(obj: Any, protocol: int = ..., *,
+          fix_imports: bool = ...) -> bytes: ...
+def loads(p: bytes, *, fix_imports: bool = ...,
+          encoding: str = ..., errors: str = ...) -> Any: ...
+def load(file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ...,
+         errors: str = ...) -> Any: ...
diff --git a/typeshed/stdlib/3/pipes.pyi b/typeshed/stdlib/3/pipes.pyi
new file mode 100644
index 0000000..62163d6
--- /dev/null
+++ b/typeshed/stdlib/3/pipes.pyi
@@ -0,0 +1,19 @@
+# Stubs for pipes
+
+# Based on http://docs.python.org/3.5/library/pipes.html
+
+import os
+
+class Template:
+    def __init__(self) -> None: ...
+    def reset(self) -> None: ...
+    def clone(self) -> 'Template': ...
+    def debug(self, flag: bool) -> None: ...
+    def append(self, cmd: str, kind: str) -> None: ...
+    def prepend(self, cmd: str, kind: str) -> None: ...
+    def open(self, file: str, rw: str) -> os.popen: ...
+    def copy(self, file: str, rw: str) -> os.popen: ...
+
+# Not documented, but widely used.
+# Documented as shlex.quote since 3.3.
+def quote(s: str) -> str: ...
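+# Illustrative usage sketch (hypothetical output file 'out.txt'):
+#     t = Template()
+#     t.append('tr a-z A-Z', '--')   # '--': the command reads stdin and writes stdout
+#     f = t.open('out.txt', 'w')
+#     f.write('hello')
+#     f.close()                      # the pipeline writes 'HELLO' to out.txt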
diff --git a/typeshed/stdlib/3/platform.pyi b/typeshed/stdlib/3/platform.pyi
new file mode 100644
index 0000000..794d4c5
--- /dev/null
+++ b/typeshed/stdlib/3/platform.pyi
@@ -0,0 +1,35 @@
+# Stubs for platform (Python 3.5)
+
+from typing import Tuple, NamedTuple
+
+from os import devnull as DEV_NULL
+
+def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> Tuple[str, str]: ...
+def linux_distribution(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ..., full_distribution_name: bool = ...) -> Tuple[str, str, str]: ...
+def dist(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ...) -> Tuple[str, str, str]: ...
+from os import popen
+def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> Tuple[str, str, str, str]: ...
+def mac_ver(release: str = ..., versioninfo: Tuple[str, str, str] = ..., machine: str = ...) -> Tuple[str, Tuple[str, str, str], str]: ...
+def java_ver(release: str = ..., vendor: str = ..., vminfo: Tuple[str, str, str] = ..., osinfo: Tuple[str, str, str] = ...) -> Tuple[str, str, Tuple[str, str, str], Tuple[str, str, str]]: ...
+def system_alias(system: str, release: str, version: str) -> Tuple[str, str, str]: ...
+def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> Tuple[str, str]: ...
+
+uname_result = NamedTuple('uname_result', [('system', str), ('node', str), ('release', str), ('version', str), ('machine', str), ('processor', str)])
+
+def uname() -> uname_result: ...
+def system() -> str: ...
+def node() -> str: ...
+def release() -> str: ...
+def version() -> str: ...
+def machine() -> str: ...
+def processor() -> str: ...
+
+def python_implementation() -> str: ...
+def python_version() -> str: ...
+def python_version_tuple() -> Tuple[str, str, str]: ...
+def python_branch() -> str: ...
+def python_revision() -> str: ...
+def python_build() -> Tuple[str, str]: ...
+def python_compiler() -> str: ...
+
+def platform(aliased: bool = ..., terse: bool = ...) -> str: ...
diff --git a/typeshed/stdlib/3/posix.pyi b/typeshed/stdlib/3/posix.pyi
new file mode 100644
index 0000000..3debbbf
--- /dev/null
+++ b/typeshed/stdlib/3/posix.pyi
@@ -0,0 +1,7 @@
+# Stubs for posix
+
+# NOTE: These are incomplete!
+
+import typing
+from os import stat_result
+
diff --git a/typeshed/stdlib/3/posixpath.pyi b/typeshed/stdlib/3/posixpath.pyi
new file mode 100644
index 0000000..5d6285c
--- /dev/null
+++ b/typeshed/stdlib/3/posixpath.pyi
@@ -0,0 +1,46 @@
+# Stubs for os.path
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.path.html
+
+from typing import Any, List, Tuple, IO
+
+# ----- os.path variables -----
+supports_unicode_filenames = False
+
+# ----- os.path function stubs -----
+def abspath(path: str) -> str: ...
+def basename(path: str) -> str: ...
+def commonprefix(list: List[str]) -> str: ...
+def dirname(path: str) -> str: ...
+def exists(path: str) -> bool: ...
+def lexists(path: str) -> bool: ...
+def expanduser(path: str) -> str: ...
+def expandvars(path: str) -> str: ...
+def getatime(path: str) -> int:
+    ... # return float if os.stat_float_times() returns True
+def getmtime(path: str) -> int:
+    ... # return float if os.stat_float_times() returns True
+def getctime(path: str) -> int:
+    ... # return float if os.stat_float_times() returns True
+def getsize(path: str) -> int: ...
+def isabs(path: str) -> bool: ...
+def isfile(path: str) -> bool: ...
+def isdir(path: str) -> bool: ...
+def islink(path: str) -> bool: ...
+def ismount(path: str) -> bool: ...
+def join(path: str, *paths: str) -> str: ...
+def normcase(path: str) -> str: ...
+def normpath(path: str) -> str: ...
+def realpath(path: str) -> str: ...
+def relpath(path: str, start: str = ...) -> str: ...
+def samefile(path1: str, path2: str) -> bool: ...
+
+def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ...
+
+#def samestat(stat1: stat_result, stat2: stat_result) -> bool:
+#    ...  # Unix only
+def split(path: str) -> Tuple[str, str]: ...
+def splitdrive(path: str) -> Tuple[str, str]: ...
+def splitext(path: str) -> Tuple[str, str]: ...
+#def splitunc(path: str) -> Tuple[str, str] : ...  # Windows only, deprecated
diff --git a/typeshed/stdlib/3/pprint.pyi b/typeshed/stdlib/3/pprint.pyi
new file mode 100644
index 0000000..95804e9
--- /dev/null
+++ b/typeshed/stdlib/3/pprint.pyi
@@ -0,0 +1,23 @@
+# Stubs for pprint
+
+# Based on http://docs.python.org/3.2/library/pprint.html
+
+from typing import Any, Dict, Tuple, TextIO
+
+def pformat(o: object, indent: int = ..., width: int = ...,
+            depth: int = ...) -> str: ...
+def pprint(o: object, stream: TextIO = ..., indent: int = ..., width: int = ...,
+           depth: int = ...) -> None: ...
+def isreadable(o: object) -> bool: ...
+def isrecursive(o: object) -> bool: ...
+def saferepr(o: object) -> str: ...
+
+class PrettyPrinter:
+    def __init__(self, indent: int = ..., width: int = ..., depth: int = ...,
+                 stream: TextIO = ...) -> None: ...
+    def pformat(self, o: object) -> str: ...
+    def pprint(self, o: object) -> None: ...
+    def isreadable(self, o: object) -> bool: ...
+    def isrecursive(self, o: object) -> bool: ...
+    def format(self, o: object, context: Dict[int, Any], maxlevels: int,
+               level: int) -> Tuple[str, bool, bool]: ...
diff --git a/typeshed/stdlib/3/pwd.pyi b/typeshed/stdlib/3/pwd.pyi
new file mode 100644
index 0000000..a5a8171
--- /dev/null
+++ b/typeshed/stdlib/3/pwd.pyi
@@ -0,0 +1,18 @@
+# Stubs for pwd
+
+# NOTE: These are incomplete!
+
+import typing
+
+class struct_passwd:
+    # TODO use namedtuple
+    pw_name = ...  # type: str
+    pw_passwd = ...  # type: str
+    pw_uid = 0
+    pw_gid = 0
+    pw_gecos = ...  # type: str
+    pw_dir = ...  # type: str
+    pw_shell = ...  # type: str
+
+def getpwuid(uid: int) -> struct_passwd: ...
+def getpwnam(name: str) -> struct_passwd: ...
diff --git a/typeshed/stdlib/3/queue.pyi b/typeshed/stdlib/3/queue.pyi
new file mode 100644
index 0000000..15d62f5
--- /dev/null
+++ b/typeshed/stdlib/3/queue.pyi
@@ -0,0 +1,20 @@
+# Stubs for queue
+
+# NOTE: These are incomplete!
+
+from typing import Any, TypeVar, Generic
+
+_T = TypeVar('_T')
+
+class Queue(Generic[_T]):
+    def __init__(self, maxsize: int = ...) -> None: ...
+    def full(self) -> bool: ...
+    def get(self, block: bool = ..., timeout: float = ...) -> _T: ...
+    def get_nowait(self) -> _T: ...
+    def put(self, item: _T, block: bool = ..., timeout: float = ...) -> None: ...
+    def put_nowait(self, item: _T) -> None: ...
+    def join(self) -> None: ...
+    def qsize(self) -> int: ...
+    def task_done(self) -> None: pass
+
+class Empty: ...
diff --git a/typeshed/stdlib/3/random.pyi b/typeshed/stdlib/3/random.pyi
new file mode 100644
index 0000000..f40d973
--- /dev/null
+++ b/typeshed/stdlib/3/random.pyi
@@ -0,0 +1,67 @@
+# Stubs for random
+# Ron Murawski <ron at horizonchess.com>
+# Updated by Jukka Lehtosalo
+
+# based on http://docs.python.org/3.2/library/random.html
+
+# ----- random classes -----
+
+import _random
+from typing import (
+    Any, TypeVar, Sequence, List, Callable, AbstractSet, Union
+)
+
+_T = TypeVar('_T')
+
+class Random(_random.Random):
+    def __init__(self, x: Any = ...) -> None: ...
+    def seed(self, a: Any = ..., version: int = ...) -> None: ...
+    def getstate(self) -> tuple: ...
+    def setstate(self, state: tuple) -> None: ...
+    def getrandbits(self, k: int) -> int: ...
+    def randrange(self, start: int, stop: Union[int, None] = ..., step: int = ...) -> int: ...
+    def randint(self, a: int, b: int) -> int: ...
+    def choice(self, seq: Sequence[_T]) -> _T: ...
+    def shuffle(self, x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ...
+    def sample(self, population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ...
+    def random(self) -> float: ...
+    def uniform(self, a: float, b: float) -> float: ...
+    def triangular(self, low: float = ..., high: float = ...,
+                     mode: float = ...) -> float: ...
+    def betavariate(self, alpha: float, beta: float) -> float: ...
+    def expovariate(self, lambd: float) -> float: ...
+    def gammavariate(self, alpha: float, beta: float) -> float: ...
+    def gauss(self, mu: float, sigma: float) -> float: ...
+    def lognormvariate(self, mu: float, sigma: float) -> float: ...
+    def normalvariate(self, mu: float, sigma: float) -> float: ...
+    def vonmisesvariate(self, mu: float, kappa: float) -> float: ...
+    def paretovariate(self, alpha: float) -> float: ...
+    def weibullvariate(self, alpha: float, beta: float) -> float: ...
+
+# SystemRandom is not available on every OS; it works on Windows and Linux
+class SystemRandom(Random):
+    ...
+
+# ----- random function stubs -----
+def seed(a: Any = ..., version: int = ...) -> None: ...
+def getstate() -> object: ...
+def setstate(state: object) -> None: ...
+def getrandbits(k: int) -> int: ...
+def randrange(start: int, stop: Union[None, int] = ..., step: int = ...) -> int: ...
+def randint(a: int, b: int) -> int: ...
+def choice(seq: Sequence[_T]) -> _T: ...
+def shuffle(x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ...
+def sample(population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ...
+def random() -> float: ...
+def uniform(a: float, b: float) -> float: ...
+def triangular(low: float = ..., high: float = ...,
+               mode: float = ...) -> float: ...
+def betavariate(alpha: float, beta: float) -> float: ...
+def expovariate(lambd: float) -> float: ...
+def gammavariate(alpha: float, beta: float) -> float: ...
+def gauss(mu: float, sigma: float) -> float: ...
+def lognormvariate(mu: float, sigma: float) -> float: ...
+def normalvariate(mu: float, sigma: float) -> float: ...
+def vonmisesvariate(mu: float, kappa: float) -> float: ...
+def paretovariate(alpha: float) -> float: ...
+def weibullvariate(alpha: float, beta: float) -> float: ...
diff --git a/typeshed/stdlib/3/re.pyi b/typeshed/stdlib/3/re.pyi
new file mode 100644
index 0000000..8cd8bba
--- /dev/null
+++ b/typeshed/stdlib/3/re.pyi
@@ -0,0 +1,58 @@
+# Stubs for re
+# Ron Murawski <ron at horizonchess.com>
+# 'bytes' support added by Jukka Lehtosalo
+
+# based on: http://docs.python.org/3.2/library/re.html
+# and http://hg.python.org/cpython/file/618ea5612e83/Lib/re.py
+
+from typing import (
+    List, Iterator, Callable, Tuple, Sequence, Dict, Union,
+    Generic, AnyStr, Match, Pattern
+)
+
+# ----- re variables and constants -----
+A = 0
+ASCII = 0
+DEBUG = 0
+I = 0
+IGNORECASE = 0
+L = 0
+LOCALE = 0
+M = 0
+MULTILINE = 0
+S = 0
+DOTALL = 0
+X = 0
+VERBOSE = 0
+U = 0
+UNICODE = 0
+
+class error(Exception): ...
+
+def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ...
+def search(pattern: AnyStr, string: AnyStr,
+           flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: AnyStr, string: AnyStr,
+          flags: int = ...) -> Match[AnyStr]: ...
+def split(pattern: AnyStr, string: AnyStr, maxsplit: int = ...,
+          flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: AnyStr, string: AnyStr,
+            flags: int = ...) -> List[AnyStr]: ...
+
+# Return an iterator yielding match objects over all non-overlapping matches
+# for the RE pattern in string. The string is scanned left-to-right, and
+# matches are returned in the order found. Empty matches are included in the
+# result unless they touch the beginning of another match.
+def finditer(pattern: AnyStr, string: AnyStr,
+             flags: int = ...) -> Iterator[Match[AnyStr]]: ...
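+# Illustrative sketch:
+#     for m in finditer(r'\d+', 'a1b22c333'):
+#         print(m.group())   # prints 1, then 22, then 333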
+
+def sub(pattern: AnyStr, repl: Union[AnyStr, Callable[[Match[AnyStr]], AnyStr]],
+        string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
+
+def subn(pattern: AnyStr, repl: Union[AnyStr, Callable[[Match[AnyStr]], AnyStr]],
+         string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]:
+    ...
+
+def escape(string: AnyStr) -> AnyStr: ...
+
+def purge() -> None: ...
diff --git a/typeshed/stdlib/3/resource.pyi b/typeshed/stdlib/3/resource.pyi
new file mode 100644
index 0000000..bddc6df
--- /dev/null
+++ b/typeshed/stdlib/3/resource.pyi
@@ -0,0 +1,13 @@
+# Stubs for resource
+
+# NOTE: These are incomplete!
+
+from typing import Tuple
+
+RLIMIT_CORE = 0
+
+def getrlimit(resource: int) -> Tuple[int, int]: ...
+def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ...
+
+# NOTE: This is an alias of OSError in Python 3.3.
+class error(Exception): ...
diff --git a/typeshed/stdlib/3/select.pyi b/typeshed/stdlib/3/select.pyi
new file mode 100644
index 0000000..fcede9f
--- /dev/null
+++ b/typeshed/stdlib/3/select.pyi
@@ -0,0 +1,27 @@
+# Stubs for select
+
+# NOTE: These are incomplete!
+
+from typing import Any, Tuple, List, Sequence
+
+class error(Exception): ...
+
+POLLIN = 0
+POLLPRI = 0
+POLLOUT = 0
+POLLERR = 0
+POLLHUP = 0
+POLLNVAL = 0
+
+class poll:
+    def __init__(self) -> None: ...
+    def register(self, fd: Any,
+                 eventmask: int = ...) -> None: ...
+    def modify(self, fd: Any, eventmask: int) -> None: ...
+    def unregister(self, fd: Any) -> None: ...
+    def poll(self, timeout: int = ...) -> List[Tuple[int, int]]: ...
+
+def select(rlist: Sequence, wlist: Sequence, xlist: Sequence,
+           timeout: float = ...) -> Tuple[List[int],
+                                           List[int],
+                                           List[int]]: ...
diff --git a/typeshed/stdlib/3/shlex.pyi b/typeshed/stdlib/3/shlex.pyi
new file mode 100644
index 0000000..db99fc6
--- /dev/null
+++ b/typeshed/stdlib/3/shlex.pyi
@@ -0,0 +1,39 @@
+# Stubs for shlex
+
+# Based on http://docs.python.org/3.2/library/shlex.html
+
+from typing import List, Tuple, Any, TextIO
+
+def split(s: str, comments: bool = ...,
+          posix: bool = ...) -> List[str]: ...
+
+# Added in 3.3, use (undocumented) pipes.quote in previous versions.
+def quote(s: str) -> str: ...
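+# Illustrative sketch:
+#     split('ls -l "My Documents"')  # ['ls', '-l', 'My Documents']
+#     quote('My Documents')          # "'My Documents'", safe to paste into a shell command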
+
+class shlex:
+    commenters = ...  # type: str
+    wordchars = ...  # type: str
+    whitespace = ...  # type: str
+    escape = ...  # type: str
+    quotes = ...  # type: str
+    escapedquotes = ...  # type: str
+    whitespace_split = ...  # type: bool
+    infile = ...  # type: str
+    instream = ...  # type: TextIO
+    source = ...  # type: str
+    debug = 0
+    lineno = 0
+    token = ...  # type: str
+    eof = ...  # type: str
+
+    def __init__(self, instream=..., infile=...,
+                 posix: bool = ...) -> None: ...
+    def get_token(self) -> str: ...
+    def push_token(self, tok: str) -> None: ...
+    def read_token(self) -> str: ...
+    def sourcehook(self, filename: str) -> Tuple[str, TextIO]: ...
+    # TODO argument types
+    def push_source(self, newstream: Any, newfile: Any = ...) -> None: ...
+    def pop_source(self) -> None: ...
+    def error_leader(self, infile: str = ...,
+                     lineno: int = ...) -> None: ...
diff --git a/typeshed/stdlib/3/shutil.pyi b/typeshed/stdlib/3/shutil.pyi
new file mode 100644
index 0000000..2ddd3d6
--- /dev/null
+++ b/typeshed/stdlib/3/shutil.pyi
@@ -0,0 +1,46 @@
+# Stubs for shutil
+
+# Based on http://docs.python.org/3.2/library/shutil.html
+
+# 'bytes' paths are not properly supported: they don't work with all functions,
+# sometimes they only work partially (broken exception messages), and the test
+# cases don't use them.
+
+from typing import List, Iterable, Callable, Any, Tuple, Sequence, IO, AnyStr
+
+def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr],
+                length: int = ...) -> None: ...
+
+def copyfile(src: str, dst: str) -> None: ...
+def copymode(src: str, dst: str) -> None: ...
+def copystat(src: str, dst: str) -> None: ...
+def copy(src: str, dst: str) -> None: ...
+def copy2(src: str, dst: str) -> None: ...
+def ignore_patterns(*patterns: str) -> Callable[[str, List[str]],
+                                                Iterable[str]]: ...
+def copytree(src: str, dst: str, symlinks: bool = ...,
+             ignore: Callable[[str, List[str]], Iterable[str]] = ...,
+             copy_function: Callable[[str, str], None] = ...,
+             ignore_dangling_symlinks: bool = ...) -> None: ...
+def rmtree(path: str, ignore_errors: bool = ...,
+           onerror: Callable[[Any, str, Any], None] = ...) -> None: ...
+def move(src: str, dst: str) -> None: ...
+
+class Error(Exception): ...
+
+def make_archive(base_name: str, format: str, root_dir: str = ...,
+                 base_dir: str = ..., verbose: bool = ...,
+                 dry_run: bool = ..., owner: str = ..., group: str = ...,
+                 logger: Any = ...) -> str: ...
+def get_archive_formats() -> List[Tuple[str, str]]: ...
+def register_archive_format(name: str, function: Any,
+                            extra_args: Sequence[Tuple[str, Any]] = ...,
+                            description: str = ...) -> None: ...
+def unregister_archive_format(name: str) -> None: ...
+def unpack_archive(filename: str, extract_dir: str = ...,
+                   format: str = ...) -> None: ...
+def register_unpack_format(name: str, extensions: List[str], function: Any,
+                           extra_args: Sequence[Tuple[str, Any]] = ...,
+                           description: str = ...) -> None: ...
+def unregister_unpack_format(name: str) -> None: ...
+def get_unpack_formats() -> List[Tuple[str, List[str], str]]: ...
diff --git a/typeshed/stdlib/3/signal.pyi b/typeshed/stdlib/3/signal.pyi
new file mode 100644
index 0000000..f956e05
--- /dev/null
+++ b/typeshed/stdlib/3/signal.pyi
@@ -0,0 +1,117 @@
+"""Stub file for the 'signal' module."""
+
+from typing import Any, Callable, List, Tuple, Dict, Generic, Union, Optional, Iterable, Set
+from types import FrameType
+
+class ItimerError(IOError): ...
+
+ITIMER_PROF = ...  # type: int
+ITIMER_REAL = ...  # type: int
+ITIMER_VIRTUAL = ...  # type: int
+
+NSIG = ...  # type: int
+SIGABRT = ...  # type: int
+SIGALRM = ...  # type: int
+SIGBUS = ...  # type: int
+SIGCHLD = ...  # type: int
+SIGCLD = ...  # type: int
+SIGCONT = ...  # type: int
+SIGFPE = ...  # type: int
+SIGHUP = ...  # type: int
+SIGILL = ...  # type: int
+SIGINT = ...  # type: int
+SIGIO = ...  # type: int
+SIGIOT = ...  # type: int
+SIGKILL = ...  # type: int
+SIGPIPE = ...  # type: int
+SIGPOLL = ...  # type: int
+SIGPROF = ...  # type: int
+SIGPWR = ...  # type: int
+SIGQUIT = ...  # type: int
+SIGRTMAX = ...  # type: int
+SIGRTMIN = ...  # type: int
+SIGSEGV = ...  # type: int
+SIGSTOP = ...  # type: int
+SIGSYS = ...  # type: int
+SIGTERM = ...  # type: int
+SIGTRAP = ...  # type: int
+SIGTSTP = ...  # type: int
+SIGTTIN = ...  # type: int
+SIGTTOU = ...  # type: int
+SIGURG = ...  # type: int
+SIGUSR1 = ...  # type: int
+SIGUSR2 = ...  # type: int
+SIGVTALRM = ...  # type: int
+SIGWINCH = ...  # type: int
+SIGXCPU = ...  # type: int
+SIGXFSZ = ...  # type: int
+
+SIG_DFL = ...  # type: int
+SIG_IGN = ...  # type: int
+
+CTRL_C_EVENT = 0 # Windows
+CTRL_BREAK_EVENT = 0 # Windows
+
+SIG_BLOCK = ...  # type: int
+SIG_UNBLOCK = ...  # type: int
+SIG_SETMASK = ...  # type: int
+
+_HANDLER = Union[Callable[[int, FrameType], None], int, None]
+
+class struct_siginfo(Tuple[int, int, int, int, int, int, int]):
+    def __init__(self, sequence: Iterable[int]) -> None: ...
+    @property
+    def si_signo(self) -> int: ...
+    @property
+    def si_code(self) -> int: ...
+    @property
+    def si_errno(self) -> int: ...
+    @property
+    def si_pid(self) -> int: ...
+    @property
+    def si_uid(self) -> int: ...
+    @property
+    def si_status(self) -> int: ...
+    @property
+    def si_band(self) -> int: ...
+
+def alarm(time: int) -> int: ...
+
+def default_int_handler(signum: int, frame: FrameType) -> None:
+    raise KeyboardInterrupt()
+
+def getitimer(which: int) -> Tuple[float, float]: ...
+
+def getsignal(signalnum: int) -> _HANDLER:
+    raise ValueError()
+
+def pause() -> None: ...
+
+def pthread_kill(thread_id: int, signum: int) -> None:
+    raise OSError()
+
+def pthread_sigmask(how: int, mask: Iterable[int]) -> Set[int]:
+    raise OSError()
+
+def set_wakeup_fd(fd: int) -> int: ...
+
+def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ...
+
+def siginterrupt(signalnum: int, flag: bool) -> None:
+    raise OSError()
+
+def signal(signalnum: int, handler: _HANDLER) -> _HANDLER:
+    raise OSError()
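+# Illustrative sketch of a handler matching _HANDLER (hypothetical callback name):
+#     def on_sigint(signum: int, frame: FrameType) -> None:
+#         print('interrupted')
+#     previous = signal(SIGINT, on_sigint)   # returns the previously installed handler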
+
+def sigpending() -> Any:
+    raise OSError()
+
+def sigtimedwait(sigset: Iterable[int], timeout: float) -> Optional[struct_siginfo]:
+    raise OSError()
+    raise ValueError()
+
+def sigwait(sigset: Iterable[int]) -> int:
+    raise OSError()
+
+def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo:
+    raise OSError()
diff --git a/typeshed/stdlib/3/smtplib.pyi b/typeshed/stdlib/3/smtplib.pyi
new file mode 100644
index 0000000..fc786e4
--- /dev/null
+++ b/typeshed/stdlib/3/smtplib.pyi
@@ -0,0 +1,94 @@
+# Stubs for smtplib (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class SMTPException(OSError): ...
+class SMTPServerDisconnected(SMTPException): ...
+
+class SMTPResponseException(SMTPException):
+    smtp_code = ... # type: Any
+    smtp_error = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, code, msg) -> None: ...
+
+class SMTPSenderRefused(SMTPResponseException):
+    smtp_code = ... # type: Any
+    smtp_error = ... # type: Any
+    sender = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, code, msg, sender) -> None: ...
+
+class SMTPRecipientsRefused(SMTPException):
+    recipients = ... # type: Any
+    args = ... # type: Any
+    def __init__(self, recipients) -> None: ...
+
+class SMTPDataError(SMTPResponseException): ...
+class SMTPConnectError(SMTPResponseException): ...
+class SMTPHeloError(SMTPResponseException): ...
+class SMTPAuthenticationError(SMTPResponseException): ...
+
+def quoteaddr(addrstring): ...
+def quotedata(data): ...
+
+class SMTP:
+    debuglevel = ... # type: Any
+    file = ... # type: Any
+    helo_resp = ... # type: Any
+    ehlo_msg = ... # type: Any
+    ehlo_resp = ... # type: Any
+    does_esmtp = ... # type: Any
+    default_port = ... # type: Any
+    timeout = ... # type: Any
+    esmtp_features = ... # type: Any
+    source_address = ... # type: Any
+    local_hostname = ... # type: Any
+    def __init__(self, host=..., port=..., local_hostname=..., timeout=...,
+                 source_address=...): ...
+    def __enter__(self): ...
+    def __exit__(self, *args): ...
+    def set_debuglevel(self, debuglevel): ...
+    sock = ... # type: Any
+    def connect(self, host=..., port=..., source_address=...): ...
+    def send(self, s): ...
+    def putcmd(self, cmd, args=...): ...
+    def getreply(self): ...
+    def docmd(self, cmd, args=...): ...
+    def helo(self, name=...): ...
+    def ehlo(self, name=...): ...
+    def has_extn(self, opt): ...
+    def help(self, args=...): ...
+    def rset(self): ...
+    def noop(self): ...
+    def mail(self, sender, options=...): ...
+    def rcpt(self, recip, options=...): ...
+    def data(self, msg): ...
+    def verify(self, address): ...
+    vrfy = ... # type: Any
+    def expn(self, address): ...
+    def ehlo_or_helo_if_needed(self): ...
+    def login(self, user, password): ...
+    def starttls(self, keyfile=..., certfile=..., context=...): ...
+    def sendmail(self, from_addr, to_addrs, msg, mail_options=...,
+                 rcpt_options=...): ...
+    def send_message(self, msg, from_addr=..., to_addrs=..., mail_options=...,
+                     rcpt_options=...): ...
+    def close(self): ...
+    def quit(self): ...
+
+class SMTP_SSL(SMTP):
+    default_port = ... # type: Any
+    keyfile = ... # type: Any
+    certfile = ... # type: Any
+    context = ... # type: Any
+    def __init__(self, host=..., port=..., local_hostname=..., keyfile=..., certfile=...,
+                 timeout=..., source_address=..., context=...): ...
+
+class LMTP(SMTP):
+    ehlo_msg = ... # type: Any
+    def __init__(self, host=..., port=..., local_hostname=..., source_address=...) -> None: ...
+    sock = ... # type: Any
+    file = ... # type: Any
+    def connect(self, host=..., port=..., source_address=...): ...
diff --git a/typeshed/stdlib/3/socket.pyi b/typeshed/stdlib/3/socket.pyi
new file mode 100644
index 0000000..d2f3c97
--- /dev/null
+++ b/typeshed/stdlib/3/socket.pyi
@@ -0,0 +1,387 @@
+# Stubs for socket
+# Ron Murawski <ron at horizonchess.com>
+
+# based on: http://docs.python.org/3.2/library/socket.html
+# see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
+# see: http://nullege.com/codes/search/socket
+
+from typing import Any, Tuple, overload, List
+
+# ----- variables and constants -----
+
+AF_UNIX = 0
+AF_INET = 0
+AF_INET6 = 0
+SOCK_STREAM = 0
+SOCK_DGRAM = 0
+SOCK_RAW = 0
+SOCK_RDM = 0
+SOCK_SEQPACKET = 0
+SOCK_CLOEXEC = 0
+SOCK_NONBLOCK = 0
+SOMAXCONN = 0
+has_ipv6 = False
+_GLOBAL_DEFAULT_TIMEOUT = 0.0
+SocketType = ...  # type: Any
+SocketIO = ...  # type: Any
+
+
+# the following constants are included with Python 3.2.3 (Ubuntu)
+# some of the constants may be Linux-only
+# all Windows/Mac-specific constants are absent
+AF_APPLETALK = 0
+AF_ASH = 0
+AF_ATMPVC = 0
+AF_ATMSVC = 0
+AF_AX25 = 0
+AF_BLUETOOTH = 0
+AF_BRIDGE = 0
+AF_DECnet = 0
+AF_ECONET = 0
+AF_IPX = 0
+AF_IRDA = 0
+AF_KEY = 0
+AF_LLC = 0
+AF_NETBEUI = 0
+AF_NETLINK = 0
+AF_NETROM = 0
+AF_PACKET = 0
+AF_PPPOX = 0
+AF_ROSE = 0
+AF_ROUTE = 0
+AF_SECURITY = 0
+AF_SNA = 0
+AF_TIPC = 0
+AF_UNSPEC = 0
+AF_WANPIPE = 0
+AF_X25 = 0
+AI_ADDRCONFIG = 0
+AI_ALL = 0
+AI_CANONNAME = 0
+AI_NUMERICHOST = 0
+AI_NUMERICSERV = 0
+AI_PASSIVE = 0
+AI_V4MAPPED = 0
+BDADDR_ANY = 0
+BDADDR_LOCAL = 0
+BTPROTO_HCI = 0
+BTPROTO_L2CAP = 0
+BTPROTO_RFCOMM = 0
+BTPROTO_SCO = 0
+CAPI = 0
+EAGAIN = 0
+EAI_ADDRFAMILY = 0
+EAI_AGAIN = 0
+EAI_BADFLAGS = 0
+EAI_FAIL = 0
+EAI_FAMILY = 0
+EAI_MEMORY = 0
+EAI_NODATA = 0
+EAI_NONAME = 0
+EAI_OVERFLOW = 0
+EAI_SERVICE = 0
+EAI_SOCKTYPE = 0
+EAI_SYSTEM = 0
+EBADF = 0
+EINTR = 0
+EWOULDBLOCK = 0
+HCI_DATA_DIR = 0
+HCI_FILTER = 0
+HCI_TIME_STAMP = 0
+INADDR_ALLHOSTS_GROUP = 0
+INADDR_ANY = 0
+INADDR_BROADCAST = 0
+INADDR_LOOPBACK = 0
+INADDR_MAX_LOCAL_GROUP = 0
+INADDR_NONE = 0
+INADDR_UNSPEC_GROUP = 0
+IPPORT_RESERVED = 0
+IPPORT_USERRESERVED = 0
+IPPROTO_AH = 0
+IPPROTO_DSTOPTS = 0
+IPPROTO_EGP = 0
+IPPROTO_ESP = 0
+IPPROTO_FRAGMENT = 0
+IPPROTO_GRE = 0
+IPPROTO_HOPOPTS = 0
+IPPROTO_ICMP = 0
+IPPROTO_ICMPV6 = 0
+IPPROTO_IDP = 0
+IPPROTO_IGMP = 0
+IPPROTO_IP = 0
+IPPROTO_IPIP = 0
+IPPROTO_IPV6 = 0
+IPPROTO_NONE = 0
+IPPROTO_PIM = 0
+IPPROTO_PUP = 0
+IPPROTO_RAW = 0
+IPPROTO_ROUTING = 0
+IPPROTO_RSVP = 0
+IPPROTO_TCP = 0
+IPPROTO_TP = 0
+IPPROTO_UDP = 0
+IPV6_CHECKSUM = 0
+IPV6_DSTOPTS = 0
+IPV6_HOPLIMIT = 0
+IPV6_HOPOPTS = 0
+IPV6_JOIN_GROUP = 0
+IPV6_LEAVE_GROUP = 0
+IPV6_MULTICAST_HOPS = 0
+IPV6_MULTICAST_IF = 0
+IPV6_MULTICAST_LOOP = 0
+IPV6_NEXTHOP = 0
+IPV6_PKTINFO = 0
+IPV6_RECVDSTOPTS = 0
+IPV6_RECVHOPLIMIT = 0
+IPV6_RECVHOPOPTS = 0
+IPV6_RECVPKTINFO = 0
+IPV6_RECVRTHDR = 0
+IPV6_RECVTCLASS = 0
+IPV6_RTHDR = 0
+IPV6_RTHDRDSTOPTS = 0
+IPV6_RTHDR_TYPE_0 = 0
+IPV6_TCLASS = 0
+IPV6_UNICAST_HOPS = 0
+IPV6_V6ONLY = 0
+IP_ADD_MEMBERSHIP = 0
+IP_DEFAULT_MULTICAST_LOOP = 0
+IP_DEFAULT_MULTICAST_TTL = 0
+IP_DROP_MEMBERSHIP = 0
+IP_HDRINCL = 0
+IP_MAX_MEMBERSHIPS = 0
+IP_MULTICAST_IF = 0
+IP_MULTICAST_LOOP = 0
+IP_MULTICAST_TTL = 0
+IP_OPTIONS = 0
+IP_RECVOPTS = 0
+IP_RECVRETOPTS = 0
+IP_RETOPTS = 0
+IP_TOS = 0
+IP_TTL = 0
+MSG_CTRUNC = 0
+MSG_DONTROUTE = 0
+MSG_DONTWAIT = 0
+MSG_EOR = 0
+MSG_OOB = 0
+MSG_PEEK = 0
+MSG_TRUNC = 0
+MSG_WAITALL = 0
+NETLINK_DNRTMSG = 0
+NETLINK_FIREWALL = 0
+NETLINK_IP6_FW = 0
+NETLINK_NFLOG = 0
+NETLINK_ROUTE = 0
+NETLINK_USERSOCK = 0
+NETLINK_XFRM = 0
+NI_DGRAM = 0
+NI_MAXHOST = 0
+NI_MAXSERV = 0
+NI_NAMEREQD = 0
+NI_NOFQDN = 0
+NI_NUMERICHOST = 0
+NI_NUMERICSERV = 0
+PACKET_BROADCAST = 0
+PACKET_FASTROUTE = 0
+PACKET_HOST = 0
+PACKET_LOOPBACK = 0
+PACKET_MULTICAST = 0
+PACKET_OTHERHOST = 0
+PACKET_OUTGOING = 0
+PF_PACKET = 0
+SHUT_RD = 0
+SHUT_RDWR = 0
+SHUT_WR = 0
+SOL_HCI = 0
+SOL_IP = 0
+SOL_SOCKET = 0
+SOL_TCP = 0
+SOL_TIPC = 0
+SOL_UDP = 0
+SO_ACCEPTCONN = 0
+SO_BROADCAST = 0
+SO_DEBUG = 0
+SO_DONTROUTE = 0
+SO_ERROR = 0
+SO_KEEPALIVE = 0
+SO_LINGER = 0
+SO_OOBINLINE = 0
+SO_RCVBUF = 0
+SO_RCVLOWAT = 0
+SO_RCVTIMEO = 0
+SO_REUSEADDR = 0
+SO_SNDBUF = 0
+SO_SNDLOWAT = 0
+SO_SNDTIMEO = 0
+SO_TYPE = 0
+TCP_CORK = 0
+TCP_DEFER_ACCEPT = 0
+TCP_INFO = 0
+TCP_KEEPCNT = 0
+TCP_KEEPIDLE = 0
+TCP_KEEPINTVL = 0
+TCP_LINGER2 = 0
+TCP_MAXSEG = 0
+TCP_NODELAY = 0
+TCP_QUICKACK = 0
+TCP_SYNCNT = 0
+TCP_WINDOW_CLAMP = 0
+TIPC_ADDR_ID = 0
+TIPC_ADDR_NAME = 0
+TIPC_ADDR_NAMESEQ = 0
+TIPC_CFG_SRV = 0
+TIPC_CLUSTER_SCOPE = 0
+TIPC_CONN_TIMEOUT = 0
+TIPC_CRITICAL_IMPORTANCE = 0
+TIPC_DEST_DROPPABLE = 0
+TIPC_HIGH_IMPORTANCE = 0
+TIPC_IMPORTANCE = 0
+TIPC_LOW_IMPORTANCE = 0
+TIPC_MEDIUM_IMPORTANCE = 0
+TIPC_NODE_SCOPE = 0
+TIPC_PUBLISHED = 0
+TIPC_SRC_DROPPABLE = 0
+TIPC_SUBSCR_TIMEOUT = 0
+TIPC_SUB_CANCEL = 0
+TIPC_SUB_PORTS = 0
+TIPC_SUB_SERVICE = 0
+TIPC_TOP_SRV = 0
+TIPC_WAIT_FOREVER = 0
+TIPC_WITHDRAWN = 0
+TIPC_ZONE_SCOPE = 0
+
+
+# ----- exceptions -----
+class error(IOError):
+    ...
+
+class herror(error):
+    def __init__(self, herror: int, string: str) -> None: ...
+
+class gaierror(error):
+    def __init__(self, error: int, string: str) -> None: ...
+
+class timeout(error):
+    ...
+
+
+# Addresses can be either tuples of varying lengths (AF_INET, AF_INET6,
+# AF_NETLINK, AF_TIPC) or strings (AF_UNIX).
+
+# TODO AF_PACKET and AF_BLUETOOTH address objects
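+# Illustrative sketch (hypothetical port and socket path):
+#     s = socket(AF_INET, SOCK_STREAM)
+#     s.bind(('127.0.0.1', 8080))    # AF_INET addresses are (host, port) tuples
+#     u = socket(AF_UNIX, SOCK_STREAM)
+#     u.bind('/tmp/app.sock')        # AF_UNIX addresses are filesystem path strings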
+
+
+# ----- classes -----
+class socket:
+    family = 0
+    type = 0
+    proto = 0
+
+    def __init__(self, family: int = ..., type: int = ...,
+                 proto: int = ..., fileno: int = ...) -> None: ...
+
+    # --- methods ---
+    # second tuple item is an address
+    def accept(self) -> Tuple['socket', Any]: ...
+
+    @overload
+    def bind(self, address: tuple) -> None: ...
+    @overload
+    def bind(self, address: str) -> None: ...
+
+    def close(self) -> None: ...
+
+    @overload
+    def connect(self, address: tuple) -> None: ...
+    @overload
+    def connect(self, address: str) -> None: ...
+
+    @overload
+    def connect_ex(self, address: tuple) -> int: ...
+    @overload
+    def connect_ex(self, address: str) -> int: ...
+
+    def detach(self) -> int: ...
+    def fileno(self) -> int: ...
+
+    # return value is an address
+    def getpeername(self) -> Any: ...
+    def getsockname(self) -> Any: ...
+
+    @overload
+    def getsockopt(self, level: int, optname: str) -> bytes: ...
+    @overload
+    def getsockopt(self, level: int, optname: str, buflen: int) -> bytes: ...
+
+    def gettimeout(self) -> float: ...
+    def ioctl(self, control: object,
+              option: Tuple[int, int, int]) -> None: ...
+    def listen(self, backlog: int) -> None: ...
+    # TODO the return value may be BinaryIO or TextIO, depending on mode
+    def makefile(self, mode: str = ..., buffering: int = ...,
+                 encoding: str = ..., errors: str = ...,
+                 newline: str = ...) -> Any:
+        ...
+    def recv(self, bufsize: int, flags: int = ...) -> bytes: ...
+
+    # return type is an address
+    def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ...
+    def recvfrom_into(self, buffer: bytes, nbytes: int,
+                      flags: int = ...) -> Any: ...
+    def recv_into(self, buffer: bytes, nbytes: int,
+                  flags: int = ...) -> Any: ...
+    def send(self, data: bytes, flags=...) -> int: ...
+    def sendall(self, data: bytes, flags=...) -> Any:
+        ... # return type: None on success
+
+    @overload
+    def sendto(self, data: bytes, address: tuple, flags: int = ...) -> int: ...
+    @overload
+    def sendto(self, data: bytes, address: str, flags: int = ...) -> int: ...
+
+    def setblocking(self, flag: bool) -> None: ...
+    # TODO None valid for the value argument
+    def settimeout(self, value: float) -> None: ...
+
+    @overload
+    def setsockopt(self, level: int, optname: str, value: int) -> None: ...
+    @overload
+    def setsockopt(self, level: int, optname: str, value: bytes) -> None: ...
+
+    def shutdown(self, how: int) -> None: ...
+
+
+# ----- functions -----
+def create_connection(address: Tuple[str, int],
+                      timeout: float = ...,
+                      source_address: Tuple[str, int] = ...) -> socket: ...
+
+# the 5th tuple item is an address
+def getaddrinfo(
+        host: str, port: int, family: int = ..., type: int = ..., proto: int = ...,
+        flags: int = ...) -> List[Tuple[int, int, int, str, tuple]]:
+    ...
+
+def getfqdn(name: str = ...) -> str: ...
+def gethostbyname(hostname: str) -> str: ...
+def gethostbyname_ex(hostname: str) -> Tuple[str, List[str], List[str]]: ...
+def gethostname() -> str: ...
+def gethostbyaddr(ip_address: str) -> Tuple[str, List[str], List[str]]: ...
+def getnameinfo(sockaddr: tuple, flags: int) -> Tuple[str, int]: ...
+def getprotobyname(protocolname: str) -> int: ...
+def getservbyname(servicename: str, protocolname: str = ...) -> int: ...
+def getservbyport(port: int, protocolname: str = ...) -> str: ...
+def socketpair(family: int = ...,
+               type: int = ...,
+               proto: int = ...) -> Tuple[socket, socket]: ...
+def fromfd(fd: int, family: int, type: int, proto: int = ...) -> socket: ...
+def ntohl(x: int) -> int: ...  # param & ret val are 32-bit ints
+def ntohs(x: int) -> int: ...  # param & ret val are 16-bit ints
+def htonl(x: int) -> int: ...  # param & ret val are 32-bit ints
+def htons(x: int) -> int: ...  # param & ret val are 16-bit ints
+def inet_aton(ip_string: str) -> bytes: ...  # ret val 4 bytes in length
+def inet_ntoa(packed_ip: bytes) -> str: ...
+def inet_pton(address_family: int, ip_string: str) -> bytes: ...
+def inet_ntop(address_family: int, packed_ip: bytes) -> str: ...
+# TODO the timeout may be None
+def getdefaulttimeout() -> float: ...
+def setdefaulttimeout(timeout: float) -> None: ...
diff --git a/typeshed/stdlib/3/socketserver.pyi b/typeshed/stdlib/3/socketserver.pyi
new file mode 100644
index 0000000..b92e01f
--- /dev/null
+++ b/typeshed/stdlib/3/socketserver.pyi
@@ -0,0 +1,15 @@
+# Stubs for socketserver
+
+# NOTE: These are incomplete!
+
+from typing import Tuple
+
+class BaseRequestHandler(): ...
+
+class TCPServer():
+    def __init__(
+        self,
+        server_address: Tuple[str, int],
+        request_handler: BaseRequestHandler,
+        bind_and_activate: bool = ...,
+    ) -> None: ...
diff --git a/typeshed/stdlib/3/ssl.pyi b/typeshed/stdlib/3/ssl.pyi
new file mode 100644
index 0000000..7d6beda
--- /dev/null
+++ b/typeshed/stdlib/3/ssl.pyi
@@ -0,0 +1,202 @@
+# Stubs for ssl (Python 3.4)
+
+from typing import Any
+from enum import Enum as _Enum
+from socket import socket
+from collections import namedtuple
+
+class SSLError(OSError): ...
+class SSLEOFError(SSLError): ...
+class SSLSyscallError(SSLError): ...
+class SSLWantReadError(SSLError): ...
+class SSLWantWriteError(SSLError): ...
+class SSLZeroReturnError(SSLError): ...
+
+OPENSSL_VERSION = ... # type: str
+OPENSSL_VERSION_INFO = ... # type: Any
+OPENSSL_VERSION_NUMBER = ... # type: int
+
+VERIFY_CRL_CHECK_CHAIN = ... # type: int
+VERIFY_CRL_CHECK_LEAF = ... # type: int
+VERIFY_DEFAULT = ... # type: int
+VERIFY_X509_STRICT = ... # type: int
+
+ALERT_DESCRIPTION_ACCESS_DENIED = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = ... # type: int
+ALERT_DESCRIPTION_BAD_RECORD_MAC = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_REVOKED = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = ... # type: int
+ALERT_DESCRIPTION_CLOSE_NOTIFY = ... # type: int
+ALERT_DESCRIPTION_DECODE_ERROR = ... # type: int
+ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = ... # type: int
+ALERT_DESCRIPTION_DECRYPT_ERROR = ... # type: int
+ALERT_DESCRIPTION_HANDSHAKE_FAILURE = ... # type: int
+ALERT_DESCRIPTION_ILLEGAL_PARAMETER = ... # type: int
+ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = ... # type: int
+ALERT_DESCRIPTION_INTERNAL_ERROR = ... # type: int
+ALERT_DESCRIPTION_NO_RENEGOTIATION = ... # type: int
+ALERT_DESCRIPTION_PROTOCOL_VERSION = ... # type: int
+ALERT_DESCRIPTION_RECORD_OVERFLOW = ... # type: int
+ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = ... # type: int
+ALERT_DESCRIPTION_UNKNOWN_CA = ... # type: int
+ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = ... # type: int
+ALERT_DESCRIPTION_UNRECOGNIZED_NAME = ... # type: int
+ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = ... # type: int
+ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = ... # type: int
+ALERT_DESCRIPTION_USER_CANCELLED = ... # type: int
+
+OP_ALL = ... # type: int
+OP_CIPHER_SERVER_PREFERENCE = ... # type: int
+OP_NO_COMPRESSION = ... # type: int
+OP_NO_SSLv2 = ... # type: int
+OP_NO_SSLv3 = ... # type: int
+OP_NO_TLSv1 = ... # type: int
+OP_NO_TLSv1_1 = ... # type: int
+OP_NO_TLSv1_2 = ... # type: int
+OP_SINGLE_DH_USE = ... # type: int
+OP_SINGLE_ECDH_USE = ... # type: int
+
+SSL_ERROR_EOF = ... # type: int
+SSL_ERROR_INVALID_ERROR_CODE = ... # type: int
+SSL_ERROR_SSL = ... # type: int
+SSL_ERROR_SYSCALL = ... # type: int
+SSL_ERROR_WANT_CONNECT = ... # type: int
+SSL_ERROR_WANT_READ = ... # type: int
+SSL_ERROR_WANT_WRITE = ... # type: int
+SSL_ERROR_WANT_X509_LOOKUP = ... # type: int
+SSL_ERROR_ZERO_RETURN = ... # type: int
+
+CERT_NONE = ... # type: int
+CERT_OPTIONAL = ... # type: int
+CERT_REQUIRED = ... # type: int
+
+PROTOCOL_SSLv23 = ... # type: int
+PROTOCOL_SSLv3 = ... # type: int
+PROTOCOL_TLSv1 = ... # type: int
+PROTOCOL_TLSv1_1 = ... # type: int
+PROTOCOL_TLSv1_2 = ... # type: int
+
+HAS_ECDH = ... # type: bool
+HAS_NPN = ... # type: bool
+HAS_SNI = ... # type: bool
+
+def RAND_add(string, entropy): ...
+def RAND_bytes(n): ...
+def RAND_egd(path): ...
+def RAND_pseudo_bytes(n): ...
+def RAND_status(): ...
+
+socket_error = OSError
+
+CHANNEL_BINDING_TYPES = ... # type: Any
+
+class CertificateError(ValueError): ...
+
+def match_hostname(cert, hostname): ...
+
+DefaultVerifyPaths = namedtuple(
+    'DefaultVerifyPaths',
+    'cafile capath openssl_cafile_env openssl_cafile openssl_capath_env openssl_capath')
+
+def get_default_verify_paths(): ...
+
+class _ASN1Object:
+    def __new__(cls, oid): ...
+    @classmethod
+    def fromnid(cls, nid): ...
+    @classmethod
+    def fromname(cls, name): ...
+
+class Purpose(_ASN1Object, _Enum):
+    SERVER_AUTH = ... # type: Any
+    CLIENT_AUTH = ... # type: Any
+
+class _SSLContext:
+    check_hostname = ... # type: Any
+    options = ... # type: Any
+    verify_flags = ... # type: Any
+    verify_mode = ... # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+    def _set_npn_protocols(self, *args, **kwargs): ...
+    def _wrap_socket(self, *args, **kwargs): ...
+    def cert_store_stats(self): ...
+    def get_ca_certs(self, binary_form=...): ...
+    def load_cert_chain(self, *args, **kwargs): ...
+    def load_dh_params(self, *args, **kwargs): ...
+    def load_verify_locations(self, *args, **kwargs): ...
+    def session_stats(self, *args, **kwargs): ...
+    def set_ciphers(self, *args, **kwargs): ...
+    def set_default_verify_paths(self, *args, **kwargs): ...
+    def set_ecdh_curve(self, *args, **kwargs): ...
+    def set_servername_callback(self, method): ...
+
+class SSLContext(_SSLContext):
+    def __new__(cls, protocol, *args, **kwargs): ...
+    protocol = ... # type: Any
+    def __init__(self, protocol) -> None: ...
+    def wrap_socket(self, sock, server_side=..., do_handshake_on_connect=...,
+                    suppress_ragged_eofs=..., server_hostname=...): ...
+    def set_npn_protocols(self, npn_protocols): ...
+    def load_default_certs(self, purpose=...): ...
+
+def create_default_context(purpose=..., *, cafile=..., capath=..., cadata=...): ...
+
+class SSLSocket(socket):
+    keyfile = ... # type: Any
+    certfile = ... # type: Any
+    cert_reqs = ... # type: Any
+    ssl_version = ... # type: Any
+    ca_certs = ... # type: Any
+    ciphers = ... # type: Any
+    server_side = ... # type: Any
+    server_hostname = ... # type: Any
+    do_handshake_on_connect = ... # type: Any
+    suppress_ragged_eofs = ... # type: Any
+    context = ... # type: Any  # TODO: This should be a property.
+    def __init__(self, sock=..., keyfile=..., certfile=..., server_side=...,
+                 cert_reqs=..., ssl_version=..., ca_certs=...,
+                 do_handshake_on_connect=..., family=..., type=..., proto=...,
+                 fileno=..., suppress_ragged_eofs=..., npn_protocols=..., ciphers=...,
+                 server_hostname=..., _context=...): ...
+    def dup(self): ...
+    def read(self, len=..., buffer=...): ...
+    def write(self, data): ...
+    def getpeercert(self, binary_form=...): ...
+    def selected_npn_protocol(self): ...
+    def cipher(self): ...
+    def compression(self): ...
+    def send(self, data, flags=...): ...
+    def sendto(self, data, flags_or_addr, addr=...): ...
+    def sendmsg(self, *args, **kwargs): ...
+    def sendall(self, data, flags=...): ...
+    def recv(self, buflen=..., flags=...): ...
+    def recv_into(self, buffer, nbytes=..., flags=...): ...
+    def recvfrom(self, buflen=..., flags=...): ...
+    def recvfrom_into(self, buffer, nbytes=..., flags=...): ...
+    def recvmsg(self, *args, **kwargs): ...
+    def recvmsg_into(self, *args, **kwargs): ...
+    def pending(self): ...
+    def shutdown(self, how): ...
+    def unwrap(self): ...
+    def do_handshake(self, block=...): ...
+    def connect(self, addr): ...
+    def connect_ex(self, addr): ...
+    def accept(self): ...
+    def get_channel_binding(self, cb_type=...): ...
+
+def wrap_socket(sock, keyfile=..., certfile=..., server_side=..., cert_reqs=...,
+                ssl_version=..., ca_certs=..., do_handshake_on_connect=...,
+                suppress_ragged_eofs=..., ciphers=...): ...
+def cert_time_to_seconds(cert_time): ...
+
+PEM_HEADER = ... # type: Any
+PEM_FOOTER = ... # type: Any
+
+def DER_cert_to_PEM_cert(der_cert_bytes): ...
+def PEM_cert_to_DER_cert(pem_cert_string): ...
+def get_server_certificate(addr, ssl_version=..., ca_certs=...): ...
+def get_protocol_name(protocol_code): ...
diff --git a/typeshed/stdlib/3/stat.pyi b/typeshed/stdlib/3/stat.pyi
new file mode 100644
index 0000000..eadffb9
--- /dev/null
+++ b/typeshed/stdlib/3/stat.pyi
@@ -0,0 +1,71 @@
+# Stubs for stat
+
+# Based on http://docs.python.org/3.2/library/stat.html
+
+import typing
+
+def S_ISDIR(mode: int) -> bool: ...
+def S_ISCHR(mode: int) -> bool: ...
+def S_ISBLK(mode: int) -> bool: ...
+def S_ISREG(mode: int) -> bool: ...
+def S_ISFIFO(mode: int) -> bool: ...
+def S_ISLNK(mode: int) -> bool: ...
+def S_ISSOCK(mode: int) -> bool: ...
+
+def S_IMODE(mode: int) -> int: ...
+def S_IFMT(mode) -> int: ...
+
+ST_MODE = 0
+ST_INO = 0
+ST_DEV = 0
+ST_NLINK = 0
+ST_UID = 0
+ST_GID = 0
+ST_SIZE = 0
+ST_ATIME = 0
+ST_MTIME = 0
+ST_CTIME = 0
+
+S_IFSOCK = 0
+S_IFLNK = 0
+S_IFREG = 0
+S_IFBLK = 0
+S_IFDIR = 0
+S_IFCHR = 0
+S_IFIFO = 0
+S_ISUID = 0
+S_ISGID = 0
+S_ISVTX = 0
+
+S_IRWXU = 0
+S_IRUSR = 0
+S_IWUSR = 0
+S_IXUSR = 0
+
+S_IRWXG = 0
+S_IRGRP = 0
+S_IWGRP = 0
+S_IXGRP = 0
+
+S_IRWXO = 0
+S_IROTH = 0
+S_IWOTH = 0
+S_IXOTH = 0
+
+S_ENFMT = 0
+S_IREAD = 0
+S_IWRITE = 0
+S_IEXEC = 0
+
+UF_NODUMP = 0
+UF_IMMUTABLE = 0
+UF_APPEND = 0
+UF_OPAQUE = 0
+UF_NOUNLINK = 0
+#int UF_COMPRESSED # OS X 10.6+ only
+#int UF_HIDDEN     # OS X 10.5+ only
+SF_ARCHIVED = 0
+SF_IMMUTABLE = 0
+SF_APPEND = 0
+SF_NOUNLINK = 0
+SF_SNAPSHOT = 0
diff --git a/typeshed/stdlib/3/string.pyi b/typeshed/stdlib/3/string.pyi
new file mode 100644
index 0000000..bc35bb3
--- /dev/null
+++ b/typeshed/stdlib/3/string.pyi
@@ -0,0 +1,27 @@
+# Stubs for string
+
+# Based on http://docs.python.org/3.2/library/string.html
+
+from typing import Mapping
+
+ascii_letters = ...  # type: str
+ascii_lowercase = ...  # type: str
+ascii_uppercase = ...  # type: str
+digits = ...  # type: str
+hexdigits = ...  # type: str
+octdigits = ...  # type: str
+punctuation = ...  # type: str
+printable = ...  # type: str
+whitespace = ...  # type: str
+
+def capwords(s: str, sep: str = ...) -> str: ...
+
+class Template:
+    template = ...  # type: str
+
+    def __init__(self, template: str) -> None: ...
+    def substitute(self, mapping: Mapping[str, str], **kwds: str) -> str: ...
+    def safe_substitute(self, mapping: Mapping[str, str],
+                        **kwds: str) -> str: ...
+
+# TODO Formatter
diff --git a/typeshed/stdlib/3/struct.pyi b/typeshed/stdlib/3/struct.pyi
new file mode 100644
index 0000000..f41164e
--- /dev/null
+++ b/typeshed/stdlib/3/struct.pyi
@@ -0,0 +1,30 @@
+# Stubs for struct
+
+# Based on http://docs.python.org/3.2/library/struct.html
+
+from typing import overload, Any, AnyStr, Tuple
+
+class error(Exception): ...
+
+def pack(fmt: AnyStr, *v: Any) -> bytes: ...
+# TODO buffer type
+def pack_into(fmt: AnyStr, buffer: Any, offset: int, *v: Any) -> None: ...
+
+# TODO buffer type
+def unpack(fmt: AnyStr, buffer: Any) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: AnyStr, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
+
+def calcsize(fmt: AnyStr) -> int: ...
+
+class Struct:
+    format = b''
+    size = 0
+
+    def __init__(self, format: AnyStr) -> None: ...
+
+    def pack(self, *v: Any) -> bytes: ...
+    # TODO buffer type
+    def pack_into(self, buffer: Any, offset: int, *v: Any) -> None: ...
+    # TODO buffer type
+    def unpack(self, buffer: Any) -> Tuple[Any, ...]: ...
+    def unpack_from(self, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
new file mode 100644
index 0000000..03725bb
--- /dev/null
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -0,0 +1,73 @@
+# Stubs for subprocess
+
+# Based on http://docs.python.org/3.2/library/subprocess.html
+
+from typing import Sequence, Any, Mapping, Callable, Tuple, IO
+
+# TODO force keyword arguments
+# TODO more keyword arguments
+def call(args: Sequence[str], *, stdin: Any = ..., stdout: Any = ...,
+         stderr: Any = ..., shell: bool = ...,
+         env: Mapping[str, str] = ...,
+         cwd: str = ...) -> int: ...
+def check_call(args: Sequence[str], *, stdin: Any = ..., stdout: Any = ...,
+               stderr: Any = ..., shell: bool = ...,
+               env: Mapping[str, str] = ...,
+               cwd: str = ...) -> int: ...
+# Return str/bytes
+def check_output(args: Sequence[str], *, stdin: Any = ..., stderr: Any = ...,
+                 shell: bool = ..., universal_newlines: bool = ...,
+                 env: Mapping[str, str] = ...,
+                 cwd: str = ...) -> Any: ...
+
+# TODO types
+PIPE = ... # type: Any
+STDOUT = ... # type: Any
+
+class CalledProcessError(Exception):
+    returncode = 0
+    cmd = ...  # type: str
+    output = b'' # May be None
+
+    def __init__(self, returncode: int, cmd: str, output: str) -> None: ...
+
+class Popen:
+    stdin = ... # type: IO[Any]
+    stdout = ... # type: IO[Any]
+    stderr = ... # type: IO[Any]
+    pid = 0
+    returncode = 0
+
+    def __init__(self,
+                  args: Sequence[str],
+                  bufsize: int = ...,
+                  executable: str = ...,
+                  stdin: Any = ...,
+                  stdout: Any = ...,
+                  stderr: Any = ...,
+                  preexec_fn: Callable[[], Any] = ...,
+                  close_fds: bool = ...,
+                  shell: bool = ...,
+                  cwd: str = ...,
+                  env: Mapping[str, str] = ...,
+                  universal_newlines: bool = ...,
+                  startupinfo: Any = ...,
+                  creationflags: int = ...,
+                  restore_signals: bool = ...,
+                  start_new_session: bool = ...,
+                  pass_fds: Any = ...) -> None: ...
+
+    def poll(self) -> int: ...
+    def wait(self) -> int: ...
+    # Return str/bytes
+    def communicate(self, input=...) -> Tuple[Any, Any]: ...
+    def send_signal(self, signal: int) -> None: ...
+    def terminate(self) -> None: ...
+    def kill(self) -> None: ...
+    def __enter__(self) -> 'Popen': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
+def getoutput(cmd: str) -> str: ...
+
+# Windows-only: STARTUPINFO etc.
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
new file mode 100644
index 0000000..e533204
--- /dev/null
+++ b/typeshed/stdlib/3/sys.pyi
@@ -0,0 +1,156 @@
+# Stubs for sys
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/sys.html
+
+from typing import (
+    List, Sequence, Any, Dict, Tuple, TextIO, overload, Optional, Union
+)
+from types import TracebackType
+
+# ----- sys variables -----
+abiflags = ...  # type: str
+argv = ... # type: List[str]
+byteorder = ...  # type: str
+builtin_module_names = ... # type: Sequence[str] # actually a tuple of strings
+copyright = ...  # type: str
+#dllhandle = 0  # Windows only
+dont_write_bytecode = False
+__displayhook__ = ... # type: Any # contains the original value of displayhook
+__excepthook__ = ... # type: Any  # contains the original value of excepthook
+exec_prefix = ...  # type: str
+executable = ...  # type: str
+float_repr_style = ...  # type: str
+hexversion = 0  # this is a 32-bit int
+last_type = ... # type: Any
+last_value = ... # type: Any
+last_traceback = ... # type: Any
+maxsize = 0
+maxunicode = 0
+meta_path = ... # type: List[Any]
+modules = ... # type: Dict[str, Any]
+path = ... # type: List[str]
+path_hooks = ... # type: List[Any] # TODO precise type; function, path to finder
+path_importer_cache = ... # type: Dict[str, Any] # TODO precise type
+platform = ...  # type: str
+prefix = ...  # type: str
+ps1 = ...  # type: str
+ps2 = ...  # type: str
+stdin = ... # type: TextIO
+stdout = ... # type: TextIO
+stderr = ... # type: TextIO
+__stdin__ = ... # type: TextIO
+__stdout__ = ... # type: TextIO
+__stderr__ = ... # type: TextIO
+# deprecated and removed in Python 3.3:
+subversion = ... # type: Tuple[str, str, str]
+tracebacklimit = 0
+version = ...  # type: str
+api_version = 0
+warnoptions = ... # type: Any
+#  Each entry is a tuple of the form (action, message, category, module,
+#    lineno)
+#winver = ''  # Windows only
+_xoptions = ... # type: Dict[Any, Any]
+
+flags = ... # type: _flags
+class _flags:
+    debug = 0
+    division_warning = 0
+    inspect = 0
+    interactive = 0
+    optimize = 0
+    dont_write_bytecode = 0
+    no_user_site = 0
+    no_site = 0
+    ignore_environment = 0
+    verbose = 0
+    bytes_warning = 0
+    quiet = 0
+    hash_randomization = 0
+
+float_info = ... # type: _float_info
+class _float_info:
+    epsilon = 0.0   # DBL_EPSILON
+    dig = 0         # DBL_DIG
+    mant_dig = 0    # DBL_MANT_DIG
+    max = 0.0       # DBL_MAX
+    max_exp = 0     # DBL_MAX_EXP
+    max_10_exp = 0  # DBL_MAX_10_EXP
+    min = 0.0       # DBL_MIN
+    min_exp = 0     # DBL_MIN_EXP
+    min_10_exp = 0  # DBL_MIN_10_EXP
+    radix = 0       # FLT_RADIX
+    rounds = 0      # FLT_ROUNDS
+
+hash_info = ... # type: _hash_info
+class _hash_info:
+    width = 0    # width in bits used for hash values
+    modulus = 0  # prime modulus P used for numeric hash scheme
+    inf = 0      # hash value returned for a positive infinity
+    nan = 0      # hash value returned for a nan
+    imag = 0     # multiplier used for the imaginary part of a complex number
+
+int_info = ... # type: _int_info
+class _int_info:
+    bits_per_digit = 0  # number of bits held in each digit. Python integers
+                        # are stored internally in
+                        # base 2**int_info.bits_per_digit
+    sizeof_digit = 0    # size in bytes of C type used to represent a digit
+
+class _version_info(Tuple[int, int, int, str, int]):
+    major = 0
+    minor = 0
+    micro = 0
+    releaselevel = ...  # type: str
+    serial = 0
+version_info = ... # type: _version_info
+
+
+# ----- sys function stubs -----
+def call_tracing(fn: Any, args: Any) -> object: ...
+def _clear_type_cache() -> None: ...
+def _current_frames() -> Dict[int, Any]: ...
+def displayhook(value: Optional[Any]) -> None: ...
+def excepthook(type_: type, value: BaseException,
+               traceback: TracebackType) -> None: ...
+def exc_info() -> Tuple[type, BaseException, TracebackType]: ...
+# sys.exit() accepts an optional argument of anything printable
+def exit(arg: Any = ...) -> None:
+    raise SystemExit()
+def getcheckinterval() -> int: ...  # deprecated
+def getdefaultencoding() -> str: ...
+def getdlopenflags() -> int: ...  # Unix only
+def getfilesystemencoding() -> str: ...  # cannot return None
+def getrefcount(object) -> int: ...
+def getrecursionlimit() -> int: ...
+
+@overload
+def getsizeof(obj: object) -> int: ...
+@overload
+def getsizeof(obj: object, default: int) -> int: ...
+
+def getswitchinterval() -> float: ...
+
+@overload
+def _getframe() -> Any: ...
+@overload
+def _getframe(depth: int) -> Any: ...
+
+def getprofile() -> Any: ... # TODO return type
+def gettrace() -> Any: ... # TODO return
+def getwindowsversion() -> Any: ...  # Windows only, TODO return type
+def intern(string: str) -> str: ...
+def setcheckinterval(interval: int) -> None: ...  # deprecated
+def setdlopenflags(n: int) -> None: ...  # Unix only
+def setprofile(profilefunc: Any) -> None: ... # TODO type
+def setrecursionlimit(limit: int) -> None: ...
+def setswitchinterval(interval: float) -> None: ...
+def settrace(tracefunc: Any) -> None: ... # TODO type
+# Trace functions should have three arguments: frame, event, and arg. frame
+# is the current stack frame. event is a string: 'call', 'line', 'return',
+# 'exception', 'c_call', 'c_return', or 'c_exception'. arg depends on the
+# event type.
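+# Illustration only (not part of the upstream stub): a minimal trace function
+# matching the protocol described above could look like
+#     def tracefunc(frame, event, arg): return tracefunc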
+def settscdump(on_flag: bool) -> None: ...
+
+def gettotalrefcount() -> int: ... # Debug builds only
diff --git a/typeshed/stdlib/3/sysconfig.pyi b/typeshed/stdlib/3/sysconfig.pyi
new file mode 100644
index 0000000..8d7ab2c
--- /dev/null
+++ b/typeshed/stdlib/3/sysconfig.pyi
@@ -0,0 +1,8 @@
+# Stubs for sysconfig
+
+# NOTE: These are incomplete!
+
+import typing
+
+def get_config_var(name: str) -> str: ...
+def is_python_build() -> bool: ...
diff --git a/typeshed/stdlib/3/tarfile.pyi b/typeshed/stdlib/3/tarfile.pyi
new file mode 100644
index 0000000..98f68a6
--- /dev/null
+++ b/typeshed/stdlib/3/tarfile.pyi
@@ -0,0 +1,33 @@
+# TODO these are incomplete
+
+from typing import Any, List, overload, Callable
+
+class TarError(Exception): ...
+
+class TarInfo:
+    name = ...  # type: str
+    size = 0
+    uid = 0
+    gid = 0
+
+class TarFile:
+    def getmember(self, name: str) -> TarInfo: ...
+    def getmembers(self) -> List[TarInfo]: ...
+    def getnames(self) -> List[str]: ...
+    def extractall(self, path: str = ...,
+                   members: List[TarInfo] = ...) -> None: ...
+
+    @overload
+    def extract(self, member: str, path: str = ...,
+                set_attrs: bool = ...) -> None: ...
+    @overload
+    def extract(self, member: TarInfo, path: str = ...,
+                set_attrs: bool = ...) -> None: ...
+
+    def add(self, name: str, arcname: str = ..., recursive: bool = ...,
+            exclude: Callable[[str], bool] = ..., *,
+            filter: 'Callable[[TarFile], TarFile]' = ...) -> None: ...
+    def close(self) -> None: ...
+
+def open(name: str = ..., mode: str = ..., fileobj: Any = ..., bufsize: int = ...,
+         **kwargs) -> TarFile: ...
diff --git a/typeshed/stdlib/3/tempfile.pyi b/typeshed/stdlib/3/tempfile.pyi
new file mode 100644
index 0000000..ce73d8a
--- /dev/null
+++ b/typeshed/stdlib/3/tempfile.pyi
@@ -0,0 +1,45 @@
+# Stubs for tempfile
+# Ron Murawski <ron at horizonchess.com>
+
+# based on http://docs.python.org/3.3/library/tempfile.html
+
+from typing import Tuple, BinaryIO
+
+# global variables
+tempdir = ...  # type: str
+template = ...  # type: str
+
+# TODO text files
+
+# function stubs
+def TemporaryFile(
+            mode: str = ..., buffering: int = ..., encoding: str = ...,
+            newline: str = ..., suffix: str = ..., prefix: str = ...,
+            dir: str = ...) -> BinaryIO:
+    ...
+def NamedTemporaryFile(
+            mode: str = ..., buffering: int = ..., encoding: str = ...,
+            newline: str = ..., suffix: str = ..., prefix: str = ...,
+            dir: str = ..., delete: bool = ...) -> BinaryIO:
+    ...
+def SpooledTemporaryFile(
+            max_size: int = ..., mode: str = ..., buffering: int = ...,
+            encoding: str = ..., newline: str = ..., suffix: str = ...,
+            prefix: str = ..., dir: str = ...) -> BinaryIO:
+    ...
+
+class TemporaryDirectory:
+    name = ...  # type: str
+    def __init__(self, suffix: str = ..., prefix: str = ...,
+                 dir: str = ...) -> None: ...
+    def cleanup(self) -> None: ...
+    def __enter__(self) -> str: ...
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+def mkstemp(suffix: str = ..., prefix: str = ..., dir: str = ...,
+            text: bool = ...) -> Tuple[int, str]: ...
+def mkdtemp(suffix: str = ..., prefix: str = ...,
+            dir: str = ...) -> str: ...
+def mktemp(suffix: str = ..., prefix: str = ..., dir: str = ...) -> str: ...
+def gettempdir() -> str: ...
+def gettempprefix() -> str: ...
diff --git a/typeshed/stdlib/3/textwrap.pyi b/typeshed/stdlib/3/textwrap.pyi
new file mode 100644
index 0000000..f644104
--- /dev/null
+++ b/typeshed/stdlib/3/textwrap.pyi
@@ -0,0 +1,119 @@
+# Better textwrap stubs hand-written by o11c.
+# https://docs.python.org/3/library/textwrap.html
+from typing import (
+        Callable,
+        List,
+)
+
+class TextWrapper:
+    def __init__(self,
+            width: int = ...,
+            *,
+            initial_indent: str = ...,
+            subsequent_indent: str = ...,
+            expand_tabs: bool = ...,
+            tabsize: int = ...,
+            replace_whitespace: bool = ...,
+            fix_sentence_endings: bool = ...,
+            break_long_words: bool = ...,
+            break_on_hyphens: bool = ...,
+            drop_whitespace: bool = ...,
+            max_lines: int = ...,
+            placeholder: str = ...
+    ) -> None:
+        self.width = width
+        self.initial_indent = initial_indent
+        self.subsequent_indent = subsequent_indent
+        self.expand_tabs = expand_tabs
+        self.tabsize = tabsize
+        self.replace_whitespace = replace_whitespace
+        self.fix_sentence_endings = fix_sentence_endings
+        self.break_long_words = break_long_words
+        self.break_on_hyphens = break_on_hyphens
+        self.drop_whitespace = drop_whitespace
+        self.max_lines = max_lines
+        self.placeholder = placeholder
+
+    # Private methods *are* part of the documented API for subclasses.
+    def _munge_whitespace(self, text: str) -> str:
+        ...
+
+    def _split(self, text: str) -> List[str]:
+        ...
+
+    def _fix_sentence_endings(self, chunks: List[str]) -> None:
+        ...
+
+    def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None:
+        ...
+
+    def _wrap_chunks(self, chunks: List[str]) -> List[str]:
+        ...
+
+    def _split_chunks(self, text: str) -> List[str]:
+        ...
+
+    def wrap(self, text: str) -> List[str]:
+        ...
+
+    def fill(self, text: str) -> str:
+        ...
+
+
+def wrap(
+        text: str = ...,
+        width: int = ...,
+        *,
+        initial_indent: str = ...,
+        subsequent_indent: str = ...,
+        expand_tabs: bool = ...,
+        tabsize: int = ...,
+        replace_whitespace: bool = ...,
+        fix_sentence_endings: bool = ...,
+        break_long_words: bool = ...,
+        break_on_hyphens: bool = ...,
+        drop_whitespace: bool = ...,
+        max_lines: int = ...,
+        placeholder: str = ...
+) -> List[str]:
+    ...
+
+def fill(
+        text: str,
+        width: int = ...,
+        *,
+        initial_indent: str = ...,
+        subsequent_indent: str = ...,
+        expand_tabs: bool = ...,
+        tabsize: int = ...,
+        replace_whitespace: bool = ...,
+        fix_sentence_endings: bool = ...,
+        break_long_words: bool = ...,
+        break_on_hyphens: bool = ...,
+        drop_whitespace: bool = ...,
+        max_lines: int = ...,
+        placeholder: str = ...
+) -> str:
+    ...
+
+def shorten(
+        text: str,
+        width: int,
+        *,
+        initial_indent: str = ...,
+        subsequent_indent: str = ...,
+        expand_tabs: bool = ...,
+        tabsize: int = ...,
+        replace_whitespace: bool = ...,
+        fix_sentence_endings: bool = ...,
+        break_long_words: bool = ...,
+        break_on_hyphens: bool = ...,
+        drop_whitespace: bool = ...,
+        # Omit `max_lines: int = None`, it is forced to 1 here.
+        placeholder: str = ...
+) -> str:
+    ...
+
+def dedent(text: str) -> str:
+    ...
+
+def indent(text: str, prefix: str, predicate: Callable[[str], bool] = ...) -> str:
+    ...
diff --git a/typeshed/stdlib/3/threading.pyi b/typeshed/stdlib/3/threading.pyi
new file mode 100644
index 0000000..83f2307
--- /dev/null
+++ b/typeshed/stdlib/3/threading.pyi
@@ -0,0 +1,64 @@
+# Stubs for threading
+
+# NOTE: These are incomplete!
+
+from typing import Any, Optional, Callable, TypeVar, Union, Mapping, Sequence
+
+class Thread:
+    name = ...  # type: str
+    ident = 0
+    daemon = False
+
+    def __init__(self, group: Any = ..., target: Callable[..., Any] = ...,
+                 name: str = ..., args: Sequence[Any] = ...,
+                 kwargs: Mapping[str, Any] = ..., daemon: bool = ...) -> None: ...
+    def start(self) -> None: ...
+    def run(self) -> None: ...
+    def join(self, timeout: float = ...) -> None: ...
+    def is_alive(self) -> bool: ...
+
+    # Legacy methods
+    def getName(self) -> str: ...
+    def setName(self, name: str) -> None: ...
+    def isDaemon(self) -> bool: ...
+    def setDaemon(self, daemon: bool) -> None: ...
+
+class Timer(Thread):
+    def __init__(self, interval: float, function: Callable[..., Any],
+                 args: Sequence[Any] = ...,
+                 kwargs: Mapping[str, Any] = ...) -> None: ...
+    def cancel(self) -> None : ...
+
+class local(Any): ...
+
+class Event:
+    def is_set(self) -> bool: ...
+    def set(self) -> None: ...
+    def clear(self) -> None: ...
+    # TODO can it return None?
+    def wait(self, timeout: float = ...) -> bool: ...
+
+class Lock:
+    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
+    def release(self) -> None: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+class RLock:
+    def acquire(self, blocking: bool = ...,
+                timeout: float = ...) -> Optional[bool]: ...
+    def release(self) -> None: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
+
+_T = TypeVar('_T')
+
+class Condition:
+    def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
+    def release(self) -> None: ...
+    def notify(self, n: int = ...) -> None: ...
+    def notify_all(self) -> None: ...
+    def wait(self, timeout: float = ...) -> bool: ...
+    def wait_for(self, predicate: Callable[[], _T], timeout: float = ...) -> Union[_T, bool]: ...
+    def __enter__(self) -> bool: ...
+    def __exit__(self, *args): ...
diff --git a/typeshed/stdlib/3/time.pyi b/typeshed/stdlib/3/time.pyi
new file mode 100644
index 0000000..a1b8626
--- /dev/null
+++ b/typeshed/stdlib/3/time.pyi
@@ -0,0 +1,64 @@
+# Stubs for time
+# Ron Murawski <ron at horizonchess.com>
+
+# based on: http://docs.python.org/3.2/library/time.html#module-time
+# see: http://nullege.com/codes/search?cq=time
+
+from typing import Tuple, Union
+
+# ----- variables and constants -----
+accept2dyear = False
+altzone = 0
+daylight = 0
+timezone = 0
+tzname = ... # type: Tuple[str, str]
+
+
+# ----- classes/methods -----
+class struct_time:
+    # this is supposed to be a namedtuple object
+    # namedtuple is not yet implemented (see file: mypy/stubs/collections.py)
+    # see: http://docs.python.org/3.2/library/time.html#time.struct_time
+    # see: http://nullege.com/codes/search/time.struct_time
+    # TODO: namedtuple() object problem
+    #namedtuple __init__(self, int, int, int, int, int, int, int, int, int):
+    #    ...
+    tm_year = 0
+    tm_mon = 0
+    tm_mday = 0
+    tm_hour = 0
+    tm_min = 0
+    tm_sec = 0
+    tm_wday = 0
+    tm_yday = 0
+    tm_isdst = 0
+
+
+# ----- functions -----
+def asctime(t: Union[Tuple[int, int, int, int, int, int, int, int, int],
+                     struct_time,
+                     None] = ...) -> str: ...  # return current time
+
+def clock() -> float: ...
+
+def ctime(secs: Union[float, None] = ...) -> str: ...  # return current time
+
+def gmtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
+
+def localtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
+
+def mktime(t: Union[Tuple[int, int, int, int, int,
+                          int, int, int, int],
+                    struct_time]) -> float: ...
+
+def sleep(secs: Union[int, float]) -> None: ...
+
+def strftime(format: str, t: Union[Tuple[int, int, int, int, int,
+                                         int, int, int, int],
+                                   struct_time,
+                                   None] = ...) -> str: ...  # return current time
+
+def strptime(string: str,
+             format: str = ...) -> struct_time: ...
+def time() -> float: ...
+def tzset() -> None: ...  # Unix only
diff --git a/typeshed/stdlib/3/token.pyi b/typeshed/stdlib/3/token.pyi
new file mode 100644
index 0000000..76a746f
--- /dev/null
+++ b/typeshed/stdlib/3/token.pyi
@@ -0,0 +1,63 @@
+from typing import Dict
+
+ENDMARKER = 0
+NAME = 0
+NUMBER = 0
+STRING = 0
+NEWLINE = 0
+INDENT = 0
+DEDENT = 0
+LPAR = 0
+RPAR = 0
+LSQB = 0
+RSQB = 0
+COLON = 0
+COMMA = 0
+SEMI = 0
+PLUS = 0
+MINUS = 0
+STAR = 0
+SLASH = 0
+VBAR = 0
+AMPER = 0
+LESS = 0
+GREATER = 0
+EQUAL = 0
+DOT = 0
+PERCENT = 0
+LBRACE = 0
+RBRACE = 0
+EQEQUAL = 0
+NOTEQUAL = 0
+LESSEQUAL = 0
+GREATEREQUAL = 0
+TILDE = 0
+CIRCUMFLEX = 0
+LEFTSHIFT = 0
+RIGHTSHIFT = 0
+DOUBLESTAR = 0
+PLUSEQUAL = 0
+MINEQUAL = 0
+STAREQUAL = 0
+SLASHEQUAL = 0
+PERCENTEQUAL = 0
+AMPEREQUAL = 0
+VBAREQUAL = 0
+CIRCUMFLEXEQUAL = 0
+LEFTSHIFTEQUAL = 0
+RIGHTSHIFTEQUAL = 0
+DOUBLESTAREQUAL = 0
+DOUBLESLASH = 0
+DOUBLESLASHEQUAL = 0
+AT = 0
+RARROW = 0
+ELLIPSIS = 0
+OP = 0
+ERRORTOKEN = 0
+N_TOKENS = 0
+NT_OFFSET = 0
+tok_name = {} # type: Dict[int, str]
+
+def ISTERMINAL(x: int) -> bool: pass
+def ISNONTERMINAL(x: int) -> bool: pass
+def ISEOF(x: int) -> bool: pass
diff --git a/typeshed/stdlib/3/traceback.pyi b/typeshed/stdlib/3/traceback.pyi
new file mode 100644
index 0000000..21c2564
--- /dev/null
+++ b/typeshed/stdlib/3/traceback.pyi
@@ -0,0 +1,16 @@
+# Stubs for traceback
+
+from types import TracebackType
+from typing import List
+
+# TODO signatures
+def format_exception_only(etype, value): ...
+def format_exception(etype: type, value: BaseException, tb: TracebackType, limit: int = ..., chain: bool = ...) -> List[str]: ...
+def format_tb(traceback): ...
+def print_exc(limit=..., file=..., chain=...): ...
+def format_exc(limit: int = ..., chain: bool = ...) -> str: ...
+def extract_stack(f=..., limit=...): ...
+def extract_tb(traceback, limit=...): ...
+def format_list(list): ...
+
+# TODO add more
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
new file mode 100644
index 0000000..5e94ee8
--- /dev/null
+++ b/typeshed/stdlib/3/types.pyi
@@ -0,0 +1,149 @@
+# Stubs for types
+# Note, all classes "defined" here require special handling.
+
+# TODO parts of this should be conditional on version
+
+from typing import Any, Callable, Dict, Iterator, Optional, Tuple, TypeVar, Union, overload
+
+_T = TypeVar('_T')
+
+class _Cell:
+    cell_contents = ...  # type: Any
+
+class FunctionType:
+    __closure__ = ...  # type: Optional[Tuple[_Cell, ...]]
+    __code__ = ...  # type: CodeType
+    __defaults__ = ...  # type: Optional[Tuple[Any, ...]]
+    __dict__ = ...  # type: Dict[str, Any]
+    __doc__ = ...  # type: Optional[str]
+    __globals__ = ...  # type: Dict[str, Any]
+    __name__ = ...  # type: str
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def __get__(self, obj: Optional[object], type: Optional[type]) -> 'MethodType': ...
+LambdaType = FunctionType
+
+class CodeType:
+    """Create a code object.  Not for the faint of heart."""
+    co_argcount = ... # type: int
+    co_kwonlyargcount = ... # type: int
+    co_nlocals = ... # type: int
+    co_stacksize = ... # type: int
+    co_flags = ... # type: int
+    co_code = ... # type: bytes
+    co_consts = ... # type: Tuple[Any, ...]
+    co_names = ... # type: Tuple[str, ...]
+    co_varnames = ... # type: Tuple[str, ...]
+    co_filename = ... # type: Optional[str]
+    co_name = ... # type: str
+    co_firstlineno = ... # type: int
+    co_lnotab = ... # type: bytes
+    co_freevars = ... # type: Tuple[str, ...]
+    co_cellvars = ... # type: Tuple[str, ...]
+    def __init__(self,
+            argcount: int,
+            kwonlyargcount: int,
+            nlocals: int,
+            stacksize: int,
+            flags: int,
+            codestring: bytes,
+            constants: Tuple[Any, ...],
+            names: Tuple[str, ...],
+            varnames: Tuple[str, ...],
+            filename: str,
+            name: str,
+            firstlineno: int,
+            lnotab: bytes,
+            freevars: Tuple[str, ...] = ...,
+            cellvars: Tuple[str, ...] = ...,
+    ) -> None: ...
+
+class MappingProxyType:
+    def copy(self) -> dict: ...
+    def get(self, key: str, default: _T = ...) -> Union[Any, _T]: ...
+    def items(self) -> Iterator[Tuple[str, Any]]: ...
+    def keys(self) -> Iterator[str]: ...
+    def values(self) -> Iterator[Any]: ...
+    def __contains__(self, key: str) -> bool: ...
+    def __getitem__(self, key: str) -> Any: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __len__(self) -> int: ...
+class SimpleNamespace(Any): ...
+
+class GeneratorType:
+    gi_code = ...  # type: CodeType
+    gi_frame = ...  # type: FrameType
+    gi_running = ...  # type: bool
+    gi_yieldfrom = ...  # type: Optional[GeneratorType]
+    def __iter__(self) -> 'GeneratorType': ...
+    def __next__(self) -> Any: ...
+    def close(self) -> None: ...
+    def send(self, arg: Any) -> Any: ...
+    @overload
+    def throw(self, val: BaseException) -> Any: ...
+    @overload
+    def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ...
+
+class CoroutineType:
+    cr_await = ...  # type: Optional[Any]
+    cr_code = ...  # type: CodeType
+    cr_frame = ...  # type: FrameType
+    cr_running = ...  # type: bool
+    def close(self) -> None: ...
+    def send(self, arg: Any) -> Any: ...
+    @overload
+    def throw(self, val: BaseException) -> Any: ...
+    @overload
+    def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ...
+
+class MethodType:
+    __func__ = ...  # type: FunctionType
+    __self__ = ...  # type: object
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+class BuiltinFunctionType:
+    __self__ = ...  # type: Union[object, ModuleType]
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+BuiltinMethodType = BuiltinFunctionType
+
+class ModuleType:
+    __name__ = ... # type: str
+    __file__ = ... # type: str
+    def __init__(self, name: str, doc: Any) -> None: ...
+
+class TracebackType:
+    tb_frame = ... # type: FrameType
+    tb_lasti = ... # type: int
+    tb_lineno = ... # type: int
+    tb_next = ... # type: TracebackType
+
+class FrameType:
+    f_back = ... # type: FrameType
+    f_builtins = ... # type: Dict[str, Any]
+    f_code = ... # type: CodeType
+    f_globals = ... # type: Dict[str, Any]
+    f_lasti = ... # type: int
+    f_lineno = ... # type: int
+    f_locals = ... # type: Dict[str, Any]
+    f_trace = ... # type: Callable[[], None]
+
+    def clear(self) -> None: pass
+
+class GetSetDescriptorType:
+    __name__ = ...  # type: str
+    __objclass__ = ...  # type: type
+    def __get__(self, obj: Any, type: type = ...) -> Any: ...
+    def __set__(self, obj: Any, value: Any) -> None: ...
+    def __delete__(self, obj: Any) -> None: ...
+class MemberDescriptorType:
+    __name__ = ...  # type: str
+    __objclass__ = ...  # type: type
+    def __get__(self, obj: Any, type: type = ...) -> Any: ...
+    def __set__(self, obj: Any, value: Any) -> None: ...
+    def __delete__(self, obj: Any) -> None: ...
+
+def new_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ..., exec_body: Callable[[Dict[str, Any]], None] = ...) -> type: ...
+def prepare_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ...) -> Tuple[type, Dict[str, Any], Dict[str, Any]]: ...
+
+# Actually a different type, but `property` is special and we want that too.
+DynamicClassAttribute = property
+
+def coroutine(f: Callable[..., Any]) -> CoroutineType: ...
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
new file mode 100644
index 0000000..1f31e8a
--- /dev/null
+++ b/typeshed/stdlib/3/typing.pyi
@@ -0,0 +1,378 @@
+# Stubs for typing
+
+from abc import abstractmethod, ABCMeta
+
+# Definitions of special type checking related constructs.  Their definitions
+# are not used, so their values do not matter.
+
+cast = object()
+overload = object()
+Any = object()
+TypeVar = object()
+Generic = object()
+Tuple = object()
+Callable = object()
+builtinclass = object()
+_promote = object()
+NamedTuple = object()
+no_type_check = object()
+
+# Type aliases and type constructors
+
+class TypeAlias:
+    # Class for defining generic aliases for library types.
+    def __init__(self, target_type) -> None: ...
+    def __getitem__(self, typeargs): ...
+
+Union = TypeAlias(object)
+Optional = TypeAlias(object)
+List = TypeAlias(object)
+Dict = TypeAlias(object)
+Set = TypeAlias(object)
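+# Illustration only (assumption, not part of the upstream stub): at runtime a
+# subscription such as List[int] merely calls TypeAlias.__getitem__ and returns
+# its result; only the type checker assigns meaning to the type arguments.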
+
+# Predefined type variables.
+AnyStr = TypeVar('AnyStr', str, bytes)
+
+# Abstract base classes.
+
+# These type variables are used by the container types.
+_T = TypeVar('_T')
+_S = TypeVar('_S')
+_KT = TypeVar('_KT')  # Key type.
+_VT = TypeVar('_VT')  # Value type.
+_T_co = TypeVar('_T_co', covariant=True)  # Any type covariant containers.
+_V_co = TypeVar('_V_co', covariant=True)  # Any type covariant containers.
+_KT_co = TypeVar('_KT_co', covariant=True)  # Key type covariant containers.
+_VT_co = TypeVar('_VT_co', covariant=True)  # Value type covariant containers.
+_T_contra = TypeVar('_T_contra', contravariant=True)  # Ditto contravariant.
+
+class SupportsInt(metaclass=ABCMeta):
+    @abstractmethod
+    def __int__(self) -> int: ...
+
+class SupportsFloat(metaclass=ABCMeta):
+    @abstractmethod
+    def __float__(self) -> float: ...
+
+class SupportsComplex(metaclass=ABCMeta):
+    @abstractmethod
+    def __complex__(self) -> complex: pass
+
+class SupportsBytes(metaclass=ABCMeta):
+    @abstractmethod
+    def __bytes__(self) -> bytes: pass
+
+class SupportsAbs(Generic[_T]):
+    @abstractmethod
+    def __abs__(self) -> _T: ...
+
+class SupportsRound(Generic[_T]):
+    @abstractmethod
+    def __round__(self, ndigits: int = ...) -> _T: ...
+
+class Reversible(Generic[_T_co]):
+    @abstractmethod
+    def __reversed__(self) -> Iterator[_T_co]: ...
+
+class Sized(metaclass=ABCMeta):
+    @abstractmethod
+    def __len__(self) -> int: ...
+
+class Hashable(metaclass=ABCMeta):
+    # TODO: This is special, in that a subclass of a hashable class may not be hashable
+    #   (for example, list vs. object). It's not obvious how to represent this. This class
+    #   is currently mostly useless for static checking.
+    @abstractmethod
+    def __hash__(self) -> int: ...
+
+class Iterable(Generic[_T_co]):
+    @abstractmethod
+    def __iter__(self) -> Iterator[_T_co]: ...
+
+class Iterator(Iterable[_T_co], Generic[_T_co]):
+    @abstractmethod
+    def __next__(self) -> _T_co: ...
+    def __iter__(self) -> 'Iterator[_T_co]': ...
+
+class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
+    @abstractmethod
+    def __next__(self) -> _T_co:...
+
+    @abstractmethod
+    def send(self, value: _T_contra) -> _T_co:...
+
+    @abstractmethod
+    def throw(self, typ: BaseException, val: Any = ..., tb: Any = ...) -> None: ...
+
+    @abstractmethod
+    def close(self) -> None:...
+
+    @abstractmethod
+    def __iter__(self) -> 'Generator[_T_co, _T_contra, _V_co]': ...
+
+class AbstractFuture(Generic[_T]): ...
+
+class Awaitable(Generic[_T_co]):
+    @abstractmethod
+    def __await__(self) -> Generator[AbstractFuture[_T_co], Any, _T_co]:...
+
+class AsyncIterable(Generic[_T_co]):
+    @abstractmethod
+    def __anext__(self) -> Awaitable[_T_co]:...
+
+class AsyncIterator(AsyncIterable[_T_co],
+                    Generic[_T_co]):
+    @abstractmethod
+    def __anext__(self) -> Awaitable[_T_co]:...
+    def __aiter__(self) -> 'AsyncIterator[_T_co]':...
+
+class Container(Generic[_T_co]):
+    @abstractmethod
+    def __contains__(self, x: object) -> bool: ...
+
+class Sequence(Iterable[_T_co], Container[_T_co], Sized, Reversible[_T_co], Generic[_T_co]):
+    @overload
+    @abstractmethod
+    def __getitem__(self, i: int) -> _T_co: ...
+    @overload
+    @abstractmethod
+    def __getitem__(self, s: slice) -> Sequence[_T_co]: ...
+    # Mixin methods
+    def index(self, x: Any) -> int: ...
+    def count(self, x: Any) -> int: ...
+    def __contains__(self, x: object) -> bool: ...
+    def __iter__(self) -> Iterator[_T_co]: ...
+    def __reversed__(self) -> Iterator[_T_co]: ...
+
+class MutableSequence(Sequence[_T], Generic[_T]):
+    @abstractmethod
+    def insert(self, index: int, object: _T) -> None: ...
+    @overload
+    @abstractmethod
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    @abstractmethod
+    def __setitem__(self, s: slice, o: Sequence[_T]) -> None: ...
+    @overload
+    @abstractmethod
+    def __delitem__(self, i: int) -> None: ...
+    @overload
+    @abstractmethod
+    def __delitem__(self, i: slice) -> None: ...
+    # Mixin methods
+    def append(self, object: _T) -> None: ...
+    def extend(self, iterable: Iterable[_T]) -> None: ...
+    def reverse(self) -> None: ...
+    def pop(self, index: int = ...) -> _T: ...
+    def remove(self, object: _T) -> None: ...
+    def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ...
+
+class AbstractSet(Iterable[_KT_co], Container[_KT_co], Sized, Generic[_KT_co]):
+    @abstractmethod
+    def __contains__(self, x: object) -> bool: ...
+    # Mixin methods
+    def __le__(self, s: AbstractSet[Any]) -> bool: ...
+    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
+    def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_KT_co]: ...
+    def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_KT_co, _T]]: ...
+    def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_KT_co]: ...
+    def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_KT_co, _T]]: ...
+    # TODO: Argument can be a more general ABC?
+    def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
+
+class MutableSet(AbstractSet[_T], Generic[_T]):
+    @abstractmethod
+    def add(self, x: _T) -> None: ...
+    @abstractmethod
+    def discard(self, x: _T) -> None: ...
+    # Mixin methods
+    def clear(self) -> None: ...
+    def pop(self) -> _T: ...
+    def remove(self, element: _T) -> None: ...
+    def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ...
+    def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ...
+    def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ...
+    def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ...
+
+class MappingView(Sized):
+    def __len__(self) -> int: ...
+
+class ItemsView(AbstractSet[Tuple[_KT_co, _VT_co]], MappingView, Generic[_KT_co, _VT_co]):
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
+
+class KeysView(AbstractSet[_KT_co], MappingView, Generic[_KT_co]):
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_KT_co]: ...
+
+class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_VT_co]: ...
+
+class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]):
+    # TODO: Value type should be covariant, but currently we can't give a good signature for
+    #   get if this is the case.
+    @abstractmethod
+    def __getitem__(self, k: _KT) -> _VT: ...
+    # Mixin methods
+    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def items(self) -> AbstractSet[Tuple[_KT, _VT]]: ...
+    def keys(self) -> AbstractSet[_KT]: ...
+    def values(self) -> ValuesView[_VT]: ...
+    def __contains__(self, o: object) -> bool: ...
+
+class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
+    @abstractmethod
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    @abstractmethod
+    def __delitem__(self, v: _KT) -> None: ...
+
+    def clear(self) -> None: ...
+    def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def update(self, m: Union[Mapping[_KT, _VT],
+                              Iterable[Tuple[_KT, _VT]]]) -> None: ...
+
+class IO(Iterable[AnyStr], Generic[AnyStr]):
+    # TODO detach
+    # TODO use abstract properties
+    @property
+    def mode(self) -> str: ...
+    @property
+    def name(self) -> str: ...
+    @abstractmethod
+    def close(self) -> None: ...
+    @property
+    def closed(self) -> bool: ...
+    @abstractmethod
+    def fileno(self) -> int: ...
+    @abstractmethod
+    def flush(self) -> None: ...
+    @abstractmethod
+    def isatty(self) -> bool: ...
+    # TODO what if n is None?
+    @abstractmethod
+    def read(self, n: int = ...) -> AnyStr: ...
+    @abstractmethod
+    def readable(self) -> bool: ...
+    @abstractmethod
+    def readline(self, limit: int = ...) -> AnyStr: ...
+    @abstractmethod
+    def readlines(self, hint: int = ...) -> list[AnyStr]: ...
+    @abstractmethod
+    def seek(self, offset: int, whence: int = ...) -> int: ...
+    @abstractmethod
+    def seekable(self) -> bool: ...
+    @abstractmethod
+    def tell(self) -> int: ...
+    # TODO None should not be compatible with int
+    @abstractmethod
+    def truncate(self, size: int = ...) -> int: ...
+    @abstractmethod
+    def writable(self) -> bool: ...
+    # TODO buffer objects
+    @abstractmethod
+    def write(self, s: AnyStr) -> int: ...
+    @abstractmethod
+    def writelines(self, lines: Iterable[AnyStr]) -> None: ...
+
+    @abstractmethod
+    def __iter__(self) -> Iterator[AnyStr]: ...
+    @abstractmethod
+    def __enter__(self) -> 'IO[AnyStr]': ...
+    @abstractmethod
+    def __exit__(self, type, value, traceback) -> bool: ...
+
+class BinaryIO(IO[bytes]):
+    # TODO readinto
+    # TODO read1?
+    # TODO peek?
+    @overload
+    @abstractmethod
+    def write(self, s: bytes) -> int: ...
+    @overload
+    @abstractmethod
+    def write(self, s: bytearray) -> int: ...
+
+    @abstractmethod
+    def __enter__(self) -> BinaryIO: ...
+
+class TextIO(IO[str]):
+    # TODO use abstractproperty
+    @property
+    def buffer(self) -> BinaryIO: ...
+    @property
+    def encoding(self) -> str: ...
+    @property
+    def errors(self) -> str: ...
+    @property
+    def line_buffering(self) -> int: ...  # int on PyPy, bool on CPython
+    @property
+    def newlines(self) -> Any: ... # None, str or tuple
+    @abstractmethod
+    def __enter__(self) -> TextIO: ...
+
+class ByteString(Sequence[int]): ...
+
+class Match(Generic[AnyStr]):
+    pos = 0
+    endpos = 0
+    lastindex = 0
+    lastgroup = ...  # type: AnyStr
+    string = ...  # type: AnyStr
+
+    # The regular expression object whose match() or search() method produced
+    # this match instance.
+    re = ...  # type: 'Pattern[AnyStr]'
+
+    def expand(self, template: AnyStr) -> AnyStr: ...
+
+    @overload
+    def group(self, group1: int = ...) -> AnyStr: ...
+    @overload
+    def group(self, group1: str) -> AnyStr: ...
+    @overload
+    def group(self, group1: int, group2: int,
+              *groups: int) -> Sequence[AnyStr]: ...
+    @overload
+    def group(self, group1: str, group2: str,
+              *groups: str) -> Sequence[AnyStr]: ...
+
+    def groups(self, default: AnyStr = ...) -> Sequence[AnyStr]: ...
+    def groupdict(self, default: AnyStr = ...) -> dict[str, AnyStr]: ...
+    def start(self, group: int = ...) -> int: ...
+    def end(self, group: int = ...) -> int: ...
+    def span(self, group: int = ...) -> Tuple[int, int]: ...
+
+class Pattern(Generic[AnyStr]):
+    flags = 0
+    groupindex = 0
+    groups = 0
+    pattern = ...  # type: AnyStr
+
+    def search(self, string: AnyStr, pos: int = ...,
+               endpos: int = ...) -> Match[AnyStr]: ...
+    def match(self, string: AnyStr, pos: int = ...,
+              endpos: int = ...) -> Match[AnyStr]: ...
+    def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ...
+    def findall(self, string: AnyStr, pos: int = ...,
+                endpos: int = ...) -> list[AnyStr]: ...
+    def finditer(self, string: AnyStr, pos: int = ...,
+                 endpos: int = ...) -> Iterator[Match[AnyStr]]: ...
+
+    @overload
+    def sub(self, repl: AnyStr, string: AnyStr,
+            count: int = ...) -> AnyStr: ...
+    @overload
+    def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr,
+            count: int = ...) -> AnyStr: ...
+
+    @overload
+    def subn(self, repl: AnyStr, string: AnyStr,
+             count: int = ...) -> Tuple[AnyStr, int]: ...
+    @overload
+    def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr,
+             count: int = ...) -> Tuple[AnyStr, int]: ...
diff --git a/typeshed/stdlib/3/unicodedata.pyi b/typeshed/stdlib/3/unicodedata.pyi
new file mode 100644
index 0000000..08ad413
--- /dev/null
+++ b/typeshed/stdlib/3/unicodedata.pyi
@@ -0,0 +1,37 @@
+# Stubs for unicodedata (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+ucd_3_2_0 = ... # type: Any
+ucnhash_CAPI = ... # type: Any
+unidata_version = ... # type: str
+
+def bidirectional(unichr): ...
+def category(unichr): ...
+def combining(unichr): ...
+def decimal(chr, default=...): ...
+def decomposition(unichr): ...
+def digit(chr, default=...): ...
+def east_asian_width(unichr): ...
+def lookup(name): ...
+def mirrored(unichr): ...
+def name(chr, default=...): ...
+def normalize(form, unistr): ...
+def numeric(chr, default=...): ...
+
+class UCD:
+    unidata_version = ... # type: Any
+    def bidirectional(self, unichr): ...
+    def category(self, unichr): ...
+    def combining(self, unichr): ...
+    def decimal(self, chr, default=...): ...
+    def decomposition(self, unichr): ...
+    def digit(self, chr, default=...): ...
+    def east_asian_width(self, unichr): ...
+    def lookup(self, name): ...
+    def mirrored(self, unichr): ...
+    def name(self, chr, default=...): ...
+    def normalize(self, form, unistr): ...
+    def numeric(self, chr, default=...): ...
diff --git a/typeshed/stdlib/3/unittest.pyi b/typeshed/stdlib/3/unittest.pyi
new file mode 100644
index 0000000..2669999
--- /dev/null
+++ b/typeshed/stdlib/3/unittest.pyi
@@ -0,0 +1,167 @@
+# Stubs for unittest
+
+# Based on http://docs.python.org/3.0/library/unittest.html
+
+# NOTE: These stubs are based on the 3.0 version API, since later versions
+#       would require features not currently supported by mypy.
+
+# Only a subset of functionality is included.
+
+from typing import (
+    Any, Callable, Iterable, Tuple, List, TextIO, Sequence,
+    overload, TypeVar, Pattern
+)
+from abc import abstractmethod, ABCMeta
+
+_T = TypeVar('_T')
+_FT = TypeVar('_FT')
+
+class Testable(metaclass=ABCMeta):
+    @abstractmethod
+    def run(self, result: 'TestResult') -> None: ...
+    @abstractmethod
+    def debug(self) -> None: ...
+    @abstractmethod
+    def countTestCases(self) -> int: ...
+
+# TODO ABC for test runners?
+
+class TestResult:
+    errors = ... # type: List[Tuple[Testable, str]]
+    failures = ... # type: List[Tuple[Testable, str]]
+    testsRun = 0
+    shouldStop = False
+
+    def wasSuccessful(self) -> bool: ...
+    def stop(self) -> None: ...
+    def startTest(self, test: Testable) -> None: ...
+    def stopTest(self, test: Testable) -> None: ...
+    def addError(self, test: Testable,
+                  err: Tuple[type, Any, Any]) -> None: ... # TODO
+    def addFailure(self, test: Testable,
+                    err: Tuple[type, Any, Any]) -> None: ... # TODO
+    def addSuccess(self, test: Testable) -> None: ...
+
+class _AssertRaisesBaseContext:
+    expected = ... # type: Any
+    failureException = ... # type: type
+    obj_name = ...  # type: str
+    expected_regex = ... # type: Pattern[str]
+
+class _AssertRaisesContext(_AssertRaisesBaseContext):
+    exception = ... # type: Any # TODO precise type
+    def __enter__(self) -> _AssertRaisesContext: ...
+    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
+
+class _AssertWarnsContext(_AssertRaisesBaseContext):
+    warning = ... # type: Any # TODO precise type
+    filename = ...  # type: str
+    lineno = 0
+    def __enter__(self) -> _AssertWarnsContext: ...
+    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
+
+class TestCase(Testable):
+    def __init__(self, methodName: str = ...) -> None: ...
+    # TODO failureException
+    def setUp(self) -> None: ...
+    def tearDown(self) -> None: ...
+    def run(self, result: TestResult = ...) -> None: ...
+    def debug(self) -> None: ...
+    def assert_(self, expr: Any, msg: object = ...) -> None: ...
+    def failUnless(self, expr: Any, msg: object = ...) -> None: ...
+    def assertTrue(self, expr: Any, msg: object = ...) -> None: ...
+    def assertEqual(self, first: Any, second: Any,
+                    msg: object = ...) -> None: ...
+    def failUnlessEqual(self, first: Any, second: Any,
+                        msg: object = ...) -> None: ...
+    def assertNotEqual(self, first: Any, second: Any,
+                       msg: object = ...) -> None: ...
+    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
+                            msg: object = ...,
+                            seq_type: type = ...) -> None: ...
+    def failIfEqual(self, first: Any, second: Any,
+                    msg: object = ...) -> None: ...
+    def assertAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: object = ...,
+                          delta: float = ...) -> None: ...
+    def failUnlessAlmostEqual(self, first: float, second: float,
+                              places: int = ...,
+                              msg: object = ...) -> None: ...
+    def assertNotAlmostEqual(self, first: float, second: float,
+                             places: int = ..., msg: object = ...,
+                             delta: float = ...) -> None: ...
+    def failIfAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: object = ...) -> None: ...
+    def assertGreater(self, first: Any, second: Any,
+                      msg: object = ...) -> None: ...
+    def assertGreaterEqual(self, first: Any, second: Any,
+                      msg: object = ...) -> None: ...
+    def assertLess(self, first: Any, second: Any,
+                   msg: object = ...) -> None: ...
+    def assertLessEqual(self, first: Any, second: Any,
+                        msg: object = ...) -> None: ...
+    # TODO: If callableObj is None, the return value is None.
+    def assertRaises(self, excClass: type, callableObj: Any = ...,
+                     *args: Any, **kwargs: Any) -> _AssertRaisesContext: ...
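+    # Illustration only (not part of the upstream stub): the signature above is
+    # meant to cover both call styles, e.g.
+    #     self.assertRaises(ValueError, int, 'x')
+    #     with self.assertRaises(ValueError): int('x')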
+    def failIf(self, expr: Any, msg: object = ...) -> None: ...
+    def assertFalse(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIs(self, first: object, second: object,
+                 msg: object = ...) -> None: ...
+    def assertIsNot(self, first: object, second: object,
+                    msg: object = ...) -> None: ...
+    def assertIsNone(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIsNotNone(self, expr: Any, msg: object = ...) -> None: ...
+    def assertIn(self, first: _T, second: Iterable[_T],
+                 msg: object = ...) -> None: ...
+    def assertNotIn(self, first: _T, second: Iterable[_T],
+                    msg: object = ...) -> None: ...
+    def assertIsInstance(self, obj: Any, cls: type,
+                         msg: object = ...) -> None: ...
+    def assertNotIsInstance(self, obj: Any, cls: type,
+                            msg: object = ...) -> None: ...
+    def assertWarns(self, expected_warning: type, callable_obj: Any = ...,
+                    *args: Any, **kwargs: Any) -> _AssertWarnsContext: ...
+    def fail(self, msg: object = ...) -> None: ...
+    def countTestCases(self) -> int: ...
+    def defaultTestResult(self) -> TestResult: ...
+    def id(self) -> str: ...
+    def shortDescription(self) -> str: ... # May return None
+    def addCleanup(self, function: Any, *args: Any, **kwargs: Any) -> None: ...
+    def skipTest(self, reason: Any) -> None: ...
+
+class CallableTestCase(Testable):
+    def __init__(self, testFunc: Callable[[], None],
+                 setUp: Callable[[], None] = ...,
+                 tearDown: Callable[[], None] = ...,
+                 description: str = ...) -> None: ...
+    def run(self, result: TestResult) -> None: ...
+    def debug(self) -> None: ...
+    def countTestCases(self) -> int: ...
+
+class TestSuite(Testable):
+    def __init__(self, tests: Iterable[Testable] = ...) -> None: ...
+    def addTest(self, test: Testable) -> None: ...
+    def addTests(self, tests: Iterable[Testable]) -> None: ...
+    def run(self, result: TestResult) -> None: ...
+    def debug(self) -> None: ...
+    def countTestCases(self) -> int: ...
+
+# TODO TestLoader
+# TODO defaultTestLoader
+
+class TextTestRunner:
+    def __init__(self, stream: TextIO = ..., descriptions: bool = ...,
+                 verbosity: int = ..., failfast: bool = ...) -> None: ...
+
+class SkipTest(Exception):
+    ...
+
+# TODO precise types
+def skipUnless(condition: Any, reason: str) -> Any: ...
+def skipIf(condition: Any, reason: str) -> Any: ...
+def expectedFailure(func: _FT) -> _FT: ...
+def skip(reason: str) -> Any: ...
+
+def main(module: str = ..., defaultTest: str = ...,
+         argv: List[str] = ..., testRunner: Any = ...,
+         testLoader: Any = ...) -> None: ... # TODO types
diff --git a/typeshed/stdlib/3/urllib/__init__.pyi b/typeshed/stdlib/3/urllib/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/urllib/error.pyi b/typeshed/stdlib/3/urllib/error.pyi
new file mode 100644
index 0000000..2792964
--- /dev/null
+++ b/typeshed/stdlib/3/urllib/error.pyi
@@ -0,0 +1,5 @@
+# Stubs for urllib.error
+
+class URLError(IOError): ...
+class HTTPError(URLError): ...
+class ContentTooShortError(URLError): ...
diff --git a/typeshed/stdlib/3/urllib/parse.pyi b/typeshed/stdlib/3/urllib/parse.pyi
new file mode 100644
index 0000000..1a453dc
--- /dev/null
+++ b/typeshed/stdlib/3/urllib/parse.pyi
@@ -0,0 +1,133 @@
+# Stubs for urllib.parse
+from typing import List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping
+
+__all__ = (
+    'urlparse',
+    'urlunparse',
+    'urljoin',
+    'urldefrag',
+    'urlsplit',
+    'urlunsplit',
+    'urlencode',
+    'parse_qs',
+    'parse_qsl',
+    'quote',
+    'quote_plus',
+    'quote_from_bytes',
+    'unquote',
+    'unquote_plus',
+    'unquote_to_bytes'
+)
+
+uses_relative = []  # type: List[str]
+uses_netloc = []  # type: List[str]
+uses_params = []  # type: List[str]
+non_hierarchical = []  # type: List[str]
+uses_query = []  # type: List[str]
+uses_fragment = []  # type: List[str]
+scheme_chars = ...  # type: str
+MAX_CACHE_SIZE = 0
+
+class _ResultMixinBase(Generic[AnyStr]):
+    def geturl(self) -> AnyStr: ...
+
+class _ResultMixinStr(_ResultMixinBase[str]):
+    def encode(self, encoding: str = ..., errors: str = ...) -> '_ResultMixinBytes': ...
+
+
+class _ResultMixinBytes(_ResultMixinBase[bytes]):
+    def decode(self, encoding: str = ..., errors: str = ...) -> '_ResultMixinStr': ...
+
+
+class _NetlocResultMixinBase(Generic[AnyStr]):
+    username = ... # type: AnyStr
+    password = ... # type: AnyStr
+    hostname = ... # type: AnyStr
+    port = ... # type: int
+
+class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ...
+
+
+class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ...
+
+class _DefragResultBase(tuple, Generic[AnyStr]):
+    url = ... # type: AnyStr
+    fragment = ... # type: AnyStr
+
+class _SplitResultBase(tuple, Generic[AnyStr]):
+    scheme = ... # type: AnyStr
+    netloc = ... # type: AnyStr
+    path = ... # type: AnyStr
+    query = ... # type: AnyStr
+    fragment = ... # type: AnyStr
+
+class _ParseResultBase(tuple, Generic[AnyStr]):
+    scheme = ... # type: AnyStr
+    netloc = ... # type: AnyStr
+    path = ... # type: AnyStr
+    params = ... # type: AnyStr
+    query = ... # type: AnyStr
+    fragment = ... # type: AnyStr
+
+# Structured result objects for string data
+class DefragResult(_DefragResultBase[str], _ResultMixinStr): ...
+
+class SplitResult(_SplitResultBase[str], _NetlocResultMixinStr): ...
+
+class ParseResult(_ParseResultBase[str], _NetlocResultMixinStr): ...
+
+# Structured result objects for bytes data
+class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): ...
+
+class SplitResultBytes(_SplitResultBase[bytes], _NetlocResultMixinBytes): ...
+
+class ParseResultBytes(_ParseResultBase[bytes], _NetlocResultMixinBytes): ...
+
+
+def parse_qs(qs: str, keep_blank_values : bool = ..., strict_parsing : bool = ..., encoding : str = ..., errors: str = ...) -> Dict[str, List[str]]: ...
+
+def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[str,str]]: ...
+
+def quote(string: AnyStr, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+
+def quote_from_bytes(bs: bytes, safe: AnyStr = ...) -> bytes: ...
+
+def quote_plus(string: AnyStr, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+
+def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ...
+
+def unquote_to_bytes(string: AnyStr) -> bytes: ...
+
+def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ...
+
+@overload
+def urldefrag(url: str) -> DefragResult: ...
+@overload
+def urldefrag(url: bytes) -> DefragResultBytes: ...
+
+@overload
+def urlencode(query: Mapping[AnyStr, AnyStr], doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+@overload
+def urlencode(query: Sequence[Tuple[AnyStr, AnyStr]], doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+
+def urljoin(base: AnyStr, url: AnyStr, allow_fragments: bool = ...) -> AnyStr: ...
+
+@overload
+def urlparse(url: str, scheme: str = ..., allow_fragments: bool = ...) -> ParseResult: ...
+@overload
+def urlparse(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> ParseResultBytes: ...
+
+@overload
+def urlsplit(url: str, scheme: str = ..., allow_fragments: bool = ...) -> SplitResult: ...
+@overload
+def urlsplit(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> SplitResultBytes: ...
+
+@overload
+def urlunparse(components: Sequence[AnyStr]) -> AnyStr: ...
+@overload
+def urlunparse(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ...
+
+@overload
+def urlunsplit(components: Sequence[AnyStr]) -> AnyStr: ...
+@overload
+def urlunsplit(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ...
diff --git a/typeshed/stdlib/3/urllib/request.pyi b/typeshed/stdlib/3/urllib/request.pyi
new file mode 100644
index 0000000..3aeb20f
--- /dev/null
+++ b/typeshed/stdlib/3/urllib/request.pyi
@@ -0,0 +1,15 @@
+# Stubs for urllib.request
+
+# NOTE: These are incomplete!
+
+from typing import Any
+
+class BaseHandler(): ...
+class HTTPRedirectHandler(BaseHandler): ...
+class OpenerDirector(): ...
+
+# TODO args should be types that extend BaseHandler (types, not instances)
+def build_opener(*args: Any) -> OpenerDirector: ...
+def install_opener(opener: OpenerDirector) -> None: ...
+
+def proxy_bypass(host): ...
diff --git a/typeshed/stdlib/3/urllib/response.pyi b/typeshed/stdlib/3/urllib/response.pyi
new file mode 100644
index 0000000..20b01ea
--- /dev/null
+++ b/typeshed/stdlib/3/urllib/response.pyi
@@ -0,0 +1,32 @@
+from typing import Any
+
+class addbase:
+    fp = ... # type: Any
+    read = ... # type: Any
+    readline = ... # type: Any
+    readlines = ... # type: Any
+    fileno = ... # type: Any
+    __iter__ = ... # type: Any
+    next = ... # type: Any
+    def __init__(self, fp) -> None: ...
+    def close(self): ...
+
+class addclosehook(addbase):
+    closehook = ... # type: Any
+    hookargs = ... # type: Any
+    def __init__(self, fp, closehook, *hookargs) -> None: ...
+    def close(self): ...
+
+class addinfo(addbase):
+    headers = ... # type: Any
+    def __init__(self, fp, headers) -> None: ...
+    def info(self): ...
+
+class addinfourl(addbase):
+    headers = ... # type: Any
+    url = ... # type: Any
+    code = ... # type: Any
+    def __init__(self, fp, headers, url, code=...) -> None: ...
+    def info(self): ...
+    def getcode(self): ...
+    def geturl(self): ...
diff --git a/typeshed/stdlib/3/urllib/robotparser.pyi b/typeshed/stdlib/3/urllib/robotparser.pyi
new file mode 100644
index 0000000..403039a
--- /dev/null
+++ b/typeshed/stdlib/3/urllib/robotparser.pyi
@@ -0,0 +1,7 @@
+class RobotFileParser:
+    def set_url(self, url: str): ...
+    def read(self): ...
+    def parse(self, lines: str): ...
+    def can_fetch(self, user_agent: str, url: str): ...
+    def mtime(self): ...
+    def modified(self): ...
diff --git a/typeshed/stdlib/3/uuid.pyi b/typeshed/stdlib/3/uuid.pyi
new file mode 100644
index 0000000..8c51d99
--- /dev/null
+++ b/typeshed/stdlib/3/uuid.pyi
@@ -0,0 +1,73 @@
+# Stubs for uuid
+
+from typing import Tuple
+
+Int = __builtins__.int
+Bytes = __builtins__.bytes
+FieldsType = Tuple[Int, Int, Int, Int, Int, Int]
+
+class UUID:
+    def __init__(self, hex: str=..., bytes: Bytes=..., bytes_le: Bytes=..., fields: FieldsType=..., int: Int=..., version: Int=...) -> None: ...
+
+    @property
+    def bytes(self) -> Bytes: ...
+
+    @property
+    def bytes_le(self) -> Bytes: ...
+
+    @property
+    def clock_seq(self) -> Int: ...
+
+    @property
+    def clock_seq_hi_variant(self) -> Int: ...
+
+    @property
+    def clock_seq_low(self) -> Int: ...
+
+    @property
+    def fields(self) -> FieldsType: ...
+
+    @property
+    def hex(self) -> str: ...
+
+    @property
+    def int(self) -> Int: ...
+
+    @property
+    def node(self) -> Int: ...
+
+    @property
+    def time(self) -> Int: ...
+
+    @property
+    def time_hi_version(self) -> Int: ...
+
+    @property
+    def time_low(self) -> Int: ...
+
+    @property
+    def time_mid(self) -> Int: ...
+
+    @property
+    def urn(self) -> str: ...
+
+    @property
+    def variant(self) -> str: ...
+
+    @property
+    def version(self) -> Int: ...
+
+def getnode() -> Int: ...
+def uuid1(node: Int=..., clock_seq: Int=...) -> UUID: ...
+def uuid3(namespace: UUID, name: str) -> UUID: ...
+def uuid4() -> UUID: ...
+def uuid5(namespace: UUID, name: str) -> UUID: ...
+
+NAMESPACE_DNS = ... # type: UUID
+NAMESPACE_URL = ... # type: UUID
+NAMESPACE_OID = ... # type: UUID
+NAMESPACE_X500 = ... # type: UUID
+RESERVED_NCS = ... # type: str
+RFC_4122 = ... # type: str
+RESERVED_MICROSOFT = ... # type: str
+RESERVED_FUTURE = ... # type: str
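
A short sketch of the uuid API covered by this stub (illustrative stdlib usage):

    import uuid

    u1 = uuid.uuid1()                                   # time-based UUID
    u4 = uuid.uuid4()                                    # random UUID
    u5 = uuid.uuid5(uuid.NAMESPACE_DNS, "example.org")   # name-based UUID (SHA-1)
    print(u4.hex, u4.int, u4.version)
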
diff --git a/typeshed/stdlib/3/warnings.pyi b/typeshed/stdlib/3/warnings.pyi
new file mode 100644
index 0000000..c15d8bc
--- /dev/null
+++ b/typeshed/stdlib/3/warnings.pyi
@@ -0,0 +1,33 @@
+# Stubs for warnings
+
+# Based on http://docs.python.org/3.2/library/warnings.html
+
+from typing import Any, List, TextIO, Union
+
+def warn(message: Union[str, Warning], category: type = ...,
+         stacklevel: int = ...) -> None: ...
+
+def warn_explicit(message: Union[str, Warning], category: type, filename: str,
+                  lineno: int, module: str = ..., registry: Any = ...,
+                  module_globals: Any = ...) -> None: ...
+
+# logging modifies showwarning => make it a variable.
+def _showwarning(message: str, category: type, filename: str, lineno: int,
+                 file: TextIO = ..., line: str = ...) -> None: ...
+showwarning = _showwarning
+
+def formatwarning(message: str, category: type, filename: str, lineno: int,
+                  line: str = ...) -> str: ...
+def filterwarnings(action: str, message: str = ..., category: type = ...,
+                   module: str = ..., lineno: int = ...,
+                   append: bool = ...) -> None: ...
+def simplefilter(action: str, category: type = ..., lineno: int = ...,
+                 append: bool = ...) -> None: ...
+def resetwarnings() -> None: ...
+
+class catch_warnings:
+    # TODO record and module must be keyword arguments!
+    # TODO type of module?
+    def __init__(self, record: bool = ..., module: Any = ...) -> None: ...
+    def __enter__(self) -> List[Any]: ...
+    def __exit__(self, type, value, traceback) -> bool: ...
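
Illustrative use of catch_warnings and simplefilter as typed above; with
record=True the context manager yields the list declared as the __enter__ result:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        warnings.warn("something happened", UserWarning)
        assert len(caught) == 1
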
diff --git a/typeshed/stdlib/3/weakref.pyi b/typeshed/stdlib/3/weakref.pyi
new file mode 100644
index 0000000..08d31ac
--- /dev/null
+++ b/typeshed/stdlib/3/weakref.pyi
@@ -0,0 +1,71 @@
+# Stubs for weakref
+
+# NOTE: These are incomplete!
+
+from typing import (
+    TypeVar, Generic, Any, Callable, overload, Mapping, Iterator, Dict, Tuple,
+    Iterable, Optional
+)
+
+_T = TypeVar('_T')
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+
+class ReferenceType(Generic[_T]):
+    # TODO rest of members
+    def __call__(self) -> Optional[_T]:
+        ...
+
+def ref(o: _T, callback: Callable[[ReferenceType[_T]],
+                                 Any] = ...) -> ReferenceType[_T]: ...
+
+# TODO callback
+def proxy(object: _T) -> _T: ...
+
+class WeakValueDictionary(Generic[_KT, _VT]):
+    # TODO tuple iterable argument?
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+
+    def __len__(self) -> int: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __contains__(self, o: object) -> bool: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __str__(self) -> str: ...
+
+    def clear(self) -> None: ...
+    def copy(self) -> Dict[_KT, _VT]: ...
+
+    @overload
+    def get(self, k: _KT) -> _VT: ...
+    @overload
+    def get(self, k: _KT, default: _VT) -> _VT: ...
+
+    @overload
+    def pop(self, k: _KT) -> _VT: ...
+    @overload
+    def pop(self, k: _KT, default: _VT) -> _VT: ...
+
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+
+    @overload
+    def setdefault(self, k: _KT) -> _VT: ...
+    @overload
+    def setdefault(self, k: _KT, default: _VT) -> _VT: ...
+
+    @overload
+    def update(self, m: Mapping[_KT, _VT]) -> None: ...
+    @overload
+    def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+
+    # NOTE: incompatible with Mapping
+    def keys(self) -> Iterator[_KT]: ...
+    def values(self) -> Iterator[_VT]: ...
+    def items(self) -> Iterator[Tuple[_KT, _VT]]: ...
+
+    # TODO return type
+    def valuerefs(self) -> Iterable[Any]: ...
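
A minimal sketch exercising ref() and WeakValueDictionary as declared above
(illustrative; class C is hypothetical):

    import weakref

    class C:
        pass

    obj = C()
    r = weakref.ref(obj)       # ReferenceType[C]
    assert r() is obj          # dereferences to obj, or None once collected

    d = weakref.WeakValueDictionary()
    d["key"] = obj
    print(list(d.items()))     # contains ('key', obj) while obj is alive
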
diff --git a/typeshed/stdlib/3/xml/__init__.pyi b/typeshed/stdlib/3/xml/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..3369c9e
--- /dev/null
+++ b/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
@@ -0,0 +1,14 @@
+# Stubs for xml.etree.ElementInclude (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+XINCLUDE = ... # type: Any
+XINCLUDE_INCLUDE = ... # type: Any
+XINCLUDE_FALLBACK = ... # type: Any
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href, parse, encoding=...): ...
+def include(elem, loader=...): ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementPath.pyi b/typeshed/stdlib/3/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..dee3bb8
--- /dev/null
+++ b/typeshed/stdlib/3/xml/etree/ElementPath.pyi
@@ -0,0 +1,28 @@
+# Stubs for xml.etree.ElementPath (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+xpath_tokenizer_re = ... # type: Any
+
+def xpath_tokenizer(pattern, namespaces=...): ...
+def get_parent_map(context): ...
+def prepare_child(next, token): ...
+def prepare_star(next, token): ...
+def prepare_self(next, token): ...
+def prepare_descendant(next, token): ...
+def prepare_parent(next, token): ...
+def prepare_predicate(next, token): ...
+
+ops = ... # type: Any
+
+class _SelectorContext:
+    parent_map = ... # type: Any
+    root = ... # type: Any
+    def __init__(self, root) -> None: ...
+
+def iterfind(elem, path, namespaces=...): ...
+def find(elem, path, namespaces=...): ...
+def findall(elem, path, namespaces=...): ...
+def findtext(elem, path, default=..., namespaces=...): ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..74cc977
--- /dev/null
+++ b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
@@ -0,0 +1,127 @@
+# Stubs for xml.etree.ElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import io
+
+VERSION = ... # type: Any
+
+class ParseError(SyntaxError): ...
+
+def iselement(element): ...
+
+class Element:
+    def __init__(self, tag, attrib=..., **extra) -> None: ...
+    def append(self, *args, **kwargs): ...
+    def clear(self, *args, **kwargs): ...
+    def extend(self, *args, **kwargs): ...
+    def find(self, *args, **kwargs): ...
+    def findall(self, *args, **kwargs): ...
+    def findtext(self, match, default=..., namespaces=...): ...
+    def get(self, *args, **kwargs): ...
+    def getchildren(self): ...
+    def getiterator(self, tag=...): ...
+    def insert(self, *args, **kwargs): ...
+    def items(self, *args, **kwargs): ...
+    def iter(self, *args, **kwargs): ...
+    def iterfind(self, match, namespaces=...): ...
+    def itertext(self): ...
+    def keys(self): ...
+    def makeelement(self, tag, attrib): ...
+    def remove(self, *args, **kwargs): ...
+    def set(self, *args, **kwargs): ...
+    def __copy__(self): ...
+    def __deepcopy__(self): ...
+    def __delattr__(self, name): ...
+    def __delitem__(self, name): ...
+    def __getitem__(self, name): ...
+    def __getstate__(self): ...
+    def __len__(self): ...
+    def __setattr__(self, name, value): ...
+    def __setitem__(self, index, object): ...
+    def __setstate__(self, state): ...
+    def __sizeof__(self): ...
+
+def SubElement(parent, tag, attrib=..., **extra): ...
+def Comment(text=...): ...
+def ProcessingInstruction(target, text=...): ...
+
+PI = ... # type: Any
+
+class QName:
+    text = ... # type: Any
+    def __init__(self, text_or_uri, tag=...) -> None: ...
+    def __hash__(self): ...
+    def __le__(self, other): ...
+    def __lt__(self, other): ...
+    def __ge__(self, other): ...
+    def __gt__(self, other): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class ElementTree:
+    def __init__(self, element=..., file=...) -> None: ...
+    def getroot(self): ...
+    def parse(self, source, parser=...): ...
+    def iter(self, tag=...): ...
+    def getiterator(self, tag=...): ...
+    def find(self, path, namespaces=...): ...
+    def findtext(self, path, default=..., namespaces=...): ...
+    def findall(self, path, namespaces=...): ...
+    def iterfind(self, path, namespaces=...): ...
+    def write(self, file_or_filename, encoding=..., xml_declaration=..., default_namespace=..., method=..., *, short_empty_elements=...): ...
+    def write_c14n(self, file): ...
+
+def register_namespace(prefix, uri): ...
+def tostring(element, encoding=..., method=..., *, short_empty_elements=...): ...
+
+class _ListDataStream(io.BufferedIOBase):
+    lst = ... # type: Any
+    def __init__(self, lst) -> None: ...
+    def writable(self): ...
+    def seekable(self): ...
+    def write(self, b): ...
+    def tell(self): ...
+
+def tostringlist(element, encoding=..., method=..., *, short_empty_elements=...): ...
+def dump(elem): ...
+def parse(source, parser=...): ...
+def iterparse(source, events=..., parser=...): ...
+
+class XMLPullParser:
+    def __init__(self, events=..., *, _parser=...) -> None: ...
+    def feed(self, data): ...
+    def close(self): ...
+    def read_events(self): ...
+
+class _IterParseIterator:
+    root = ... # type: Any
+    def __init__(self, source, events, parser, close_source=...) -> None: ...
+    def __next__(self): ...
+    def __iter__(self): ...
+
+def XML(text, parser=...): ...
+def XMLID(text, parser=...): ...
+
+fromstring = ... # type: Any
+
+def fromstringlist(sequence, parser=...): ...
+
+class TreeBuilder:
+    def __init__(self, element_factory=...) -> None: ...
+    def close(self): ...
+    def data(self, data): ...
+    def start(self, tag, attrs): ...
+    def end(self, tag): ...
+
+class XMLParser:
+    target = ... # type: Any
+    entity = ... # type: Any
+    version = ... # type: Any
+    def __init__(self, html=..., target=..., encoding=...) -> None: ...
+    def _parse_whole(self, *args, **kwargs): ...
+    def _setevents(self, *args, **kwargs): ...
+    def close(self, *args, **kwargs): ...
+    def doctype(self, name, pubid, system): ...
+    def feed(self, data): ...
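
A small usage sketch of the ElementTree API covered above (illustrative only; the
stub is dynamically typed, so most results are Any):

    import xml.etree.ElementTree as ET

    root = ET.fromstring("<root><item name='a'/><item name='b'/></root>")
    for item in root.findall("item"):
        print(item.get("name"))

    print(ET.tostring(root, encoding="unicode"))
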
diff --git a/typeshed/stdlib/3/xml/etree/__init__.pyi b/typeshed/stdlib/3/xml/etree/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/stdlib/3/xml/etree/cElementTree.pyi b/typeshed/stdlib/3/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/3/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/3/zipfile.pyi b/typeshed/stdlib/3/zipfile.pyi
new file mode 100644
index 0000000..5745674
--- /dev/null
+++ b/typeshed/stdlib/3/zipfile.pyi
@@ -0,0 +1,29 @@
+# TODO these are incomplete
+
+from typing import List, Tuple, BinaryIO, Union
+
+ZIP_STORED = 0
+ZIP_DEFLATED = 0
+
+def is_zipfile(filename: Union[str, BinaryIO]) -> bool: ...
+
+class ZipInfo:
+    filename = ...  # type: str
+    date_time = ... # type: Tuple[int, int, int, int, int, int]
+    compressed_size = 0
+    file_size = 0
+
+class ZipFile:
+    def __init__(self, file: Union[str, BinaryIO], mode: str = ...,
+                 compression: int = ...,
+                 allowZip64: bool = ...) -> None: ...
+    def close(self) -> None: ...
+    def getinfo(self, name: str) -> ZipInfo: ...
+    def infolist(self) -> List[ZipInfo]: ...
+    def namelist(self) -> List[str]: ...
+    def read(self, name: Union[str, ZipInfo], pwd: str = ...) -> bytes: ...
+    def write(self, filename: str, arcname: str = ...,
+              compress_type: int = ...) -> None: ...
+
+    def __enter__(self) -> 'ZipFile': ...
+    def __exit__(self, type, value, traceback) -> bool: ...
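
A ZipFile round-trip matching the declarations above (illustrative; the file names
are hypothetical):

    import zipfile

    with zipfile.ZipFile("archive.zip", mode="w",
                         compression=zipfile.ZIP_DEFLATED) as zf:
        zf.write("notes.txt", arcname="notes.txt")

    with zipfile.ZipFile("archive.zip") as zf:
        print(zf.namelist())
        data = zf.read("notes.txt")
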
diff --git a/typeshed/stdlib/3/zlib.pyi b/typeshed/stdlib/3/zlib.pyi
new file mode 100644
index 0000000..8aefc0e
--- /dev/null
+++ b/typeshed/stdlib/3/zlib.pyi
@@ -0,0 +1,32 @@
+# Stubs for zlib (Python 3.4)
+#
+# NOTE: This stub was automatically generated by stubgen.
+
+# TODO: Compress and Decompress classes are not published by the module.
+
+DEFLATED = ... # type: int
+DEF_BUF_SIZE = ... # type: int
+DEF_MEM_LEVEL = ... # type: int
+MAX_WBITS = ... # type: int
+ZLIB_RUNTIME_VERSION = ... # type: str
+ZLIB_VERSION = ... # type: str
+Z_BEST_COMPRESSION = ... # type: int
+Z_BEST_SPEED = ... # type: int
+Z_DEFAULT_COMPRESSION = ... # type: int
+Z_DEFAULT_STRATEGY = ... # type: int
+Z_FILTERED = ... # type: int
+Z_FINISH = ... # type: int
+Z_FULL_FLUSH = ... # type: int
+Z_HUFFMAN_ONLY = ... # type: int
+Z_NO_FLUSH = ... # type: int
+Z_SYNC_FLUSH = ... # type: int
+
+def adler32(data, value=...) -> int: ...
+def compress(data, level: int = ...): ...
+def compressobj(level=..., method=..., wbits=..., memlevel=...,
+                strategy=..., zdict=...): ...
+def crc32(data, value=...) -> int: ...
+def decompress(data, wbits=..., bufsize=...): ...
+def decompressobj(wbits=..., zdict=...): ...
+
+class error(Exception): ...
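
A compress/decompress round trip for the zlib functions typed above (illustrative):

    import zlib

    raw = b"hello world" * 100
    packed = zlib.compress(raw, 9)          # 9 == Z_BEST_COMPRESSION
    assert zlib.decompress(packed) == raw
    print(zlib.crc32(raw), zlib.adler32(raw))
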
diff --git a/typeshed/third_party/2.7/Crypto/Cipher/AES.pyi b/typeshed/third_party/2.7/Crypto/Cipher/AES.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/Crypto/Cipher/__init__.pyi b/typeshed/third_party/2.7/Crypto/Cipher/__init__.pyi
new file mode 100644
index 0000000..b6e2a04
--- /dev/null
+++ b/typeshed/third_party/2.7/Crypto/Cipher/__init__.pyi
@@ -0,0 +1,15 @@
+# Stubs for Crypto.Cipher (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# Names in __all__ with no definition:
+#   AES
+#   ARC2
+#   ARC4
+#   Blowfish
+#   CAST
+#   DES
+#   DES3
+#   PKCS1_OAEP
+#   PKCS1_v1_5
+#   XOR
diff --git a/typeshed/third_party/2.7/Crypto/Random/__init__.pyi b/typeshed/third_party/2.7/Crypto/Random/__init__.pyi
new file mode 100644
index 0000000..c13ab82
--- /dev/null
+++ b/typeshed/third_party/2.7/Crypto/Random/__init__.pyi
@@ -0,0 +1,5 @@
+# Stubs for Crypto.Random (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def get_random_bytes(n: int) -> str: ...
diff --git a/typeshed/third_party/2.7/Crypto/Random/random.pyi b/typeshed/third_party/2.7/Crypto/Random/random.pyi
new file mode 100644
index 0000000..76f2780
--- /dev/null
+++ b/typeshed/third_party/2.7/Crypto/Random/random.pyi
@@ -0,0 +1,3 @@
+# very stubby version of Crypto.Random
+
+def randint(min: int, max: int) -> int: ...
diff --git a/typeshed/third_party/2.7/Crypto/__init__.pyi b/typeshed/third_party/2.7/Crypto/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/OpenSSL/__init__.pyi b/typeshed/third_party/2.7/OpenSSL/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/OpenSSL/crypto.pyi b/typeshed/third_party/2.7/OpenSSL/crypto.pyi
new file mode 100644
index 0000000..ef352ac
--- /dev/null
+++ b/typeshed/third_party/2.7/OpenSSL/crypto.pyi
@@ -0,0 +1,6 @@
+class X509:
+    ...
+
+def sign(key: str, data: str, digest: str) -> str: ...
+def verify(certificate: X509, signature: str, data: str, digest: str) -> None:
+    raise Exception()
diff --git a/typeshed/third_party/2.7/boto/__init__.pyi b/typeshed/third_party/2.7/boto/__init__.pyi
new file mode 100644
index 0000000..bb0631f
--- /dev/null
+++ b/typeshed/third_party/2.7/boto/__init__.pyi
@@ -0,0 +1,78 @@
+# Stubs for boto (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import logging
+
+Version = ... # type: Any
+UserAgent = ... # type: Any
+config = ... # type: Any
+BUCKET_NAME_RE = ... # type: Any
+TOO_LONG_DNS_NAME_COMP = ... # type: Any
+GENERATION_RE = ... # type: Any
+VERSION_RE = ... # type: Any
+ENDPOINTS_PATH = ... # type: Any
+
+def init_logging(): ...
+
+class NullHandler(logging.Handler):
+    def emit(self, record): ...
+
+log = ... # type: Any
+perflog = ... # type: Any
+
+def set_file_logger(name, filepath, level=..., format_string=...): ...
+def set_stream_logger(name, level=..., format_string=...): ...
+def connect_sqs(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_s3(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_gs(gs_access_key_id=..., gs_secret_access_key=..., **kwargs): ...
+def connect_ec2(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_elb(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_autoscale(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudwatch(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_sdb(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_fps(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_mturk(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudfront(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_vpc(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_rds(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_rds2(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_emr(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_sns(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_iam(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_route53(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudformation(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_euca(host=..., aws_access_key_id=..., aws_secret_access_key=..., port=..., path=..., is_secure=..., **kwargs): ...
+def connect_glacier(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_ec2_endpoint(url, aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_walrus(host=..., aws_access_key_id=..., aws_secret_access_key=..., port=..., path=..., is_secure=..., **kwargs): ...
+def connect_ses(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_sts(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_ia(ia_access_key_id=..., ia_secret_access_key=..., is_secure=..., **kwargs): ...
+def connect_dynamodb(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_swf(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudsearch(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudsearch2(aws_access_key_id=..., aws_secret_access_key=..., sign_request=..., **kwargs): ...
+def connect_cloudsearchdomain(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_beanstalk(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_elastictranscoder(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_opsworks(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_redshift(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_support(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudtrail(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_directconnect(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_kinesis(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_logs(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_route53domains(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cognito_identity(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cognito_sync(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_kms(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_awslambda(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_codedeploy(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_configservice(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_cloudhsm(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_ec2containerservice(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def connect_machinelearning(aws_access_key_id=..., aws_secret_access_key=..., **kwargs): ...
+def storage_uri(uri_str, default_scheme=..., debug=..., validate=..., bucket_storage_uri_class=..., suppress_consec_slashes=..., is_latest=...): ...
+def storage_uri_for_key(key): ...
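
A minimal sketch of the connect_* helpers declared above (third-party boto,
illustrative only; the credentials are placeholders):

    import boto

    s3 = boto.connect_s3(aws_access_key_id="ACCESS_KEY",
                         aws_secret_access_key="SECRET_KEY")
    ec2 = boto.connect_ec2(aws_access_key_id="ACCESS_KEY",
                           aws_secret_access_key="SECRET_KEY")
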
diff --git a/typeshed/third_party/2.7/boto/connection.pyi b/typeshed/third_party/2.7/boto/connection.pyi
new file mode 100644
index 0000000..871cb57
--- /dev/null
+++ b/typeshed/third_party/2.7/boto/connection.pyi
@@ -0,0 +1,108 @@
+# Stubs for boto.connection (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+HAVE_HTTPS_CONNECTION = ... # type: Any
+ON_APP_ENGINE = ... # type: Any
+PORTS_BY_SECURITY = ... # type: Any
+DEFAULT_CA_CERTS_FILE = ... # type: Any
+
+class HostConnectionPool:
+    queue = ... # type: Any
+    def __init__(self) -> None: ...
+    def size(self): ...
+    def put(self, conn): ...
+    def get(self): ...
+    def clean(self): ...
+
+class ConnectionPool:
+    CLEAN_INTERVAL = ... # type: Any
+    STALE_DURATION = ... # type: Any
+    host_to_pool = ... # type: Any
+    last_clean_time = ... # type: Any
+    mutex = ... # type: Any
+    def __init__(self) -> None: ...
+    def size(self): ...
+    def get_http_connection(self, host, port, is_secure): ...
+    def put_http_connection(self, host, port, is_secure, conn): ...
+    def clean(self): ...
+
+class HTTPRequest:
+    method = ... # type: Any
+    protocol = ... # type: Any
+    host = ... # type: Any
+    port = ... # type: Any
+    path = ... # type: Any
+    auth_path = ... # type: Any
+    params = ... # type: Any
+    headers = ... # type: Any
+    body = ... # type: Any
+    def __init__(self, method, protocol, host, port, path, auth_path, params, headers, body) -> None: ...
+    def authorize(self, connection, **kwargs): ...
+
+class AWSAuthConnection:
+    suppress_consec_slashes = ... # type: Any
+    num_retries = ... # type: Any
+    is_secure = ... # type: Any
+    https_validate_certificates = ... # type: Any
+    ca_certificates_file = ... # type: Any
+    port = ... # type: Any
+    http_exceptions = ... # type: Any
+    http_unretryable_exceptions = ... # type: Any
+    socket_exception_values = ... # type: Any
+    https_connection_factory = ... # type: Any
+    protocol = ... # type: Any
+    host = ... # type: Any
+    path = ... # type: Any
+    debug = ... # type: Any
+    host_header = ... # type: Any
+    http_connection_kwargs = ... # type: Any
+    provider = ... # type: Any
+    auth_service_name = ... # type: Any
+    request_hook = ... # type: Any
+    def __init__(self, host, aws_access_key_id=..., aws_secret_access_key=..., is_secure=..., port=..., proxy=..., proxy_port=..., proxy_user=..., proxy_pass=..., debug=..., https_connection_factory=..., path=..., provider=..., security_token=..., suppress_consec_slashes=..., validate_certs=..., profile_name=...) -> None: ...
+    auth_region_name = ... # type: Any
+    def connection(self): ...
+    def aws_access_key_id(self): ...
+    gs_access_key_id = ... # type: Any
+    access_key = ... # type: Any
+    def aws_secret_access_key(self): ...
+    gs_secret_access_key = ... # type: Any
+    secret_key = ... # type: Any
+    def profile_name(self): ...
+    def get_path(self, path=...): ...
+    def server_name(self, port=...): ...
+    proxy = ... # type: Any
+    proxy_port = ... # type: Any
+    proxy_user = ... # type: Any
+    proxy_pass = ... # type: Any
+    no_proxy = ... # type: Any
+    use_proxy = ... # type: Any
+    def handle_proxy(self, proxy, proxy_port, proxy_user, proxy_pass): ...
+    def get_http_connection(self, host, port, is_secure): ...
+    def skip_proxy(self, host): ...
+    def new_http_connection(self, host, port, is_secure): ...
+    def put_http_connection(self, host, port, is_secure, connection): ...
+    def proxy_ssl(self, host=..., port=...): ...
+    def prefix_proxy_to_path(self, path, host=...): ...
+    def get_proxy_auth_header(self): ...
+    def get_proxy_url_with_auth(self): ...
+    def set_host_header(self, request): ...
+    def set_request_hook(self, hook): ...
+    def build_base_http_request(self, method, path, auth_path, params=..., headers=..., data=..., host=...): ...
+    def make_request(self, method, path, headers=..., data=..., host=..., auth_path=..., sender=..., override_num_retries=..., params=..., retry_handler=...): ...
+    def close(self): ...
+
+class AWSQueryConnection(AWSAuthConnection):
+    APIVersion = ... # type: Any
+    ResponseError = ... # type: Any
+    def __init__(self, aws_access_key_id=..., aws_secret_access_key=..., is_secure=..., port=..., proxy=..., proxy_port=..., proxy_user=..., proxy_pass=..., host=..., debug=..., https_connection_factory=..., path=..., security_token=..., validate_certs=..., profile_name=..., provider=...) -> None: ...
+    def get_utf8_value(self, value): ...
+    def make_request(self, action, params=..., path=..., verb=...): ...
+    def build_list_params(self, params, items, label): ...
+    def build_complex_list_params(self, params, items, label, names): ...
+    def get_list(self, action, params, markers, path=..., parent=..., verb=...): ...
+    def get_object(self, action, params, cls, path=..., parent=..., verb=...): ...
+    def get_status(self, action, params, path=..., parent=..., verb=...): ...
diff --git a/typeshed/third_party/2.7/boto/ec2/__init__.pyi b/typeshed/third_party/2.7/boto/ec2/__init__.pyi
new file mode 100644
index 0000000..67908a5
--- /dev/null
+++ b/typeshed/third_party/2.7/boto/ec2/__init__.pyi
@@ -0,0 +1,11 @@
+# Stubs for boto.ec2 (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+RegionData = ... # type: Any
+
+def regions(**kw_params): ...
+def connect_to_region(region_name, **kw_params): ...
+def get_region(region_name, **kw_params): ...
diff --git a/typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi b/typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi
new file mode 100644
index 0000000..fcad2fa
--- /dev/null
+++ b/typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi
@@ -0,0 +1,43 @@
+# Stubs for boto.ec2.elb (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from boto.connection import AWSQueryConnection
+
+RegionData = ... # type: Any
+
+def regions(): ...
+def connect_to_region(region_name, **kw_params): ...
+
+class ELBConnection(AWSQueryConnection):
+    APIVersion = ... # type: Any
+    DefaultRegionName = ... # type: Any
+    DefaultRegionEndpoint = ... # type: Any
+    region = ... # type: Any
+    def __init__(self, aws_access_key_id=..., aws_secret_access_key=..., is_secure=..., port=..., proxy=..., proxy_port=..., proxy_user=..., proxy_pass=..., debug=..., https_connection_factory=..., region=..., path=..., security_token=..., validate_certs=..., profile_name=...) -> None: ...
+    def build_list_params(self, params, items, label): ...
+    def get_all_load_balancers(self, load_balancer_names=..., marker=...): ...
+    def create_load_balancer(self, name, zones, listeners=..., subnets=..., security_groups=..., scheme=..., complex_listeners=...): ...
+    def create_load_balancer_listeners(self, name, listeners=..., complex_listeners=...): ...
+    def delete_load_balancer(self, name): ...
+    def delete_load_balancer_listeners(self, name, ports): ...
+    def enable_availability_zones(self, load_balancer_name, zones_to_add): ...
+    def disable_availability_zones(self, load_balancer_name, zones_to_remove): ...
+    def modify_lb_attribute(self, load_balancer_name, attribute, value): ...
+    def get_all_lb_attributes(self, load_balancer_name): ...
+    def get_lb_attribute(self, load_balancer_name, attribute): ...
+    def register_instances(self, load_balancer_name, instances): ...
+    def deregister_instances(self, load_balancer_name, instances): ...
+    def describe_instance_health(self, load_balancer_name, instances=...): ...
+    def configure_health_check(self, name, health_check): ...
+    def set_lb_listener_SSL_certificate(self, lb_name, lb_port, ssl_certificate_id): ...
+    def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name): ...
+    def create_lb_cookie_stickiness_policy(self, cookie_expiration_period, lb_name, policy_name): ...
+    def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes): ...
+    def delete_lb_policy(self, lb_name, policy_name): ...
+    def set_lb_policies_of_listener(self, lb_name, lb_port, policies): ...
+    def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies): ...
+    def apply_security_groups_to_lb(self, name, security_groups): ...
+    def attach_lb_to_subnets(self, name, subnets): ...
+    def detach_lb_from_subnets(self, name, subnets): ...
diff --git a/typeshed/third_party/2.7/concurrent/__init__.pyi b/typeshed/third_party/2.7/concurrent/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/concurrent/futures/__init__.pyi b/typeshed/third_party/2.7/concurrent/futures/__init__.pyi
new file mode 100644
index 0000000..e2f7455
--- /dev/null
+++ b/typeshed/third_party/2.7/concurrent/futures/__init__.pyi
@@ -0,0 +1,37 @@
+from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple
+
+_T = TypeVar('_T')
+
+class Future(Generic[_T]):
+    def cancel(self) -> bool: ...
+    def cancelled(self) -> bool: ...
+    def running(self) -> bool: ...
+    def done(self) -> bool: ...
+    def result(self, timeout: float = ...) -> _T: ...
+    def exception(self, timeout: float = ...) -> Exception: ...
+    def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ...
+
+    def set_running_or_notify_cancel(self) -> None: ...
+    def set_result(self, result: _T) -> None: ...
+    def set_exception(self, exception: Exception) -> None: ...
+
+class Executor:
+    def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
+    def map(self, func: Callable[..., _T], *iterables: Any, timeout: float = ...) -> Iterable[_T]: ...
+    def shutdown(self, wait: bool = ...) -> None: ...
+    def __enter__(self) -> Executor: ...
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
+
+class ThreadPoolExecutor(Executor):
+    def __init__(self, max_workers: int) -> None: ...
+
+class ProcessPoolExecutor(Executor):
+    def __init__(self, max_workers: int = ...) -> None: ...
+
+def wait(fs: Iterable[Future], timeout: float = ..., return_when: str = ...) -> Tuple[Iterable[Future], Iterable[Future]]: ...
+
+FIRST_COMPLETED = ...  # type: str
+FIRST_EXCEPTION = ...  # type: str
+ALL_COMPLETED = ...  # type: str
+
+def as_completed(fs: Iterable[Future], timeout: float = ...) -> Iterator[Future]: ...
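
A minimal sketch of the futures backport API typed above (the Python 2
concurrent.futures package; illustrative only):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def square(x):
        return x * x

    with ThreadPoolExecutor(max_workers=4) as pool:
        futures = [pool.submit(square, i) for i in range(5)]
        for fut in as_completed(futures):
            print(fut.result())
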
diff --git a/typeshed/third_party/2.7/croniter.pyi b/typeshed/third_party/2.7/croniter.pyi
new file mode 100644
index 0000000..e511cf0
--- /dev/null
+++ b/typeshed/third_party/2.7/croniter.pyi
@@ -0,0 +1,27 @@
+# Stubs for croniter.croniter (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class croniter:
+    MONTHS_IN_YEAR = ... # type: Any
+    RANGES = ... # type: Any
+    DAYS = ... # type: Any
+    ALPHACONV = ... # type: Any
+    LOWMAP = ... # type: Any
+    bad_length = ... # type: Any
+    tzinfo = ... # type: Any
+    cur = ... # type: Any
+    exprs = ... # type: Any
+    expanded = ... # type: Any
+    def __init__(self, expr_format, start_time=...) -> None: ...
+    def get_next(self, ret_type=...): ...
+    def get_prev(self, ret_type=...): ...
+    def get_current(self, ret_type=...): ...
+    def __iter__(self): ...
+    __next__ = ... # type: Any
+    def all_next(self, ret_type=...): ...
+    def all_prev(self, ret_type=...): ...
+    iter = ... # type: Any
+    def is_leap(self, year): ...
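
Typical croniter usage implied by the stub above (third-party library; illustrative,
the attribute types in the stub are all Any):

    from datetime import datetime
    from croniter import croniter

    base = datetime(2016, 5, 5, 0, 0)
    it = croniter("*/15 * * * *", base)   # every 15 minutes
    print(it.get_next(datetime))          # next run time as a datetime
    print(it.get_prev(datetime))
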
diff --git a/typeshed/third_party/2.7/enum.pyi b/typeshed/third_party/2.7/enum.pyi
new file mode 100644
index 0000000..dcb3b9c
--- /dev/null
+++ b/typeshed/third_party/2.7/enum.pyi
@@ -0,0 +1,19 @@
+from typing import List, Any, TypeVar
+
+class Enum:
+    def __new__(cls, value: Any) -> None: ...
+    def __repr__(self) -> str: ...
+    def __str__(self) -> str: ...
+    def __dir__(self) -> List[str]: ...
+    def __format__(self, format_spec: str) -> str: ...
+    def __hash__(self) -> Any: ...
+    def __reduce_ex__(self, proto: Any) -> Any: ...
+
+    name = ...  # type: str
+    value = None  # type: Any
+
+class IntEnum(int, Enum): ...
+
+_T = TypeVar('_T')
+
+def unique(enumeration: _T) -> _T: ...
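
A usage sketch for the enum34 backport typed above (illustrative; Color and
Priority are hypothetical enums):

    from enum import Enum, IntEnum, unique

    @unique
    class Color(Enum):
        RED = 1
        GREEN = 2

    class Priority(IntEnum):
        LOW = 0
        HIGH = 1

    print("%s = %r" % (Color.RED.name, Color.RED.value))
    assert Priority.HIGH > Priority.LOW
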
diff --git a/typeshed/third_party/2.7/fb303/FacebookService.pyi b/typeshed/third_party/2.7/fb303/FacebookService.pyi
new file mode 100644
index 0000000..0394fdb
--- /dev/null
+++ b/typeshed/third_party/2.7/fb303/FacebookService.pyi
@@ -0,0 +1,301 @@
+# Stubs for fb303.FacebookService (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from thrift.Thrift import TProcessor
+
+fastbinary = ... # type: Any
+
+class Iface:
+    def getName(self): ...
+    def getVersion(self): ...
+    def getStatus(self): ...
+    def getStatusDetails(self): ...
+    def getCounters(self): ...
+    def getCounter(self, key): ...
+    def setOption(self, key, value): ...
+    def getOption(self, key): ...
+    def getOptions(self): ...
+    def getCpuProfile(self, profileDurationInSec): ...
+    def aliveSince(self): ...
+    def reinitialize(self): ...
+    def shutdown(self): ...
+
+class Client(Iface):
+    def __init__(self, iprot, oprot=...) -> None: ...
+    def getName(self): ...
+    def send_getName(self): ...
+    def recv_getName(self): ...
+    def getVersion(self): ...
+    def send_getVersion(self): ...
+    def recv_getVersion(self): ...
+    def getStatus(self): ...
+    def send_getStatus(self): ...
+    def recv_getStatus(self): ...
+    def getStatusDetails(self): ...
+    def send_getStatusDetails(self): ...
+    def recv_getStatusDetails(self): ...
+    def getCounters(self): ...
+    def send_getCounters(self): ...
+    def recv_getCounters(self): ...
+    def getCounter(self, key): ...
+    def send_getCounter(self, key): ...
+    def recv_getCounter(self): ...
+    def setOption(self, key, value): ...
+    def send_setOption(self, key, value): ...
+    def recv_setOption(self): ...
+    def getOption(self, key): ...
+    def send_getOption(self, key): ...
+    def recv_getOption(self): ...
+    def getOptions(self): ...
+    def send_getOptions(self): ...
+    def recv_getOptions(self): ...
+    def getCpuProfile(self, profileDurationInSec): ...
+    def send_getCpuProfile(self, profileDurationInSec): ...
+    def recv_getCpuProfile(self): ...
+    def aliveSince(self): ...
+    def send_aliveSince(self): ...
+    def recv_aliveSince(self): ...
+    def reinitialize(self): ...
+    def send_reinitialize(self): ...
+    def shutdown(self): ...
+    def send_shutdown(self): ...
+
+class Processor(Iface, TProcessor):
+    def __init__(self, handler) -> None: ...
+    def process(self, iprot, oprot): ...
+    def process_getName(self, seqid, iprot, oprot): ...
+    def process_getVersion(self, seqid, iprot, oprot): ...
+    def process_getStatus(self, seqid, iprot, oprot): ...
+    def process_getStatusDetails(self, seqid, iprot, oprot): ...
+    def process_getCounters(self, seqid, iprot, oprot): ...
+    def process_getCounter(self, seqid, iprot, oprot): ...
+    def process_setOption(self, seqid, iprot, oprot): ...
+    def process_getOption(self, seqid, iprot, oprot): ...
+    def process_getOptions(self, seqid, iprot, oprot): ...
+    def process_getCpuProfile(self, seqid, iprot, oprot): ...
+    def process_aliveSince(self, seqid, iprot, oprot): ...
+    def process_reinitialize(self, seqid, iprot, oprot): ...
+    def process_shutdown(self, seqid, iprot, oprot): ...
+
+class getName_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getName_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getVersion_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getVersion_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getStatus_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getStatus_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getStatusDetails_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getStatusDetails_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCounters_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCounters_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCounter_args:
+    thrift_spec = ... # type: Any
+    key = ... # type: Any
+    def __init__(self, key=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCounter_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class setOption_args:
+    thrift_spec = ... # type: Any
+    key = ... # type: Any
+    value = ... # type: Any
+    def __init__(self, key=..., value=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class setOption_result:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getOption_args:
+    thrift_spec = ... # type: Any
+    key = ... # type: Any
+    def __init__(self, key=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getOption_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getOptions_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getOptions_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCpuProfile_args:
+    thrift_spec = ... # type: Any
+    profileDurationInSec = ... # type: Any
+    def __init__(self, profileDurationInSec=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class getCpuProfile_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class aliveSince_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class aliveSince_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class reinitialize_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class shutdown_args:
+    thrift_spec = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
diff --git a/typeshed/third_party/2.7/fb303/__init__.pyi b/typeshed/third_party/2.7/fb303/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/gflags.pyi b/typeshed/third_party/2.7/gflags.pyi
new file mode 100644
index 0000000..55bdc75
--- /dev/null
+++ b/typeshed/third_party/2.7/gflags.pyi
@@ -0,0 +1,216 @@
+from typing import Any, Callable, Dict, Iterable, IO, List, Union
+from types import ModuleType
+
+class FlagsError(Exception): ...
+
+class DuplicateFlag(FlagsError): ...
+
+class CantOpenFlagFileError(FlagsError): ...
+
+class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): ...
+
+class DuplicateFlagError(DuplicateFlag):
+    def __init__(self, flagname: str, flag_values: FlagValues, other_flag_values: FlagValues = ...) -> None: ...
+
+class IllegalFlagValue(FlagsError): ...
+
+class UnrecognizedFlag(FlagsError): ...
+
+class UnrecognizedFlagError(UnrecognizedFlag):
+    def __init__(self, flagname: str, flagvalue: str = ...) -> None: ...
+
+def GetHelpWidth() -> int: ...
+def CutCommonSpacePrefix(text) -> str: ...
+def TextWrap(text: str, length: int = ..., indent: str = ..., firstline_indent: str = ..., tabs: str = ...) -> str: ...
+def DocToHelp(doc: str) -> str: ...
+
+class FlagValues:
+    def __init__(self) -> None: ...
+    def UseGnuGetOpt(self, use_gnu_getopt: bool = ...) -> None: ...
+    def IsGnuGetOpt(self) -> bool: ...
+    # TODO dict type
+    def FlagDict(self) -> dict: ...
+    def FlagsByModuleDict(self) -> Dict[str, List[Flag]]: ...
+    def FlagsByModuleIdDict(self) -> Dict[int, List[Flag]]: ...
+    def KeyFlagsByModuleDict(self) -> Dict[str, List[Flag]]: ...
+    def FindModuleDefiningFlag(self, flagname: str, default: str = ...) -> str: ...
+    def FindModuleIdDefiningFlag(self, flagname: str, default: int = ...) -> int: ...
+    def AppendFlagValues(self, flag_values: FlagValues) -> None: ...
+    def RemoveFlagValues(self, flag_values: FlagValues) -> None: ...
+    def __setitem__(self, name: str, flag: Flag) -> None: ...
+    def __getitem__(self, name: str) -> Flag: ...
+    def __getattr__(self, name: str) -> Any: ...
+    def __setattr__(self, name: str, value: Any): ...
+    def __delattr__(self, flag_name: str) -> None: ...
+    def SetDefault(self, name: str, value: Any) -> None: ...
+    def __contains__(self, name: str) -> bool: ...
+    has_key = __contains__
+    def __iter__(self) -> Iterable[str]: ...
+    def __call__(self, argv: List[str]) -> List[str]: ...
+    def Reset(self) -> None: ...
+    def RegisteredFlags(self) -> List[str]: ...
+    def FlagValuesDict(self) -> Dict[str, Any]: ...
+    def __str__(self) -> str: ...
+    def GetHelp(self, prefix: str = ...) -> str: ...
+    def ModuleHelp(self, module: Union[ModuleType, str]) -> str: ...
+    def MainModuleHelp(self) -> str: ...
+    def get(self, name: str, default: Any) -> Any: ...
+    def ShortestUniquePrefixes(self, fl: Dict[str, Flag]) -> Dict[str, str]: ...
+    def ExtractFilename(self, flagfile_str: str) -> str: ...
+    def ReadFlagsFromFiles(self, argv: List[str], force_gnu: bool = ...) -> List[str]: ...
+    def FlagsIntoString(self) -> str: ...
+    def AppendFlagsIntoFile(self, filename: str) -> None: ...
+    def WriteHelpInXMLFormat(self, outfile: IO[str] = ...) -> None: ...
+    # TODO validator: gflags_validators.Validator
+    def AddValidator(self, validator: Any) -> None: ...
+
+FLAGS = None  # type: FlagValues
+
+class Flag:
+    name = ...  # type: str
+    default = None  # type: Any
+    default_as_str = ...  # type: str
+    value = None  # type: Any
+    help = ...  # type: str
+    short_name = ...  # type: str
+    boolean = False
+    present = False
+    parser = None  # type: ArgumentParser
+    serializer = None  # type: ArgumentSerializer
+    allow_override = False
+
+    def __init__(self, parser: ArgumentParser, serializer: ArgumentSerializer, name: str,
+               default: str, help_string: str, short_name: str = ..., boolean: bool = ...,
+               allow_override: bool = ...) -> None: ...
+    def Parse(self, argument: Any) -> Any: ...
+    def Unparse(self) -> None: ...
+    def Serialize(self) -> str: ...
+    def SetDefault(self, value: Any) -> None: ...
+    def Type(self) -> str: ...
+    def WriteInfoInXMLFormat(self, outfile: IO[str], module_name: str, is_key: bool = ..., indent: str = ...) -> None: ...
+
+class ArgumentParser(object):
+    syntactic_help = ...  # type: str
+    # TODO what is this
+    def Parse(self, argument: Any) -> Any: ...
+    def Type(self) -> str: ...
+    def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ...
+
+class ArgumentSerializer:
+    def Serialize(self, value: Any) -> unicode: ...
+
+class ListSerializer(ArgumentSerializer):
+    def __init__(self, list_sep: str) -> None: ...
+    def Serialize(self, value: List[Any]) -> str: ...
+
+def RegisterValidator(flag_name: str,
+                      checker: Callable[[Any], bool],
+                      message: str = ...,
+                      flag_values: FlagValues = ...) -> None: ...
+def MarkFlagAsRequired(flag_name: str, flag_values: FlagValues = ...) -> None: ...
+
+
+
+def DEFINE(parser: ArgumentParser, name: str, default: Any, help: str,
+           flag_values: FlagValues = ..., serializer: ArgumentSerializer = ..., **args: Any) -> None: ...
+def DEFINE_flag(flag: Flag, flag_values: FlagValues = ...) -> None: ...
+def DECLARE_key_flag(flag_name: str, flag_values: FlagValues = ...) -> None: ...
+def ADOPT_module_key_flags(module: ModuleType, flag_values: FlagValues = ...) -> None: ...
+def DEFINE_string(name: str, default: str, help: str, flag_values: FlagValues = ..., **args: Any): ...
+
+class BooleanParser(ArgumentParser):
+    def Convert(self, argument: Any) -> bool: ...
+    def Parse(self, argument: Any) -> bool: ...
+    def Type(self) -> str: ...
+
+class BooleanFlag(Flag):
+    def __init__(self, name: str, default: bool, help: str, short_name=..., **args: Any) -> None: ...
+
+def DEFINE_boolean(name: str, default: bool, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+DEFINE_bool = DEFINE_boolean
+
+class HelpFlag(BooleanFlag):
+    def __init__(self) -> None: ...
+    def Parse(self, arg: Any) -> None: ...
+
+class HelpXMLFlag(BooleanFlag):
+    def __init__(self) -> None: ...
+    def Parse(self, arg: Any) -> None: ...
+
+class HelpshortFlag(BooleanFlag):
+    def __init__(self) -> None: ...
+    def Parse(self, arg: Any) -> None: ...
+
+class NumericParser(ArgumentParser):
+    def IsOutsideBounds(self, val: float) -> bool: ...
+    def Parse(self, argument: Any) -> float: ...
+    def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ...
+    def Convert(self, argument: Any) -> Any: ...
+
+class FloatParser(NumericParser):
+    number_article = ...  # type: str
+    number_name = ...  # type: str
+    syntactic_help = ...  # type: str
+    def __init__(self, lower_bound: float = ..., upper_bound: float = ...) -> None: ...
+    def Convert(self, argument: Any) -> float: ...
+    def Type(self) -> str: ...
+
+def DEFINE_float(name: str, default: float, help: str, lower_bound: float = ...,
+                 upper_bound: float = ..., flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+class IntegerParser(NumericParser):
+    number_article = ...  # type: str
+    number_name = ...  # type: str
+    syntactic_help = ...  # type: str
+    def __init__(self, lower_bound: int = ..., upper_bound: int = ...) -> None: ...
+    def Convert(self, argument: Any) -> int: ...
+    def Type(self) -> str: ...
+
+def DEFINE_integer(name: str, default: int, help: str, lower_bound: int = ...,
+                   upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+class EnumParser(ArgumentParser):
+    def __init__(self, enum_values: List[str]) -> None: ...
+    def Parse(self, argument: Any) -> Any: ...
+    def Type(self) -> str: ...
+
+class EnumFlag(Flag):
+    def __init__(self, name: str, default: str, help: str, enum_values: List[str],
+               short_name: str, **args: Any) -> None: ...
+
+def DEFINE_enum(name: str, default: str, enum_values: List[str], help: str,
+                flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+class BaseListParser(ArgumentParser):
+    def __init__(self, token: str = ..., name: str = ...) -> None: ...
+    def Parse(self, argument: Any) -> list: ...
+    def Type(self) -> str: ...
+
+class ListParser(BaseListParser):
+    def __init__(self) -> None: ...
+    def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ...
+
+class WhitespaceSeparatedListParser(BaseListParser):
+    def __init__(self) -> None: ...
+    def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ...
+
+def DEFINE_list(name: str, default: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
+def DEFINE_spaceseplist(name: str, default: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+class MultiFlag(Flag):
+    def __init__(self, *args: Any, **kwargs: Any) -> None: ...
+    def Parse(self, arguments: Any) -> None: ...
+    def Serialize(self) -> str: ...
+    def Type(self) -> str: ...
+
+def DEFINE_multistring(name: str, default: Union[str, List[str]], help: str,
+                       flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+def DEFINE_multi_int(name: str, default: Union[int, List[int]], help: str, lower_bound: int = ...,
+                     upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ...
+
+
+def DEFINE_multi_float(name: str, default: Union[float, List[float]], help: str,
+                       lower_bound: float = ..., upper_bound: float = ...,
+                       flag_values: FlagValues = ..., **args: Any) -> None: ...
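
A short gflags sketch matching the declarations above (third-party library;
illustrative, the --name and --times flags are hypothetical):

    import sys
    import gflags

    gflags.DEFINE_string("name", "world", "Who to greet")
    gflags.DEFINE_integer("times", 1, "How many greetings", lower_bound=1)

    FLAGS = gflags.FLAGS

    def main(argv):
        argv = FLAGS(argv)   # parses flags, returns remaining positional args
        for _ in range(FLAGS.times):
            print("Hello, %s" % FLAGS.name)

    if __name__ == "__main__":
        main(sys.argv)
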
diff --git a/typeshed/third_party/2.7/google/__init__.pyi b/typeshed/third_party/2.7/google/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/google/protobuf/__init__.pyi b/typeshed/third_party/2.7/google/protobuf/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/google/protobuf/descriptor.pyi b/typeshed/third_party/2.7/google/protobuf/descriptor.pyi
new file mode 100644
index 0000000..f75d11c
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/descriptor.pyi
@@ -0,0 +1,163 @@
+# Stubs for google.protobuf.descriptor (Python 2.7)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Error(Exception): ...
+class TypeTransformationError(Error): ...
+
+class DescriptorMetaclass(type):
+    def __instancecheck__(cls, obj): ...
+
+class DescriptorBase:
+    __metaclass__ = ... # type: Any
+    has_options = ... # type: Any
+    def __init__(self, options, options_class_name) -> None: ...
+    def GetOptions(self): ...
+
+class _NestedDescriptorBase(DescriptorBase):
+    name = ... # type: Any
+    full_name = ... # type: Any
+    file = ... # type: Any
+    containing_type = ... # type: Any
+    def __init__(self, options, options_class_name, name, full_name, file, containing_type, serialized_start=..., serialized_end=...) -> None: ...
+    def GetTopLevelContainingType(self): ...
+    def CopyToProto(self, proto): ...
+
+class Descriptor(_NestedDescriptorBase):
+    def __new__(cls, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=...): ...
+    fields = ... # type: Any
+    fields_by_number = ... # type: Any
+    fields_by_name = ... # type: Any
+    nested_types = ... # type: Any
+    nested_types_by_name = ... # type: Any
+    enum_types = ... # type: Any
+    enum_types_by_name = ... # type: Any
+    enum_values_by_name = ... # type: Any
+    extensions = ... # type: Any
+    extensions_by_name = ... # type: Any
+    is_extendable = ... # type: Any
+    extension_ranges = ... # type: Any
+    oneofs = ... # type: Any
+    oneofs_by_name = ... # type: Any
+    syntax = ... # type: Any
+    def __init__(self, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=...) -> None: ...
+    def EnumValueName(self, enum, value): ...
+    def CopyToProto(self, proto): ...
+
+class FieldDescriptor(DescriptorBase):
+    TYPE_DOUBLE = ... # type: Any
+    TYPE_FLOAT = ... # type: Any
+    TYPE_INT64 = ... # type: Any
+    TYPE_UINT64 = ... # type: Any
+    TYPE_INT32 = ... # type: Any
+    TYPE_FIXED64 = ... # type: Any
+    TYPE_FIXED32 = ... # type: Any
+    TYPE_BOOL = ... # type: Any
+    TYPE_STRING = ... # type: Any
+    TYPE_GROUP = ... # type: Any
+    TYPE_MESSAGE = ... # type: Any
+    TYPE_BYTES = ... # type: Any
+    TYPE_UINT32 = ... # type: Any
+    TYPE_ENUM = ... # type: Any
+    TYPE_SFIXED32 = ... # type: Any
+    TYPE_SFIXED64 = ... # type: Any
+    TYPE_SINT32 = ... # type: Any
+    TYPE_SINT64 = ... # type: Any
+    MAX_TYPE = ... # type: Any
+    CPPTYPE_INT32 = ... # type: Any
+    CPPTYPE_INT64 = ... # type: Any
+    CPPTYPE_UINT32 = ... # type: Any
+    CPPTYPE_UINT64 = ... # type: Any
+    CPPTYPE_DOUBLE = ... # type: Any
+    CPPTYPE_FLOAT = ... # type: Any
+    CPPTYPE_BOOL = ... # type: Any
+    CPPTYPE_ENUM = ... # type: Any
+    CPPTYPE_STRING = ... # type: Any
+    CPPTYPE_MESSAGE = ... # type: Any
+    MAX_CPPTYPE = ... # type: Any
+    LABEL_OPTIONAL = ... # type: Any
+    LABEL_REQUIRED = ... # type: Any
+    LABEL_REPEATED = ... # type: Any
+    MAX_LABEL = ... # type: Any
+    MAX_FIELD_NUMBER = ... # type: Any
+    FIRST_RESERVED_FIELD_NUMBER = ... # type: Any
+    LAST_RESERVED_FIELD_NUMBER = ... # type: Any
+    def __new__(cls, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., has_default_value=..., containing_oneof=...): ...
+    name = ... # type: Any
+    full_name = ... # type: Any
+    index = ... # type: Any
+    number = ... # type: Any
+    type = ... # type: Any
+    cpp_type = ... # type: Any
+    label = ... # type: Any
+    has_default_value = ... # type: Any
+    default_value = ... # type: Any
+    containing_type = ... # type: Any
+    message_type = ... # type: Any
+    enum_type = ... # type: Any
+    is_extension = ... # type: Any
+    extension_scope = ... # type: Any
+    containing_oneof = ... # type: Any
+    def __init__(self, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., has_default_value=..., containing_oneof=...) -> None: ...
+    @staticmethod
+    def ProtoTypeToCppProtoType(proto_type): ...
+
+class EnumDescriptor(_NestedDescriptorBase):
+    def __new__(cls, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=...): ...
+    values = ... # type: Any
+    values_by_name = ... # type: Any
+    values_by_number = ... # type: Any
+    def __init__(self, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=...) -> None: ...
+    def CopyToProto(self, proto): ...
+
+class EnumValueDescriptor(DescriptorBase):
+    def __new__(cls, name, index, number, type=..., options=...): ...
+    name = ... # type: Any
+    index = ... # type: Any
+    number = ... # type: Any
+    type = ... # type: Any
+    def __init__(self, name, index, number, type=..., options=...) -> None: ...
+
+class OneofDescriptor:
+    def __new__(cls, name, full_name, index, containing_type, fields): ...
+    name = ... # type: Any
+    full_name = ... # type: Any
+    index = ... # type: Any
+    containing_type = ... # type: Any
+    fields = ... # type: Any
+    def __init__(self, name, full_name, index, containing_type, fields) -> None: ...
+
+class ServiceDescriptor(_NestedDescriptorBase):
+    index = ... # type: Any
+    methods = ... # type: Any
+    def __init__(self, name, full_name, index, methods, options=..., file=..., serialized_start=..., serialized_end=...) -> None: ...
+    def FindMethodByName(self, name): ...
+    def CopyToProto(self, proto): ...
+
+class MethodDescriptor(DescriptorBase):
+    name = ... # type: Any
+    full_name = ... # type: Any
+    index = ... # type: Any
+    containing_service = ... # type: Any
+    input_type = ... # type: Any
+    output_type = ... # type: Any
+    def __init__(self, name, full_name, index, containing_service, input_type, output_type, options=...) -> None: ...
+
+class FileDescriptor(DescriptorBase):
+    def __new__(cls, name, package, options=..., serialized_pb=..., dependencies=..., syntax=...): ...
+    _options = ... # type: Any
+    message_types_by_name = ... # type: Any
+    name = ... # type: Any
+    package = ... # type: Any
+    syntax = ... # type: Any
+    serialized_pb = ... # type: Any
+    enum_types_by_name = ... # type: Any
+    extensions_by_name = ... # type: Any
+    dependencies = ... # type: Any
+    def __init__(self, name, package, options=..., serialized_pb=..., dependencies=..., syntax=...) -> None: ...
+    def CopyToProto(self, proto): ...
+
+def MakeDescriptor(desc_proto, package=..., build_file_if_cpp=..., syntax=...): ...
+def _ParseOptions(message, string): ...
diff --git a/typeshed/third_party/2.7/google/protobuf/descriptor_pb2.pyi b/typeshed/third_party/2.7/google/protobuf/descriptor_pb2.pyi
new file mode 100644
index 0000000..4ac2e4c
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/descriptor_pb2.pyi
@@ -0,0 +1,2 @@
+class FileOptions(object): ...
+class FieldOptions(object): ...
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/__init__.pyi b/typeshed/third_party/2.7/google/protobuf/internal/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/decoder.pyi b/typeshed/third_party/2.7/google/protobuf/internal/decoder.pyi
new file mode 100644
index 0000000..b6b990f
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/internal/decoder.pyi
@@ -0,0 +1,34 @@
+# Stubs for google.protobuf.internal.decoder (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def ReadTag(buffer, pos): ...
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default): ...
+
+Int32Decoder = ... # type: Any
+Int64Decoder = ... # type: Any
+UInt32Decoder = ... # type: Any
+UInt64Decoder = ... # type: Any
+SInt32Decoder = ... # type: Any
+SInt64Decoder = ... # type: Any
+Fixed32Decoder = ... # type: Any
+Fixed64Decoder = ... # type: Any
+SFixed32Decoder = ... # type: Any
+SFixed64Decoder = ... # type: Any
+FloatDecoder = ... # type: Any
+DoubleDecoder = ... # type: Any
+BoolDecoder = ... # type: Any
+
+def StringDecoder(field_number, is_repeated, is_packed, key, new_default): ...
+def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): ...
+def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): ...
+def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): ...
+
+MESSAGE_SET_ITEM_TAG = ... # type: Any
+
+def MessageSetItemDecoder(extensions_by_number): ...
+def MapDecoder(field_descriptor, new_default, is_message_map): ...
+
+SkipField = ... # type: Any
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/encoder.pyi b/typeshed/third_party/2.7/google/protobuf/internal/encoder.pyi
new file mode 100644
index 0000000..b04534d
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/internal/encoder.pyi
@@ -0,0 +1,38 @@
+# Stubs for google.protobuf.internal.encoder (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+Int32Sizer = ... # type: Any
+UInt32Sizer = ... # type: Any
+SInt32Sizer = ... # type: Any
+Fixed32Sizer = ... # type: Any
+Fixed64Sizer = ... # type: Any
+BoolSizer = ... # type: Any
+
+def StringSizer(field_number, is_repeated, is_packed): ...
+def BytesSizer(field_number, is_repeated, is_packed): ...
+def GroupSizer(field_number, is_repeated, is_packed): ...
+def MessageSizer(field_number, is_repeated, is_packed): ...
+def MessageSetItemSizer(field_number): ...
+def MapSizer(field_descriptor): ...
+def TagBytes(field_number, wire_type): ...
+
+Int32Encoder = ... # type: Any
+UInt32Encoder = ... # type: Any
+SInt32Encoder = ... # type: Any
+Fixed32Encoder = ... # type: Any
+Fixed64Encoder = ... # type: Any
+SFixed32Encoder = ... # type: Any
+SFixed64Encoder = ... # type: Any
+FloatEncoder = ... # type: Any
+DoubleEncoder = ... # type: Any
+
+def BoolEncoder(field_number, is_repeated, is_packed): ...
+def StringEncoder(field_number, is_repeated, is_packed): ...
+def BytesEncoder(field_number, is_repeated, is_packed): ...
+def GroupEncoder(field_number, is_repeated, is_packed): ...
+def MessageEncoder(field_number, is_repeated, is_packed): ...
+def MessageSetItemEncoder(field_number): ...
+def MapEncoder(field_descriptor): ...
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/enum_type_wrapper.pyi b/typeshed/third_party/2.7/google/protobuf/internal/enum_type_wrapper.pyi
new file mode 100644
index 0000000..ced66b8
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/internal/enum_type_wrapper.pyi
@@ -0,0 +1,11 @@
+from typing import Any, List, Tuple
+
+class EnumTypeWrapper(object):
+    def __init__(self, enum_type: Any) -> None: ...
+    def Name(self, number: int) -> str: ...
+    def Value(self, name: str) -> int: ...
+    def keys(self) -> List[str]: ...
+    def values(self) -> List[int]: ...
+
+    @classmethod
+    def items(cls) -> List[Tuple[str, int]]: ...
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/wire_format.pyi b/typeshed/third_party/2.7/google/protobuf/internal/wire_format.pyi
new file mode 100644
index 0000000..e9fbef3
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/internal/wire_format.pyi
@@ -0,0 +1,54 @@
+# Stubs for google.protobuf.internal.wire_format (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+TAG_TYPE_BITS = ... # type: Any
+TAG_TYPE_MASK = ... # type: Any
+WIRETYPE_VARINT = ... # type: Any
+WIRETYPE_FIXED64 = ... # type: Any
+WIRETYPE_LENGTH_DELIMITED = ... # type: Any
+WIRETYPE_START_GROUP = ... # type: Any
+WIRETYPE_END_GROUP = ... # type: Any
+WIRETYPE_FIXED32 = ... # type: Any
+INT32_MAX = ... # type: Any
+INT32_MIN = ... # type: Any
+UINT32_MAX = ... # type: Any
+INT64_MAX = ... # type: Any
+INT64_MIN = ... # type: Any
+UINT64_MAX = ... # type: Any
+FORMAT_UINT32_LITTLE_ENDIAN = ... # type: Any
+FORMAT_UINT64_LITTLE_ENDIAN = ... # type: Any
+FORMAT_FLOAT_LITTLE_ENDIAN = ... # type: Any
+FORMAT_DOUBLE_LITTLE_ENDIAN = ... # type: Any
+
+def PackTag(field_number, wire_type): ...
+def UnpackTag(tag): ...
+def ZigZagEncode(value): ...
+def ZigZagDecode(value): ...
+def Int32ByteSize(field_number, int32): ...
+def Int32ByteSizeNoTag(int32): ...
+def Int64ByteSize(field_number, int64): ...
+def UInt32ByteSize(field_number, uint32): ...
+def UInt64ByteSize(field_number, uint64): ...
+def SInt32ByteSize(field_number, int32): ...
+def SInt64ByteSize(field_number, int64): ...
+def Fixed32ByteSize(field_number, fixed32): ...
+def Fixed64ByteSize(field_number, fixed64): ...
+def SFixed32ByteSize(field_number, sfixed32): ...
+def SFixed64ByteSize(field_number, sfixed64): ...
+def FloatByteSize(field_number, flt): ...
+def DoubleByteSize(field_number, double): ...
+def BoolByteSize(field_number, b): ...
+def EnumByteSize(field_number, enum): ...
+def StringByteSize(field_number, string): ...
+def BytesByteSize(field_number, b): ...
+def GroupByteSize(field_number, message): ...
+def MessageByteSize(field_number, message): ...
+def MessageSetItemByteSize(field_number, msg): ...
+def TagByteSize(field_number): ...
+
+NON_PACKABLE_TYPES = ... # type: Any
+
+def IsTypePackable(field_type): ...
diff --git a/typeshed/third_party/2.7/google/protobuf/message.pyi b/typeshed/third_party/2.7/google/protobuf/message.pyi
new file mode 100644
index 0000000..9b14a7d
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/message.pyi
@@ -0,0 +1,36 @@
+# Stubs for google.protobuf.message (Python 2.7)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Sequence, Optional, Tuple
+
+from .descriptor import FieldDescriptor
+
+class Error(Exception): ...
+class DecodeError(Error): ...
+class EncodeError(Error): ...
+
+class Message:
+    DESCRIPTOR = ... # type: Any
+    def __deepcopy__(self, memo=...): ...
+    def __eq__(self, other_msg): ...
+    def __ne__(self, other_msg): ...
+    def MergeFrom(self, other_msg: Message) -> None: ...
+    def CopyFrom(self, other_msg: Message) -> None: ...
+    def Clear(self) -> None: ...
+    def SetInParent(self) -> None: ...
+    def IsInitialized(self) -> bool: ...
+    def MergeFromString(self, serialized: Any) -> int: ... # TODO: we need to be able to call buffer() on serialized
+    def ParseFromString(self, serialized: Any) -> None: ...
+    def SerializeToString(self) -> str: ...
+    def SerializePartialToString(self) -> str: ...
+    def ListFields(self) -> Sequence[Tuple[FieldDescriptor, Any]]: ...
+    def HasField(self, field_name: str) -> bool: ...
+    def ClearField(self, field_name: str) -> None: ...
+    def WhichOneof(self, oneof_group) -> Optional[str]: ...
+    def HasExtension(self, extension_handle): ...
+    def ClearExtension(self, extension_handle): ...
+    def ByteSize(self) -> int: ...
+
+    # TODO: check kwargs
+    def __init__(self, **kwargs) -> None: ...
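
The Message base class above is what every generated protobuf class inherits from. A minimal sketch of the serialization round trip, assuming the protobuf runtime is installed; FileDescriptorProto is a generated message that ships with the runtime (it is not part of the two-line descriptor_pb2 stub above) and is used here purely for illustration:

    from google.protobuf import descriptor_pb2

    msg = descriptor_pb2.FileDescriptorProto()
    msg.name = 'example.proto'
    msg.package = 'example'

    data = msg.SerializeToString()   # str on Python 2, bytes on Python 3

    copy = descriptor_pb2.FileDescriptorProto()
    copy.ParseFromString(data)       # populates `copy` in place
    assert copy.name == 'example.proto'
    assert msg.HasField('package')
    print(msg.ByteSize())
    print(copy.ListFields())         # [(FieldDescriptor, value), ...]
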
diff --git a/typeshed/third_party/2.7/google/protobuf/reflection.pyi b/typeshed/third_party/2.7/google/protobuf/reflection.pyi
new file mode 100644
index 0000000..3d3e76b
--- /dev/null
+++ b/typeshed/third_party/2.7/google/protobuf/reflection.pyi
@@ -0,0 +1,10 @@
+# Stubs for google.protobuf.reflection (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class GeneratedProtocolMessageType(type):
+    def __new__(cls, name, bases, dictionary): ...
+    def __init__(cls, name, bases, dictionary) -> None: ...
+
+def ParseMessage(descriptor, byte_str): ...
+def MakeClass(descriptor): ...
diff --git a/typeshed/third_party/2.7/kazoo/__init__.pyi b/typeshed/third_party/2.7/kazoo/__init__.pyi
new file mode 100644
index 0000000..70b8648
--- /dev/null
+++ b/typeshed/third_party/2.7/kazoo/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for kazoo (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/2.7/kazoo/client.pyi b/typeshed/third_party/2.7/kazoo/client.pyi
new file mode 100644
index 0000000..b174940
--- /dev/null
+++ b/typeshed/third_party/2.7/kazoo/client.pyi
@@ -0,0 +1,100 @@
+# Stubs for kazoo.client (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+string_types = ... # type: Any
+bytes_types = ... # type: Any
+LOST_STATES = ... # type: Any
+ENVI_VERSION = ... # type: Any
+ENVI_VERSION_KEY = ... # type: Any
+log = ... # type: Any
+
+class KazooClient:
+    logger = ... # type: Any
+    handler = ... # type: Any
+    auth_data = ... # type: Any
+    default_acl = ... # type: Any
+    randomize_hosts = ... # type: Any
+    hosts = ... # type: Any
+    chroot = ... # type: Any
+    state = ... # type: Any
+    state_listeners = ... # type: Any
+    read_only = ... # type: Any
+    retry = ... # type: Any
+    Barrier = ... # type: Any
+    Counter = ... # type: Any
+    DoubleBarrier = ... # type: Any
+    ChildrenWatch = ... # type: Any
+    DataWatch = ... # type: Any
+    Election = ... # type: Any
+    NonBlockingLease = ... # type: Any
+    MultiNonBlockingLease = ... # type: Any
+    Lock = ... # type: Any
+    Party = ... # type: Any
+    Queue = ... # type: Any
+    LockingQueue = ... # type: Any
+    SetPartitioner = ... # type: Any
+    Semaphore = ... # type: Any
+    ShallowParty = ... # type: Any
+    def __init__(self, hosts=..., timeout=..., client_id=..., handler=..., default_acl=..., auth_data=..., read_only=..., randomize_hosts=..., connection_retry=..., command_retry=..., logger=..., **kwargs) -> None: ...
+    @property
+    def client_state(self): ...
+    @property
+    def client_id(self): ...
+    @property
+    def connected(self): ...
+    def set_hosts(self, hosts, randomize_hosts=...): ...
+    def add_listener(self, listener): ...
+    def remove_listener(self, listener): ...
+    def start(self, timeout=...): ...
+    def start_async(self): ...
+    def stop(self): ...
+    def restart(self): ...
+    def close(self): ...
+    def command(self, cmd=...): ...
+    def server_version(self, retries=...): ...
+    def add_auth(self, scheme, credential): ...
+    def add_auth_async(self, scheme, credential): ...
+    def unchroot(self, path): ...
+    def sync_async(self, path): ...
+    def sync(self, path): ...
+    def create(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ...
+    def create_async(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ...
+    def ensure_path(self, path, acl=...): ...
+    def ensure_path_async(self, path, acl=...): ...
+    def exists(self, path, watch=...): ...
+    def exists_async(self, path, watch=...): ...
+    def get(self, path, watch=...): ...
+    def get_async(self, path, watch=...): ...
+    def get_children(self, path, watch=..., include_data=...): ...
+    def get_children_async(self, path, watch=..., include_data=...): ...
+    def get_acls(self, path): ...
+    def get_acls_async(self, path): ...
+    def set_acls(self, path, acls, version=...): ...
+    def set_acls_async(self, path, acls, version=...): ...
+    def set(self, path, value, version=...): ...
+    def set_async(self, path, value, version=...): ...
+    def transaction(self): ...
+    def delete(self, path, version=..., recursive=...): ...
+    def delete_async(self, path, version=...): ...
+    def reconfig(self, joining, leaving, new_members, from_config=...): ...
+    def reconfig_async(self, joining, leaving, new_members, from_config): ...
+
+class TransactionRequest:
+    client = ... # type: Any
+    operations = ... # type: Any
+    committed = ... # type: Any
+    def __init__(self, client) -> None: ...
+    def create(self, path, value=..., acl=..., ephemeral=..., sequence=...): ...
+    def delete(self, path, version=...): ...
+    def set_data(self, path, value, version=...): ...
+    def check(self, path, version): ...
+    def commit_async(self): ...
+    def commit(self): ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_value, exc_tb): ...
+
+class KazooState:
+    ...
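
A minimal sketch of the KazooClient surface stubbed above, assuming the kazoo package is installed and a ZooKeeper server is reachable on 127.0.0.1:2181; the host, paths and payload are placeholders:

    from kazoo.client import KazooClient

    zk = KazooClient(hosts='127.0.0.1:2181')
    zk.start()                                 # blocks until connected or times out

    zk.ensure_path('/app/config')
    zk.create('/app/config/node', b'hello', ephemeral=True)
    data, stat = zk.get('/app/config/node')    # (payload, znode stat)
    children = zk.get_children('/app/config')
    print(data, children)

    zk.stop()
    zk.close()
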
diff --git a/typeshed/third_party/2.7/kazoo/exceptions.pyi b/typeshed/third_party/2.7/kazoo/exceptions.pyi
new file mode 100644
index 0000000..1450e81
--- /dev/null
+++ b/typeshed/third_party/2.7/kazoo/exceptions.pyi
@@ -0,0 +1,62 @@
+# Stubs for kazoo.exceptions (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class KazooException(Exception): ...
+class ZookeeperError(KazooException): ...
+class CancelledError(KazooException): ...
+class ConfigurationError(KazooException): ...
+class ZookeeperStoppedError(KazooException): ...
+class ConnectionDropped(KazooException): ...
+class LockTimeout(KazooException): ...
+class WriterNotClosedException(KazooException): ...
+
+EXCEPTIONS = ... # type: Any
+
+class RolledBackError(ZookeeperError): ...
+class SystemZookeeperError(ZookeeperError): ...
+class RuntimeInconsistency(ZookeeperError): ...
+class DataInconsistency(ZookeeperError): ...
+class ConnectionLoss(ZookeeperError): ...
+class MarshallingError(ZookeeperError): ...
+class UnimplementedError(ZookeeperError): ...
+class OperationTimeoutError(ZookeeperError): ...
+class BadArgumentsError(ZookeeperError): ...
+class NewConfigNoQuorumError(ZookeeperError): ...
+class ReconfigInProcessError(ZookeeperError): ...
+class APIError(ZookeeperError): ...
+class NoNodeError(ZookeeperError): ...
+class NoAuthError(ZookeeperError): ...
+class BadVersionError(ZookeeperError): ...
+class NoChildrenForEphemeralsError(ZookeeperError): ...
+class NodeExistsError(ZookeeperError): ...
+class NotEmptyError(ZookeeperError): ...
+class SessionExpiredError(ZookeeperError): ...
+class InvalidCallbackError(ZookeeperError): ...
+class InvalidACLError(ZookeeperError): ...
+class AuthFailedError(ZookeeperError): ...
+class SessionMovedError(ZookeeperError): ...
+class NotReadOnlyCallError(ZookeeperError): ...
+class ConnectionClosedError(SessionExpiredError): ...
+
+ConnectionLossException = ... # type: Any
+MarshallingErrorException = ... # type: Any
+SystemErrorException = ... # type: Any
+RuntimeInconsistencyException = ... # type: Any
+DataInconsistencyException = ... # type: Any
+UnimplementedException = ... # type: Any
+OperationTimeoutException = ... # type: Any
+BadArgumentsException = ... # type: Any
+ApiErrorException = ... # type: Any
+NoNodeException = ... # type: Any
+NoAuthException = ... # type: Any
+BadVersionException = ... # type: Any
+NoChildrenForEphemeralsException = ... # type: Any
+NodeExistsException = ... # type: Any
+InvalidACLException = ... # type: Any
+AuthFailedException = ... # type: Any
+NotEmptyException = ... # type: Any
+SessionExpiredException = ... # type: Any
+InvalidCallbackException = ... # type: Any
diff --git a/typeshed/third_party/2.7/kazoo/recipe/__init__.pyi b/typeshed/third_party/2.7/kazoo/recipe/__init__.pyi
new file mode 100644
index 0000000..04a7fa2
--- /dev/null
+++ b/typeshed/third_party/2.7/kazoo/recipe/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for kazoo.recipe (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/2.7/kazoo/recipe/watchers.pyi b/typeshed/third_party/2.7/kazoo/recipe/watchers.pyi
new file mode 100644
index 0000000..554e6ee
--- /dev/null
+++ b/typeshed/third_party/2.7/kazoo/recipe/watchers.pyi
@@ -0,0 +1,25 @@
+# Stubs for kazoo.recipe.watchers (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+log = ... # type: Any
+
+class DataWatch:
+    def __init__(self, client, path, func=..., *args, **kwargs) -> None: ...
+    def __call__(self, func): ...
+
+class ChildrenWatch:
+    def __init__(self, client, path, func=..., allow_session_lost=..., send_event=...) -> None: ...
+    def __call__(self, func): ...
+
+class PatientChildrenWatch:
+    client = ... # type: Any
+    path = ... # type: Any
+    children = ... # type: Any
+    time_boundary = ... # type: Any
+    children_changed = ... # type: Any
+    def __init__(self, client, path, time_boundary=...) -> None: ...
+    asy = ... # type: Any
+    def start(self): ...
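
DataWatch and ChildrenWatch are typically applied as decorators via the __call__ hook stubbed above. A small sketch, assuming a started KazooClient as in the earlier sketch; the (data, stat) callback signature is kazoo's documented convention rather than something visible in the stub:

    from kazoo.client import KazooClient
    from kazoo.recipe.watchers import DataWatch

    zk = KazooClient(hosts='127.0.0.1:2181')
    zk.start()

    @DataWatch(zk, '/app/config/node')
    def on_change(data, stat):
        # Re-invoked with the current payload and znode stat whenever the node changes.
        version = stat.version if stat else None
        print('node is now %r (version %s)' % (data, version))
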
diff --git a/typeshed/third_party/2.7/pycurl.pyi b/typeshed/third_party/2.7/pycurl.pyi
new file mode 100644
index 0000000..2194fad
--- /dev/null
+++ b/typeshed/third_party/2.7/pycurl.pyi
@@ -0,0 +1,81 @@
+# TODO(MichalPokorny): more precise types
+
+from typing import Any, Tuple, Optional
+
+GLOBAL_SSL = ... # type: int
+GLOBAL_WIN32 = ... # type: int
+GLOBAL_ALL = ... # type: int
+GLOBAL_NOTHING = ... # type: int
+GLOBAL_DEFAULT = ... # type: int
+
+def global_init(option: int) -> None: ...
+def global_cleanup() -> None: ...
+
+version = ... # type: str
+
+def version_info() -> Tuple[int, str, int, str, int, str,
+                            int, str, tuple, Any, int, Any]: ...
+
+class error(Exception):
+    pass
+
+class Curl(object):
+    def close(self) -> None: ...
+    def setopt(self, option: int, value: Any) -> None: ...
+    def perform(self) -> None: ...
+    def getinfo(self, info: Any) -> Any: ...
+    def reset(self) -> None: ...
+    def unsetopt(self, option: int) -> Any: ...
+    def pause(self, bitmask: Any) -> Any: ...
+    def errstr(self) -> str: ...
+
+    # TODO(MichalPokorny): wat?
+    USERPWD = ... # type: int
+
+class CurlMulti(object):
+    def close(self) -> None: ...
+    def add_handle(self, obj: Curl) -> None: ...
+    def remove_handle(self, obj: Curl) -> None: ...
+    def perform(self) -> Tuple[Any, int]: ...
+    def fdset(self) -> tuple: ...
+    def select(self, timeout: float = ...) -> int: ...
+    def info_read(self, max_objects: int) -> tuple: ...
+
+class CurlShare(object):
+    def close(self) -> None: ...
+    def setopt(self, option: int, value: Any) -> Any: ...
+
+CAINFO = ... # type: int
+CONNECTTIMEOUT_MS = ... # type: int
+CUSTOMREQUEST = ... # type: int
+ENCODING = ... # type: int
+E_CALL_MULTI_PERFORM = ... # type: int
+E_OPERATION_TIMEOUTED = ... # type: int
+FOLLOWLOCATION = ... # type: int
+HEADERFUNCTION = ... # type: int
+HTTPGET = ... # type: int
+HTTPHEADER = ... # type: int
+HTTP_CODE = ... # type: int
+INFILESIE_LARGE = ... # type: int
+INFILESIZE_LARGE = ... # type: int
+NOBODY = ... # type: int
+NOPROGRESS = ... # type: int
+NOSIGNAL = ... # type: int
+POST = ... # type: int
+POSTFIELDS = ... # type: int
+POSTFIELDSIZE = ... # type: int
+PRIMARY_IP = ... # type: int
+PROGRESSFUNCTION = ... # type: int
+PROXY = ... # type: int
+READFUNCTION = ... # type: int
+RESPONSE_CODE = ... # type: int
+SSLCERT = ... # type: int
+SSLCERTPASSWD = ... # type: int
+SSLKEY = ... # type: int
+SSLKEYPASSWD = ... # type: int
+SSL_VERIFYHOST = ... # type: int
+SSL_VERIFYPEER = ... # type: int
+TIMEOUT_MS = ... # type: int
+UPLOAD = ... # type: int
+URL = ... # type: int
+WRITEFUNCTION = ... # type: int
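
A minimal sketch of the Curl interface and option constants stubbed above, assuming pycurl is installed and the URL (a placeholder) is reachable:

    from io import BytesIO
    import pycurl

    buf = BytesIO()
    c = pycurl.Curl()
    c.setopt(pycurl.URL, 'http://example.com/')
    c.setopt(pycurl.WRITEFUNCTION, buf.write)   # collect the response body
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.perform()
    status = c.getinfo(pycurl.RESPONSE_CODE)
    c.close()
    print(status, len(buf.getvalue()))
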
diff --git a/typeshed/third_party/2.7/redis/__init__.pyi b/typeshed/third_party/2.7/redis/__init__.pyi
new file mode 100644
index 0000000..f1bc24e
--- /dev/null
+++ b/typeshed/third_party/2.7/redis/__init__.pyi
@@ -0,0 +1,28 @@
+# Stubs for redis (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from redis import client
+from redis import connection
+from redis import utils
+from redis import exceptions
+
+Redis = client.Redis
+StrictRedis = client.StrictRedis
+BlockingConnectionPool = connection.BlockingConnectionPool
+ConnectionPool = connection.ConnectionPool
+Connection = connection.Connection
+SSLConnection = connection.SSLConnection
+UnixDomainSocketConnection = connection.UnixDomainSocketConnection
+from_url = utils.from_url
+AuthenticationError = exceptions.AuthenticationError
+BusyLoadingError = exceptions.BusyLoadingError
+ConnectionError = exceptions.ConnectionError
+DataError = exceptions.DataError
+InvalidResponse = exceptions.InvalidResponse
+PubSubError = exceptions.PubSubError
+ReadOnlyError = exceptions.ReadOnlyError
+RedisError = exceptions.RedisError
+ResponseError = exceptions.ResponseError
+TimeoutError = exceptions.TimeoutError
+WatchError = exceptions.WatchError
diff --git a/typeshed/third_party/2.7/redis/client.pyi b/typeshed/third_party/2.7/redis/client.pyi
new file mode 100644
index 0000000..6a55f80
--- /dev/null
+++ b/typeshed/third_party/2.7/redis/client.pyi
@@ -0,0 +1,293 @@
+# Stubs for redis.client (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+SYM_EMPTY = ... # type: Any
+
+def list_or_args(keys, args): ...
+def timestamp_to_datetime(response): ...
+def string_keys_to_dict(key_string, callback): ...
+def dict_merge(*dicts): ...
+def parse_debug_object(response): ...
+def parse_object(response, infotype): ...
+def parse_info(response): ...
+
+SENTINEL_STATE_TYPES = ... # type: Any
+
+def parse_sentinel_state(item): ...
+def parse_sentinel_master(response): ...
+def parse_sentinel_masters(response): ...
+def parse_sentinel_slaves_and_sentinels(response): ...
+def parse_sentinel_get_master(response): ...
+def pairs_to_dict(response): ...
+def pairs_to_dict_typed(response, type_info): ...
+def zset_score_pairs(response, **options): ...
+def sort_return_tuples(response, **options): ...
+def int_or_none(response): ...
+def float_or_none(response): ...
+def bool_ok(response): ...
+def parse_client_list(response, **options): ...
+def parse_config_get(response, **options): ...
+def parse_scan(response, **options): ...
+def parse_hscan(response, **options): ...
+def parse_zscan(response, **options): ...
+def parse_slowlog_get(response, **options): ...
+
+class StrictRedis:
+    RESPONSE_CALLBACKS = ... # type: Any
+    @classmethod
+    def from_url(cls, url, db=..., **kwargs): ...
+    connection_pool = ... # type: Any
+    response_callbacks = ... # type: Any
+    def __init__(self, host=..., port=..., db=..., password=..., socket_timeout=..., socket_connect_timeout=..., socket_keepalive=..., socket_keepalive_options=..., connection_pool=..., unix_socket_path=..., encoding=..., encoding_errors=..., charset=..., errors=..., decode_responses=..., retry_on_timeout=..., ssl=..., ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=...) -> None: ...
+    def set_response_callback(self, command, callback): ...
+    def pipeline(self, transaction=..., shard_hint=...): ...
+    def transaction(self, func, *watches, **kwargs): ...
+    def lock(self, name, timeout=..., sleep=..., blocking_timeout=..., lock_class=..., thread_local=...): ...
+    def pubsub(self, **kwargs): ...
+    def execute_command(self, *args, **options): ...
+    def parse_response(self, connection, command_name, **options): ...
+    def bgrewriteaof(self): ...
+    def bgsave(self): ...
+    def client_kill(self, address): ...
+    def client_list(self): ...
+    def client_getname(self): ...
+    def client_setname(self, name): ...
+    def config_get(self, pattern=...): ...
+    def config_set(self, name, value): ...
+    def config_resetstat(self): ...
+    def config_rewrite(self): ...
+    def dbsize(self): ...
+    def debug_object(self, key): ...
+    def echo(self, value): ...
+    def flushall(self): ...
+    def flushdb(self): ...
+    def info(self, section=...): ...
+    def lastsave(self): ...
+    def object(self, infotype, key): ...
+    def ping(self): ...
+    def save(self): ...
+    def sentinel(self, *args): ...
+    def sentinel_get_master_addr_by_name(self, service_name): ...
+    def sentinel_master(self, service_name): ...
+    def sentinel_masters(self): ...
+    def sentinel_monitor(self, name, ip, port, quorum): ...
+    def sentinel_remove(self, name): ...
+    def sentinel_sentinels(self, service_name): ...
+    def sentinel_set(self, name, option, value): ...
+    def sentinel_slaves(self, service_name): ...
+    def shutdown(self): ...
+    def slaveof(self, host=..., port=...): ...
+    def slowlog_get(self, num=...): ...
+    def slowlog_len(self): ...
+    def slowlog_reset(self): ...
+    def time(self): ...
+    def append(self, key, value): ...
+    def bitcount(self, key, start=..., end=...): ...
+    def bitop(self, operation, dest, *keys): ...
+    def bitpos(self, key, bit, start=..., end=...): ...
+    def decr(self, name, amount=...): ...
+    def delete(self, *names): ...
+    def __delitem__(self, name): ...
+    def dump(self, name): ...
+    def exists(self, name): ...
+    __contains__ = ... # type: Any
+    def expire(self, name, time): ...
+    def expireat(self, name, when): ...
+    def get(self, name): ...
+    def __getitem__(self, name): ...
+    def getbit(self, name, offset): ...
+    def getrange(self, key, start, end): ...
+    def getset(self, name, value): ...
+    def incr(self, name, amount=...): ...
+    def incrby(self, name, amount=...): ...
+    def incrbyfloat(self, name, amount=...): ...
+    def keys(self, pattern=...): ...
+    def mget(self, keys, *args): ...
+    def mset(self, *args, **kwargs): ...
+    def msetnx(self, *args, **kwargs): ...
+    def move(self, name, db): ...
+    def persist(self, name): ...
+    def pexpire(self, name, time): ...
+    def pexpireat(self, name, when): ...
+    def psetex(self, name, time_ms, value): ...
+    def pttl(self, name): ...
+    def randomkey(self): ...
+    def rename(self, src, dst): ...
+    def renamenx(self, src, dst): ...
+    def restore(self, name, ttl, value): ...
+    def set(self, name, value, ex=..., px=..., nx=..., xx=...): ...
+    def __setitem__(self, name, value): ...
+    def setbit(self, name, offset, value): ...
+    def setex(self, name, time, value): ...
+    def setnx(self, name, value): ...
+    def setrange(self, name, offset, value): ...
+    def strlen(self, name): ...
+    def substr(self, name, start, end=...): ...
+    def ttl(self, name): ...
+    def type(self, name): ...
+    def watch(self, *names): ...
+    def unwatch(self): ...
+    def blpop(self, keys, timeout=...): ...
+    def brpop(self, keys, timeout=...): ...
+    def brpoplpush(self, src, dst, timeout=...): ...
+    def lindex(self, name, index): ...
+    def linsert(self, name, where, refvalue, value): ...
+    def llen(self, name): ...
+    def lpop(self, name): ...
+    def lpush(self, name, *values): ...
+    def lpushx(self, name, value): ...
+    def lrange(self, name, start, end): ...
+    def lrem(self, name, count, value): ...
+    def lset(self, name, index, value): ...
+    def ltrim(self, name, start, end): ...
+    def rpop(self, name): ...
+    def rpoplpush(self, src, dst): ...
+    def rpush(self, name, *values): ...
+    def rpushx(self, name, value): ...
+    def sort(self, name, start=..., num=..., by=..., get=..., desc=..., alpha=..., store=..., groups=...): ...
+    def scan(self, cursor=..., match=..., count=...): ...
+    def scan_iter(self, match=..., count=...): ...
+    def sscan(self, name, cursor=..., match=..., count=...): ...
+    def sscan_iter(self, name, match=..., count=...): ...
+    def hscan(self, name, cursor=..., match=..., count=...): ...
+    def hscan_iter(self, name, match=..., count=...): ...
+    def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...): ...
+    def zscan_iter(self, name, match=..., count=..., score_cast_func=...): ...
+    def sadd(self, name, *values): ...
+    def scard(self, name): ...
+    def sdiff(self, keys, *args): ...
+    def sdiffstore(self, dest, keys, *args): ...
+    def sinter(self, keys, *args): ...
+    def sinterstore(self, dest, keys, *args): ...
+    def sismember(self, name, value): ...
+    def smembers(self, name): ...
+    def smove(self, src, dst, value): ...
+    def spop(self, name): ...
+    def srandmember(self, name, number=...): ...
+    def srem(self, name, *values): ...
+    def sunion(self, keys, *args): ...
+    def sunionstore(self, dest, keys, *args): ...
+    def zadd(self, name, *args, **kwargs): ...
+    def zcard(self, name): ...
+    def zcount(self, name, min, max): ...
+    def zincrby(self, name, value, amount=...): ...
+    def zinterstore(self, dest, keys, aggregate=...): ...
+    def zlexcount(self, name, min, max): ...
+    def zrange(self, name, start, end, desc=..., withscores=..., score_cast_func=...): ...
+    def zrangebylex(self, name, min, max, start=..., num=...): ...
+    def zrangebyscore(self, name, min, max, start=..., num=..., withscores=..., score_cast_func=...): ...
+    def zrank(self, name, value): ...
+    def zrem(self, name, *values): ...
+    def zremrangebylex(self, name, min, max): ...
+    def zremrangebyrank(self, name, min, max): ...
+    def zremrangebyscore(self, name, min, max): ...
+    def zrevrange(self, name, start, end, withscores=..., score_cast_func=...): ...
+    def zrevrangebyscore(self, name, max, min, start=..., num=..., withscores=..., score_cast_func=...): ...
+    def zrevrank(self, name, value): ...
+    def zscore(self, name, value): ...
+    def zunionstore(self, dest, keys, aggregate=...): ...
+    def pfadd(self, name, *values): ...
+    def pfcount(self, name): ...
+    def pfmerge(self, dest, *sources): ...
+    def hdel(self, name, *keys): ...
+    def hexists(self, name, key): ...
+    def hget(self, name, key): ...
+    def hgetall(self, name): ...
+    def hincrby(self, name, key, amount=...): ...
+    def hincrbyfloat(self, name, key, amount=...): ...
+    def hkeys(self, name): ...
+    def hlen(self, name): ...
+    def hset(self, name, key, value): ...
+    def hsetnx(self, name, key, value): ...
+    def hmset(self, name, mapping): ...
+    def hmget(self, name, keys, *args): ...
+    def hvals(self, name): ...
+    def publish(self, channel, message): ...
+    def eval(self, script, numkeys, *keys_and_args): ...
+    def evalsha(self, sha, numkeys, *keys_and_args): ...
+    def script_exists(self, *args): ...
+    def script_flush(self): ...
+    def script_kill(self): ...
+    def script_load(self, script): ...
+    def register_script(self, script): ...
+
+class Redis(StrictRedis):
+    RESPONSE_CALLBACKS = ... # type: Any
+    def pipeline(self, transaction=..., shard_hint=...): ...
+    def setex(self, name, value, time): ...
+    def lrem(self, name, value, num=...): ...
+    def zadd(self, name, *args, **kwargs): ...
+
+class PubSub:
+    PUBLISH_MESSAGE_TYPES = ... # type: Any
+    UNSUBSCRIBE_MESSAGE_TYPES = ... # type: Any
+    connection_pool = ... # type: Any
+    shard_hint = ... # type: Any
+    ignore_subscribe_messages = ... # type: Any
+    connection = ... # type: Any
+    encoding = ... # type: Any
+    encoding_errors = ... # type: Any
+    decode_responses = ... # type: Any
+    def __init__(self, connection_pool, shard_hint=..., ignore_subscribe_messages=...) -> None: ...
+    def __del__(self): ...
+    channels = ... # type: Any
+    patterns = ... # type: Any
+    def reset(self): ...
+    def close(self): ...
+    def on_connect(self, connection): ...
+    def encode(self, value): ...
+    @property
+    def subscribed(self): ...
+    def execute_command(self, *args, **kwargs): ...
+    def parse_response(self, block=...): ...
+    def psubscribe(self, *args, **kwargs): ...
+    def punsubscribe(self, *args): ...
+    def subscribe(self, *args, **kwargs): ...
+    def unsubscribe(self, *args): ...
+    def listen(self): ...
+    def get_message(self, ignore_subscribe_messages=...): ...
+    def handle_message(self, response, ignore_subscribe_messages=...): ...
+    def run_in_thread(self, sleep_time=...): ...
+
+class BasePipeline:
+    UNWATCH_COMMANDS = ... # type: Any
+    connection_pool = ... # type: Any
+    connection = ... # type: Any
+    response_callbacks = ... # type: Any
+    transaction = ... # type: Any
+    shard_hint = ... # type: Any
+    watching = ... # type: Any
+    def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_value, traceback): ...
+    def __del__(self): ...
+    def __len__(self): ...
+    command_stack = ... # type: Any
+    scripts = ... # type: Any
+    explicit_transaction = ... # type: Any
+    def reset(self): ...
+    def multi(self): ...
+    def execute_command(self, *args, **kwargs): ...
+    def immediate_execute_command(self, *args, **options): ...
+    def pipeline_execute_command(self, *args, **options): ...
+    def raise_first_error(self, commands, response): ...
+    def annotate_exception(self, exception, number, command): ...
+    def parse_response(self, connection, command_name, **options): ...
+    def load_scripts(self): ...
+    def execute(self, raise_on_error=...): ...
+    def watch(self, *names): ...
+    def unwatch(self): ...
+    def script_load_for_pipeline(self, script): ...
+
+class StrictPipeline(BasePipeline, StrictRedis): ...
+class Pipeline(BasePipeline, Redis): ...
+
+class Script:
+    registered_client = ... # type: Any
+    script = ... # type: Any
+    sha = ... # type: Any
+    def __init__(self, registered_client, script) -> None: ...
+    def __call__(self, keys=..., args=..., client=...): ...
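
A minimal sketch of the StrictRedis client and pipeline API stubbed above, assuming the redis package is installed and a Redis server is listening on localhost:6379:

    import redis

    r = redis.StrictRedis(host='localhost', port=6379, db=0)
    r.set('greeting', 'hello')
    print(r.get('greeting'))        # 'hello' (bytes on Python 3)

    # A pipeline batches several commands into a single round trip.
    pipe = r.pipeline()
    pipe.incr('hits')
    pipe.expire('hits', 60)
    print(pipe.execute())           # list with one result per queued command
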
diff --git a/typeshed/third_party/2.7/redis/connection.pyi b/typeshed/third_party/2.7/redis/connection.pyi
new file mode 100644
index 0000000..f17c062
--- /dev/null
+++ b/typeshed/third_party/2.7/redis/connection.pyi
@@ -0,0 +1,135 @@
+# Stubs for redis.connection (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+ssl_available = ... # type: Any
+hiredis_version = ... # type: Any
+HIREDIS_SUPPORTS_CALLABLE_ERRORS = ... # type: Any
+HIREDIS_SUPPORTS_BYTE_BUFFER = ... # type: Any
+msg = ... # type: Any
+HIREDIS_USE_BYTE_BUFFER = ... # type: Any
+SYM_STAR = ... # type: Any
+SYM_DOLLAR = ... # type: Any
+SYM_CRLF = ... # type: Any
+SYM_EMPTY = ... # type: Any
+SERVER_CLOSED_CONNECTION_ERROR = ... # type: Any
+
+class Token:
+    value = ... # type: Any
+    def __init__(self, value) -> None: ...
+
+class BaseParser:
+    EXCEPTION_CLASSES = ... # type: Any
+    def parse_error(self, response): ...
+
+class SocketBuffer:
+    socket_read_size = ... # type: Any
+    bytes_written = ... # type: Any
+    bytes_read = ... # type: Any
+    def __init__(self, socket, socket_read_size) -> None: ...
+    @property
+    def length(self): ...
+    def read(self, length): ...
+    def readline(self): ...
+    def purge(self): ...
+    def close(self): ...
+
+class PythonParser(BaseParser):
+    encoding = ... # type: Any
+    socket_read_size = ... # type: Any
+    def __init__(self, socket_read_size) -> None: ...
+    def __del__(self): ...
+    def on_connect(self, connection): ...
+    def on_disconnect(self): ...
+    def can_read(self): ...
+    def read_response(self): ...
+
+class HiredisParser(BaseParser):
+    socket_read_size = ... # type: Any
+    def __init__(self, socket_read_size) -> None: ...
+    def __del__(self): ...
+    def on_connect(self, connection): ...
+    def on_disconnect(self): ...
+    def can_read(self): ...
+    def read_response(self): ...
+
+DefaultParser = ... # type: Any
+
+class Connection:
+    description_format = ... # type: Any
+    pid = ... # type: Any
+    host = ... # type: Any
+    port = ... # type: Any
+    db = ... # type: Any
+    password = ... # type: Any
+    socket_timeout = ... # type: Any
+    socket_connect_timeout = ... # type: Any
+    socket_keepalive = ... # type: Any
+    socket_keepalive_options = ... # type: Any
+    retry_on_timeout = ... # type: Any
+    encoding = ... # type: Any
+    encoding_errors = ... # type: Any
+    decode_responses = ... # type: Any
+    def __init__(self, host=..., port=..., db=..., password=..., socket_timeout=..., socket_connect_timeout=..., socket_keepalive=..., socket_keepalive_options=..., retry_on_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., parser_class=..., socket_read_size=...) -> None: ...
+    def __del__(self): ...
+    def register_connect_callback(self, callback): ...
+    def clear_connect_callbacks(self): ...
+    def connect(self): ...
+    def on_connect(self): ...
+    def disconnect(self): ...
+    def send_packed_command(self, command): ...
+    def send_command(self, *args): ...
+    def can_read(self): ...
+    def read_response(self): ...
+    def encode(self, value): ...
+    def pack_command(self, *args): ...
+    def pack_commands(self, commands): ...
+
+class SSLConnection(Connection):
+    description_format = ... # type: Any
+    keyfile = ... # type: Any
+    certfile = ... # type: Any
+    cert_reqs = ... # type: Any
+    ca_certs = ... # type: Any
+    def __init__(self, ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=..., **kwargs) -> None: ...
+
+class UnixDomainSocketConnection(Connection):
+    description_format = ... # type: Any
+    pid = ... # type: Any
+    path = ... # type: Any
+    db = ... # type: Any
+    password = ... # type: Any
+    socket_timeout = ... # type: Any
+    retry_on_timeout = ... # type: Any
+    encoding = ... # type: Any
+    encoding_errors = ... # type: Any
+    decode_responses = ... # type: Any
+    def __init__(self, path=..., db=..., password=..., socket_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., retry_on_timeout=..., parser_class=..., socket_read_size=...) -> None: ...
+
+class ConnectionPool:
+    @classmethod
+    def from_url(cls, url, db=..., **kwargs): ...
+    connection_class = ... # type: Any
+    connection_kwargs = ... # type: Any
+    max_connections = ... # type: Any
+    def __init__(self, connection_class=..., max_connections=..., **connection_kwargs) -> None: ...
+    pid = ... # type: Any
+    def reset(self): ...
+    def get_connection(self, command_name, *keys, **options): ...
+    def make_connection(self): ...
+    def release(self, connection): ...
+    def disconnect(self): ...
+
+class BlockingConnectionPool(ConnectionPool):
+    queue_class = ... # type: Any
+    timeout = ... # type: Any
+    def __init__(self, max_connections=..., timeout=..., connection_class=..., queue_class=..., **connection_kwargs) -> None: ...
+    pid = ... # type: Any
+    pool = ... # type: Any
+    def reset(self): ...
+    def make_connection(self): ...
+    def get_connection(self, command_name, *keys, **options): ...
+    def release(self, connection): ...
+    def disconnect(self): ...
diff --git a/typeshed/third_party/2.7/redis/exceptions.pyi b/typeshed/third_party/2.7/redis/exceptions.pyi
new file mode 100644
index 0000000..97a11f5
--- /dev/null
+++ b/typeshed/third_party/2.7/redis/exceptions.pyi
@@ -0,0 +1,21 @@
+# Stubs for redis.exceptions (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class RedisError(Exception): ...
+
+def __unicode__(self): ...
+
+class AuthenticationError(RedisError): ...
+class ConnectionError(RedisError): ...
+class TimeoutError(RedisError): ...
+class BusyLoadingError(ConnectionError): ...
+class InvalidResponse(RedisError): ...
+class ResponseError(RedisError): ...
+class DataError(RedisError): ...
+class PubSubError(RedisError): ...
+class WatchError(RedisError): ...
+class NoScriptError(ResponseError): ...
+class ExecAbortError(ResponseError): ...
+class ReadOnlyError(ResponseError): ...
+class LockError(RedisError, ValueError): ...
diff --git a/typeshed/third_party/2.7/redis/utils.pyi b/typeshed/third_party/2.7/redis/utils.pyi
new file mode 100644
index 0000000..9cabf0d
--- /dev/null
+++ b/typeshed/third_party/2.7/redis/utils.pyi
@@ -0,0 +1,12 @@
+# Stubs for redis.utils (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+HIREDIS_AVAILABLE = ... # type: Any
+
+def from_url(url, db=..., **kwargs): ...
+def pipeline(redis_obj): ...
+
+class dummy: ...
diff --git a/typeshed/third_party/2.7/requests/__init__.pyi b/typeshed/third_party/2.7/requests/__init__.pyi
new file mode 100644
index 0000000..6ea56ef
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/__init__.pyi
@@ -0,0 +1,38 @@
+# Stubs for requests (based on version 2.6.0, Python 3)
+
+from typing import Any
+from requests import models
+from requests import api
+from requests import sessions
+from requests import status_codes
+from requests import exceptions
+import logging
+
+__title__ = ...  # type: Any
+__build__ = ...  # type: Any
+__license__ = ...  # type: Any
+__copyright__ = ...  # type: Any
+
+Request = models.Request
+Response = models.Response
+PreparedRequest = models.PreparedRequest
+request = api.request
+get = api.get
+head = api.head
+post = api.post
+patch = api.patch
+put = api.put
+delete = api.delete
+options = api.options
+session = sessions.session
+Session = sessions.Session
+codes = status_codes.codes
+RequestException = exceptions.RequestException
+Timeout = exceptions.Timeout
+URLRequired = exceptions.URLRequired
+TooManyRedirects = exceptions.TooManyRedirects
+HTTPError = exceptions.HTTPError
+ConnectionError = exceptions.ConnectionError
+
+class NullHandler(logging.Handler):
+    def emit(self, record): ...
diff --git a/typeshed/third_party/2.7/requests/adapters.pyi b/typeshed/third_party/2.7/requests/adapters.pyi
new file mode 100644
index 0000000..109dc9a
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/adapters.pyi
@@ -0,0 +1,69 @@
+# Stubs for requests.adapters (Python 3)
+
+from typing import Any
+from . import models
+from .packages.urllib3 import poolmanager
+from .packages.urllib3 import response
+from .packages.urllib3.util import retry
+from . import compat
+from . import utils
+from . import structures
+from .packages.urllib3 import exceptions as urllib3_exceptions
+from . import cookies
+from . import exceptions
+from . import auth
+
+Response = models.Response
+PoolManager = poolmanager.PoolManager
+proxy_from_url = poolmanager.proxy_from_url
+HTTPResponse = response.HTTPResponse
+Retry = retry.Retry
+DEFAULT_CA_BUNDLE_PATH = utils.DEFAULT_CA_BUNDLE_PATH
+get_encoding_from_headers = utils.get_encoding_from_headers
+prepend_scheme_if_needed = utils.prepend_scheme_if_needed
+get_auth_from_url = utils.get_auth_from_url
+urldefragauth = utils.urldefragauth
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+ConnectTimeoutError = urllib3_exceptions.ConnectTimeoutError
+MaxRetryError = urllib3_exceptions.MaxRetryError
+ProtocolError = urllib3_exceptions.ProtocolError
+ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
+ResponseError = urllib3_exceptions.ResponseError
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+ConnectionError = exceptions.ConnectionError
+ConnectTimeout = exceptions.ConnectTimeout
+ReadTimeout = exceptions.ReadTimeout
+SSLError = exceptions.SSLError
+ProxyError = exceptions.ProxyError
+RetryError = exceptions.RetryError
+
+DEFAULT_POOLBLOCK = ...  # type: Any
+DEFAULT_POOLSIZE = ...  # type: Any
+DEFAULT_RETRIES = ...  # type: Any
+
+class BaseAdapter:
+    def __init__(self) -> None: ...
+    # TODO: "request" parameter not actually supported, added to please mypy.
+    def send(self, request=...): ...
+    def close(self): ...
+
+class HTTPAdapter(BaseAdapter):
+    __attrs__ = ...  # type: Any
+    max_retries = ...  # type: Any
+    config = ...  # type: Any
+    proxy_manager = ...  # type: Any
+    def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=...,
+                 pool_block=...): ...
+    poolmanager = ...  # type: Any
+    def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ...
+    def proxy_manager_for(self, proxy, **proxy_kwargs): ...
+    def cert_verify(self, conn, url, verify, cert): ...
+    def build_response(self, req, resp): ...
+    def get_connection(self, url, proxies=...): ...
+    def close(self): ...
+    def request_url(self, request, proxies): ...
+    def add_headers(self, request, **kwargs): ...
+    def proxy_headers(self, proxy): ...
+    # TODO: "request" is not actually optional, modified to please mypy.
+    def send(self, request=..., stream=..., timeout=..., verify=..., cert=...,
+             proxies=...): ...
diff --git a/typeshed/third_party/2.7/requests/api.pyi b/typeshed/third_party/2.7/requests/api.pyi
new file mode 100644
index 0000000..7b04128
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/api.pyi
@@ -0,0 +1,14 @@
+# Stubs for requests.api (Python 3)
+
+import typing
+
+from .models import Response
+
+def request(method: str, url: str, **kwargs) -> Response: ...
+def get(url: str, **kwargs) -> Response: ...
+def options(url: str, **kwargs) -> Response: ...
+def head(url: str, **kwargs) -> Response: ...
+def post(url: str, data=..., json=..., **kwargs) -> Response: ...
+def put(url: str, data=..., **kwargs) -> Response: ...
+def patch(url: str, data=..., **kwargs) -> Response: ...
+def delete(url: str, **kwargs) -> Response: ...
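
A minimal sketch of the module-level helpers stubbed above, assuming the requests package is installed and network access is available; status_code, headers, json() and raise_for_status() live on requests.Response (models.pyi), not in this file, and httpbin.org is only a placeholder endpoint:

    import requests

    resp = requests.get('https://httpbin.org/get', params={'q': 'mypy'})
    print(resp.status_code)                  # e.g. 200
    print(resp.headers['Content-Type'])
    payload = resp.json()                    # parsed JSON body
    print(payload)

    resp = requests.post('https://httpbin.org/post', json={'name': 'mypy'})
    resp.raise_for_status()                  # raises requests.HTTPError on 4xx/5xx
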
diff --git a/typeshed/third_party/2.7/requests/auth.pyi b/typeshed/third_party/2.7/requests/auth.pyi
new file mode 100644
index 0000000..8eea2b0
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/auth.pyi
@@ -0,0 +1,41 @@
+# Stubs for requests.auth (Python 3)
+
+from typing import Any
+from . import compat
+from . import cookies
+from . import utils
+from . import status_codes
+
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+parse_dict_header = utils.parse_dict_header
+to_native_string = utils.to_native_string
+codes = status_codes.codes
+
+CONTENT_TYPE_FORM_URLENCODED = ...  # type: Any
+CONTENT_TYPE_MULTI_PART = ...  # type: Any
+
+class AuthBase:
+    def __call__(self, r): ...
+
+class HTTPBasicAuth(AuthBase):
+    username = ...  # type: Any
+    password = ...  # type: Any
+    def __init__(self, username, password) -> None: ...
+    def __call__(self, r): ...
+
+class HTTPProxyAuth(HTTPBasicAuth):
+    def __call__(self, r): ...
+
+class HTTPDigestAuth(AuthBase):
+    username = ...  # type: Any
+    password = ...  # type: Any
+    last_nonce = ...  # type: Any
+    nonce_count = ...  # type: Any
+    chal = ...  # type: Any
+    pos = ...  # type: Any
+    num_401_calls = ...  # type: Any
+    def __init__(self, username, password) -> None: ...
+    def build_digest_header(self, method, url): ...
+    def handle_redirect(self, r, **kwargs): ...
+    def handle_401(self, r, **kwargs): ...
+    def __call__(self, r): ...
diff --git a/typeshed/third_party/2.7/requests/compat.pyi b/typeshed/third_party/2.7/requests/compat.pyi
new file mode 100644
index 0000000..63b92f6
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/compat.pyi
@@ -0,0 +1,6 @@
+# Stubs for requests.compat (Python 3.4)
+
+from typing import Any
+import collections
+
+OrderedDict = collections.OrderedDict
diff --git a/typeshed/third_party/2.7/requests/cookies.pyi b/typeshed/third_party/2.7/requests/cookies.pyi
new file mode 100644
index 0000000..6f56c82
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/cookies.pyi
@@ -0,0 +1,61 @@
+# Stubs for requests.cookies (Python 3)
+
+from typing import Any, MutableMapping
+import collections
+from . import compat
+
+class MockRequest:
+    type = ...  # type: Any
+    def __init__(self, request) -> None: ...
+    def get_type(self): ...
+    def get_host(self): ...
+    def get_origin_req_host(self): ...
+    def get_full_url(self): ...
+    def is_unverifiable(self): ...
+    def has_header(self, name): ...
+    def get_header(self, name, default=...): ...
+    def add_header(self, key, val): ...
+    def add_unredirected_header(self, name, value): ...
+    def get_new_headers(self): ...
+    @property
+    def unverifiable(self): ...
+    @property
+    def origin_req_host(self): ...
+    @property
+    def host(self): ...
+
+class MockResponse:
+    def __init__(self, headers) -> None: ...
+    def info(self): ...
+    def getheaders(self, name): ...
+
+def extract_cookies_to_jar(jar, request, response): ...
+def get_cookie_header(jar, request): ...
+def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ...
+
+class CookieConflictError(RuntimeError): ...
+
+class RequestsCookieJar(MutableMapping):
+    def get(self, name, default=..., domain=..., path=...): ...
+    def set(self, name, value, **kwargs): ...
+    def iterkeys(self): ...
+    def keys(self): ...
+    def itervalues(self): ...
+    def values(self): ...
+    def iteritems(self): ...
+    def items(self): ...
+    def list_domains(self): ...
+    def list_paths(self): ...
+    def multiple_domains(self): ...
+    def get_dict(self, domain=..., path=...): ...
+    def __getitem__(self, name): ...
+    def __setitem__(self, name, value): ...
+    def __delitem__(self, name): ...
+    def set_cookie(self, cookie, *args, **kwargs): ...
+    def update(self, other): ...
+    def copy(self): ...
+
+def create_cookie(name, value, **kwargs): ...
+def morsel_to_cookie(morsel): ...
+def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ...
+def merge_cookies(cookiejar, cookies): ...
diff --git a/typeshed/third_party/2.7/requests/exceptions.pyi b/typeshed/third_party/2.7/requests/exceptions.pyi
new file mode 100644
index 0000000..ff0c328
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/exceptions.pyi
@@ -0,0 +1,26 @@
+# Stubs for requests.exceptions (Python 3)
+
+from typing import Any
+from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
+
+class RequestException(IOError):
+    response = ...  # type: Any
+    request = ...  # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+
+class HTTPError(RequestException): ...
+class ConnectionError(RequestException): ...
+class ProxyError(ConnectionError): ...
+class SSLError(ConnectionError): ...
+class Timeout(RequestException): ...
+class ConnectTimeout(ConnectionError, Timeout): ...
+class ReadTimeout(Timeout): ...
+class URLRequired(RequestException): ...
+class TooManyRedirects(RequestException): ...
+class MissingSchema(RequestException, ValueError): ...
+class InvalidSchema(RequestException, ValueError): ...
+class InvalidURL(RequestException, ValueError): ...
+class ChunkedEncodingError(RequestException): ...
+class ContentDecodingError(RequestException, BaseHTTPError): ...
+class StreamConsumedError(RequestException, TypeError): ...
+class RetryError(RequestException): ...
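The hierarchy above mirrors requests.exceptions, so callers can catch either the broad
RequestException or one of the narrower subclasses and a checker will accept both. A
minimal usage sketch, not part of the patch, assuming the real requests package is
installed:

    import requests
    from requests.exceptions import ConnectionError, HTTPError, Timeout

    def fetch(url):
        # type: (str) -> str
        try:
            resp = requests.get(url, timeout=5)
            resp.raise_for_status()  # raises HTTPError on 4xx/5xx responses
            return resp.text
        except (Timeout, ConnectionError, HTTPError):
            return ''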
diff --git a/typeshed/third_party/2.7/requests/hooks.pyi b/typeshed/third_party/2.7/requests/hooks.pyi
new file mode 100644
index 0000000..3367d9a
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/hooks.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.hooks (Python 3)
+
+from typing import Any
+
+HOOKS = ...  # type: Any
+
+def default_hooks(): ...
+def dispatch_hook(key, hooks, hook_data, **kwargs): ...
diff --git a/typeshed/third_party/2.7/requests/models.pyi b/typeshed/third_party/2.7/requests/models.pyi
new file mode 100644
index 0000000..d400d4a
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/models.pyi
@@ -0,0 +1,133 @@
+# Stubs for requests.models (Python 3)
+
+from typing import Any, List, MutableMapping, Iterator, Dict
+import datetime
+
+from . import hooks
+from . import structures
+from . import auth
+from . import cookies
+from .cookies import RequestsCookieJar
+from .packages.urllib3 import fields
+from .packages.urllib3 import filepost
+from .packages.urllib3 import util
+from .packages.urllib3 import exceptions as urllib3_exceptions
+from . import exceptions
+from . import utils
+from . import compat
+from . import status_codes
+
+default_hooks = hooks.default_hooks
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+HTTPBasicAuth = auth.HTTPBasicAuth
+cookiejar_from_dict = cookies.cookiejar_from_dict
+get_cookie_header = cookies.get_cookie_header
+RequestField = fields.RequestField
+encode_multipart_formdata = filepost.encode_multipart_formdata
+DecodeError = urllib3_exceptions.DecodeError
+ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
+ProtocolError = urllib3_exceptions.ProtocolError
+LocationParseError = urllib3_exceptions.LocationParseError
+HTTPError = exceptions.HTTPError
+MissingSchema = exceptions.MissingSchema
+InvalidURL = exceptions.InvalidURL
+ChunkedEncodingError = exceptions.ChunkedEncodingError
+ContentDecodingError = exceptions.ContentDecodingError
+ConnectionError = exceptions.ConnectionError
+StreamConsumedError = exceptions.StreamConsumedError
+guess_filename = utils.guess_filename
+get_auth_from_url = utils.get_auth_from_url
+requote_uri = utils.requote_uri
+stream_decode_response_unicode = utils.stream_decode_response_unicode
+to_key_val_list = utils.to_key_val_list
+parse_header_links = utils.parse_header_links
+iter_slices = utils.iter_slices
+guess_json_utf = utils.guess_json_utf
+super_len = utils.super_len
+to_native_string = utils.to_native_string
+codes = status_codes.codes
+
+REDIRECT_STATI = ...  # type: Any
+DEFAULT_REDIRECT_LIMIT = ...  # type: Any
+CONTENT_CHUNK_SIZE = ...  # type: Any
+ITER_CHUNK_SIZE = ...  # type: Any
+json_dumps = ...  # type: Any
+
+class RequestEncodingMixin:
+    @property
+    def path_url(self): ...
+
+class RequestHooksMixin:
+    def register_hook(self, event, hook): ...
+    def deregister_hook(self, event, hook): ...
+
+class Request(RequestHooksMixin):
+    hooks = ...  # type: Any
+    method = ...  # type: Any
+    url = ...  # type: Any
+    headers = ...  # type: Any
+    files = ...  # type: Any
+    data = ...  # type: Any
+    json = ...  # type: Any
+    params = ...  # type: Any
+    auth = ...  # type: Any
+    cookies = ...  # type: Any
+    def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=...,
+                 auth=..., cookies=..., hooks=..., json=...): ...
+    def prepare(self): ...
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+    method = ...  # type: Any
+    url = ...  # type: Any
+    headers = ...  # type: Any
+    body = ...  # type: Any
+    hooks = ...  # type: Any
+    def __init__(self) -> None: ...
+    def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=...,
+                auth=..., cookies=..., hooks=..., json=...): ...
+    def copy(self): ...
+    def prepare_method(self, method): ...
+    def prepare_url(self, url, params): ...
+    def prepare_headers(self, headers): ...
+    def prepare_body(self, data, files, json=...): ...
+    def prepare_content_length(self, body): ...
+    def prepare_auth(self, auth, url=...): ...
+    def prepare_cookies(self, cookies): ...
+    def prepare_hooks(self, hooks): ...
+
+class Response:
+    __attrs__ = ...  # type: Any
+    status_code = ...  # type: int
+    headers = ...  # type: MutableMapping[str, str]
+    raw = ...  # type: Any
+    url = ...  # type: str
+    encoding = ...  # type: str
+    history = ...  # type: List[Response]
+    reason = ...  # type: str
+    cookies = ...  # type: RequestsCookieJar
+    elapsed = ...  # type: datetime.timedelta
+    request = ...  # type: PreparedRequest
+    def __init__(self) -> None: ...
+    def __bool__(self) -> bool: ...
+    def __nonzero__(self) -> bool: ...
+    def __iter__(self) -> Iterator[str]: ...
+    @property
+    def ok(self) -> bool: ...
+    @property
+    def is_redirect(self) -> bool: ...
+    @property
+    def is_permanent_redirect(self) -> bool: ...
+    @property
+    def apparent_encoding(self) -> str: ...
+    def iter_content(self, chunk_size: int = ...,
+                     decode_unicode: bool = ...) -> Iterator[Any]: ...
+    def iter_lines(self, chunk_size=..., decode_unicode=..., delimiter=...): ...
+    @property
+    def content(self) -> str: ...
+    @property
+    def text(self) -> str: ...
+    def json(self, **kwargs) -> Any: ...
+    @property
+    def links(self) -> Dict[Any, Any]: ...
+    def raise_for_status(self) -> None: ...
+    def close(self) -> None: ...
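Response is one of the few classes in these stubs with concrete attribute types
(status_code: int, headers: MutableMapping[str, str], elapsed: datetime.timedelta,
history: List[Response]), so attribute access on it is actually checked. A small
illustrative sketch, not part of the patch and assuming requests is installed:

    import requests

    resp = requests.get('https://httpbin.org/get')
    code = resp.status_code                         # int, per the stub above
    server = resp.headers.get('Server', 'unknown')  # MutableMapping[str, str]
    seconds = resp.elapsed.total_seconds()          # datetime.timedelta
    for line in resp.iter_lines():                  # still untyped (Any) here
        pass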
diff --git a/typeshed/third_party/2.7/requests/packages/__init__.pyi b/typeshed/third_party/2.7/requests/packages/__init__.pyi
new file mode 100644
index 0000000..2b1bff8
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/__init__.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.packages (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class VendorAlias:
+    def __init__(self, package_names) -> None: ...
+    def find_module(self, fullname, path=...): ...
+    def load_module(self, name): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/__init__.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/__init__.pyi
new file mode 100644
index 0000000..38cf672
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/__init__.pyi
@@ -0,0 +1,12 @@
+# Stubs for requests.packages.urllib3 (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import logging
+
+class NullHandler(logging.Handler):
+    def emit(self, record): ...
+
+def add_stderr_logger(level=...): ...
+def disable_warnings(category=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/_collections.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/_collections.pyi
new file mode 100644
index 0000000..58aa944
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/_collections.pyi
@@ -0,0 +1,51 @@
+# Stubs for requests.packages.urllib3._collections (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import MutableMapping
+
+class RLock:
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_value, traceback): ...
+
+class RecentlyUsedContainer(MutableMapping):
+    ContainerCls = ...  # type: Any
+    dispose_func = ...  # type: Any
+    lock = ...  # type: Any
+    def __init__(self, maxsize=..., dispose_func=...) -> None: ...
+    def __getitem__(self, key): ...
+    def __setitem__(self, key, value): ...
+    def __delitem__(self, key): ...
+    def __len__(self): ...
+    def __iter__(self): ...
+    def clear(self): ...
+    def keys(self): ...
+
+class HTTPHeaderDict(dict):
+    def __init__(self, headers=..., **kwargs) -> None: ...
+    def __setitem__(self, key, val): ...
+    def __getitem__(self, key): ...
+    def __delitem__(self, key): ...
+    def __contains__(self, key): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    values = ...  # type: Any
+    get = ...  # type: Any
+    update = ...  # type: Any
+    iterkeys = ...  # type: Any
+    itervalues = ...  # type: Any
+    def pop(self, key, default=...): ...
+    def discard(self, key): ...
+    def add(self, key, val): ...
+    def extend(*args, **kwargs): ...
+    def getlist(self, key): ...
+    getheaders = ...  # type: Any
+    getallmatchingheaders = ...  # type: Any
+    iget = ...  # type: Any
+    def copy(self): ...
+    def iteritems(self): ...
+    def itermerged(self): ...
+    def items(self): ...
+    @classmethod
+    def from_httplib(cls, message, duplicates=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/connection.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/connection.pyi
new file mode 100644
index 0000000..289fd18
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/connection.pyi
@@ -0,0 +1,51 @@
+# Stubs for requests.packages.urllib3.connection (Python 3.4)
+
+from typing import Any
+from httplib import HTTPException
+from . import packages
+from . import exceptions
+from . import util
+
+class DummyConnection: ...
+
+ConnectTimeoutError = exceptions.ConnectTimeoutError
+SystemTimeWarning = exceptions.SystemTimeWarning
+SecurityWarning = exceptions.SecurityWarning
+
+port_by_scheme = ...  # type: Any
+RECENT_DATE = ...  # type: Any
+
+class HTTPConnection(object):
+    default_port = ...  # type: Any
+    default_socket_options = ...  # type: Any
+    is_verified = ...  # type: Any
+    source_address = ...  # type: Any
+    socket_options = ...  # type: Any
+    def __init__(self, *args, **kw) -> None: ...
+    def connect(self): ...
+
+class HTTPSConnection(HTTPConnection):
+    default_port = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ...
+    sock = ...  # type: Any
+    def connect(self): ...
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+    cert_reqs = ...  # type: Any
+    ca_certs = ...  # type: Any
+    ssl_version = ...  # type: Any
+    assert_fingerprint = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    assert_hostname = ...  # type: Any
+    def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ...
+    sock = ...  # type: Any
+    auto_open = ...  # type: Any
+    is_verified = ...  # type: Any
+    def connect(self): ...
+
+UnverifiedHTTPSConnection = ...  # type: Any
+
+class ConnectionError(Exception): pass
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/connectionpool.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/connectionpool.pyi
new file mode 100644
index 0000000..03c3140
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/connectionpool.pyi
@@ -0,0 +1,87 @@
+# Stubs for requests.packages.urllib3.connectionpool (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from ssl import SSLError as BaseSSLError
+from . import exceptions
+from .packages import ssl_match_hostname
+from . import packages
+from . import connection
+from . import request
+from . import response
+from .util import connection as _connection
+from .util import retry
+from .util import timeout
+from .util import url
+
+ClosedPoolError = exceptions.ClosedPoolError
+ProtocolError = exceptions.ProtocolError
+EmptyPoolError = exceptions.EmptyPoolError
+HostChangedError = exceptions.HostChangedError
+LocationValueError = exceptions.LocationValueError
+MaxRetryError = exceptions.MaxRetryError
+ProxyError = exceptions.ProxyError
+ReadTimeoutError = exceptions.ReadTimeoutError
+SSLError = exceptions.SSLError
+TimeoutError = exceptions.TimeoutError
+InsecureRequestWarning = exceptions.InsecureRequestWarning
+CertificateError = ssl_match_hostname.CertificateError
+port_by_scheme = connection.port_by_scheme
+DummyConnection = connection.DummyConnection
+HTTPConnection = connection.HTTPConnection
+HTTPSConnection = connection.HTTPSConnection
+VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection
+HTTPException = connection.HTTPException
+ConnectionError = connection.ConnectionError
+RequestMethods = request.RequestMethods
+HTTPResponse = response.HTTPResponse
+is_connection_dropped = _connection.is_connection_dropped
+Retry = retry.Retry
+Timeout = timeout.Timeout
+get_host = url.get_host
+
+xrange = ...  # type: Any
+log = ...  # type: Any
+
+class ConnectionPool:
+    scheme = ...  # type: Any
+    QueueCls = ...  # type: Any
+    host = ...  # type: Any
+    port = ...  # type: Any
+    def __init__(self, host, port=...) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_val, exc_tb): ...
+    def close(self): ...
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+    scheme = ...  # type: Any
+    ConnectionCls = ...  # type: Any
+    strict = ...  # type: Any
+    timeout = ...  # type: Any
+    retries = ...  # type: Any
+    pool = ...  # type: Any
+    block = ...  # type: Any
+    proxy = ...  # type: Any
+    proxy_headers = ...  # type: Any
+    num_connections = ...  # type: Any
+    num_requests = ...  # type: Any
+    conn_kw = ...  # type: Any
+    def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., **conn_kw) -> None: ...
+    def close(self): ...
+    def is_same_host(self, url): ...
+    def urlopen(self, method, url, body=..., headers=..., retries=..., redirect=..., assert_same_host=..., timeout=..., pool_timeout=..., release_conn=..., **response_kw): ...
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+    scheme = ...  # type: Any
+    ConnectionCls = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    cert_reqs = ...  # type: Any
+    ca_certs = ...  # type: Any
+    ssl_version = ...  # type: Any
+    assert_hostname = ...  # type: Any
+    assert_fingerprint = ...  # type: Any
+    def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., ssl_version=..., assert_hostname=..., assert_fingerprint=..., **conn_kw) -> None: ...
+
+def connection_from_url(url, **kw): ...
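These pool classes are the vendored urllib3 ones, reachable as
requests.packages.urllib3.connectionpool. A rough sketch of the API the stub declares
(illustration only, not taken from the patch):

    from requests.packages.urllib3 import connectionpool

    pool = connectionpool.HTTPConnectionPool('httpbin.org', port=80, maxsize=2)
    r = pool.urlopen('GET', '/get')   # returns an HTTPResponse (see response.pyi)
    body = r.data
    pool.close()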
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/contrib/__init__.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/contrib/__init__.pyi
new file mode 100644
index 0000000..17d26bb
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/contrib/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for requests.packages.urllib3.contrib (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/exceptions.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/exceptions.pyi
new file mode 100644
index 0000000..3e7d0f6
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/exceptions.pyi
@@ -0,0 +1,54 @@
+# Stubs for requests.packages.urllib3.exceptions (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class HTTPError(Exception): ...
+class HTTPWarning(Warning): ...
+
+class PoolError(HTTPError):
+    pool = ...  # type: Any
+    def __init__(self, pool, message) -> None: ...
+    def __reduce__(self): ...
+
+class RequestError(PoolError):
+    url = ...  # type: Any
+    def __init__(self, pool, url, message) -> None: ...
+    def __reduce__(self): ...
+
+class SSLError(HTTPError): ...
+class ProxyError(HTTPError): ...
+class DecodeError(HTTPError): ...
+class ProtocolError(HTTPError): ...
+
+ConnectionError = ...  # type: Any
+
+class MaxRetryError(RequestError):
+    reason = ...  # type: Any
+    def __init__(self, pool, url, reason=...) -> None: ...
+
+class HostChangedError(RequestError):
+    retries = ...  # type: Any
+    def __init__(self, pool, url, retries=...) -> None: ...
+
+class TimeoutStateError(HTTPError): ...
+class TimeoutError(HTTPError): ...
+class ReadTimeoutError(TimeoutError, RequestError): ...
+class ConnectTimeoutError(TimeoutError): ...
+class EmptyPoolError(PoolError): ...
+class ClosedPoolError(PoolError): ...
+class LocationValueError(ValueError, HTTPError): ...
+
+class LocationParseError(LocationValueError):
+    location = ...  # type: Any
+    def __init__(self, location) -> None: ...
+
+class ResponseError(HTTPError):
+    GENERIC_ERROR = ...  # type: Any
+    SPECIFIC_ERROR = ...  # type: Any
+
+class SecurityWarning(HTTPWarning): ...
+class InsecureRequestWarning(SecurityWarning): ...
+class SystemTimeWarning(SecurityWarning): ...
+class InsecurePlatformWarning(SecurityWarning): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/fields.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/fields.pyi
new file mode 100644
index 0000000..cdc7734
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/fields.pyi
@@ -0,0 +1,16 @@
+# Stubs for requests.packages.urllib3.fields (Python 3.4)
+
+from typing import Any
+from . import packages
+
+def guess_content_type(filename, default=...): ...
+def format_header_param(name, value): ...
+
+class RequestField:
+    data = ...  # type: Any
+    headers = ...  # type: Any
+    def __init__(self, name, data, filename=..., headers=...) -> None: ...
+    @classmethod
+    def from_tuples(cls, fieldname, value): ...
+    def render_headers(self): ...
+    def make_multipart(self, content_disposition=..., content_type=..., content_location=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/filepost.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/filepost.pyi
new file mode 100644
index 0000000..c6fefa6
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/filepost.pyi
@@ -0,0 +1,19 @@
+# Stubs for requests.packages.urllib3.filepost (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import packages
+#from .packages import six
+from . import fields
+
+#six = packages.six
+#b = six.b
+RequestField = fields.RequestField
+
+writer = ...  # type: Any
+
+def choose_boundary(): ...
+def iter_field_objects(fields): ...
+def iter_fields(fields): ...
+def encode_multipart_formdata(fields, boundary=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/__init__.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/packages/__init__.pyi
new file mode 100644
index 0000000..2314636
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/packages/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for requests.packages.urllib3.packages (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
new file mode 100644
index 0000000..05c03dc
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
@@ -0,0 +1 @@
+class CertificateError(ValueError): pass
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
new file mode 100644
index 0000000..5abbc9d
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
@@ -0,0 +1,7 @@
+# Stubs for requests.packages.urllib3.packages.ssl_match_hostname._implementation (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class CertificateError(ValueError): ...
+
+def match_hostname(cert, hostname): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/poolmanager.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/poolmanager.pyi
new file mode 100644
index 0000000..a65f664
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/poolmanager.pyi
@@ -0,0 +1,31 @@
+# Stubs for requests.packages.urllib3.poolmanager (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .request import RequestMethods
+
+class PoolManager(RequestMethods):
+    proxy = ...  # type: Any
+    connection_pool_kw = ...  # type: Any
+    pools = ...  # type: Any
+    def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_val, exc_tb): ...
+    def clear(self): ...
+    def connection_from_host(self, host, port=..., scheme=...): ...
+    def connection_from_url(self, url): ...
+    # TODO: This was the original signature; the one below is copied from the base class to silence mypy's override complaint.
+    # def urlopen(self, method, url, redirect=True, **kw): ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+
+class ProxyManager(PoolManager):
+    proxy = ...  # type: Any
+    proxy_headers = ...  # type: Any
+    def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ...
+    def connection_from_host(self, host, port=..., scheme=...): ...
+    # TODO: This was the original signature; the one below is copied from the base class to silence mypy's override complaint.
+    # def urlopen(self, method, url, redirect=True, **kw): ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+
+def proxy_from_url(url, **kw): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/request.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/request.pyi
new file mode 100644
index 0000000..788c759
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/request.pyi
@@ -0,0 +1,13 @@
+# Stubs for requests.packages.urllib3.request (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class RequestMethods:
+    headers = ...  # type: Any
+    def __init__(self, headers=...) -> None: ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+    def request(self, method, url, fields=..., headers=..., **urlopen_kw): ...
+    def request_encode_url(self, method, url, fields=..., **urlopen_kw): ...
+    def request_encode_body(self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/response.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/response.pyi
new file mode 100644
index 0000000..c84f7e9
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/response.pyi
@@ -0,0 +1,58 @@
+# Stubs for requests.packages.urllib3.response (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, IO
+import io
+from . import _collections
+from . import exceptions
+#from .packages import six
+from . import connection
+from .util import response
+
+HTTPHeaderDict = _collections.HTTPHeaderDict
+ProtocolError = exceptions.ProtocolError
+DecodeError = exceptions.DecodeError
+ReadTimeoutError = exceptions.ReadTimeoutError
+binary_type = str # six.binary_type
+PY3 = True # six.PY3
+is_fp_closed = response.is_fp_closed
+
+class DeflateDecoder:
+    def __init__(self) -> None: ...
+    def __getattr__(self, name): ...
+    def decompress(self, data): ...
+
+class GzipDecoder:
+    def __init__(self) -> None: ...
+    def __getattr__(self, name): ...
+    def decompress(self, data): ...
+
+class HTTPResponse(IO[Any]):
+    CONTENT_DECODERS = ...  # type: Any
+    REDIRECT_STATUSES = ...  # type: Any
+    headers = ...  # type: Any
+    status = ...  # type: Any
+    version = ...  # type: Any
+    reason = ...  # type: Any
+    strict = ...  # type: Any
+    decode_content = ...  # type: Any
+    def __init__(self, body=..., headers=..., status=..., version=..., reason=..., strict=..., preload_content=..., decode_content=..., original_response=..., pool=..., connection=...) -> None: ...
+    def get_redirect_location(self): ...
+    def release_conn(self): ...
+    @property
+    def data(self): ...
+    def tell(self): ...
+    def read(self, amt=..., decode_content=..., cache_content=...): ...
+    def stream(self, amt=..., decode_content=...): ...
+    @classmethod
+    def from_httplib(ResponseCls, r, **response_kw): ...
+    def getheaders(self): ...
+    def getheader(self, name, default=...): ...
+    def close(self): ...
+    @property
+    def closed(self): ...
+    def fileno(self): ...
+    def flush(self): ...
+    def readable(self): ...
+    def readinto(self, b): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/__init__.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/__init__.pyi
new file mode 100644
index 0000000..eca2ea9
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/__init__.pyi
@@ -0,0 +1,7 @@
+# Stubs for requests.packages.urllib3.util (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import connection
+from . import request
+
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/connection.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/connection.pyi
new file mode 100644
index 0000000..cd67309
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/connection.pyi
@@ -0,0 +1,11 @@
+# Stubs for requests.packages.urllib3.util.connection (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+poll = ...  # type: Any
+select = ...  # type: Any
+
+def is_connection_dropped(conn): ...
+def create_connection(address, timeout=..., source_address=..., socket_options=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/request.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/request.pyi
new file mode 100644
index 0000000..20a6ea2
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/request.pyi
@@ -0,0 +1,12 @@
+# Stubs for requests.packages.urllib3.util.request (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+#from ..packages import six
+
+#b = six.b
+
+ACCEPT_ENCODING = ...  # type: Any
+
+def make_headers(keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=...): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/response.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/response.pyi
new file mode 100644
index 0000000..761a006
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/response.pyi
@@ -0,0 +1,5 @@
+# Stubs for requests.packages.urllib3.util.response (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def is_fp_closed(obj): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/retry.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/retry.pyi
new file mode 100644
index 0000000..e958d90
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/retry.pyi
@@ -0,0 +1,36 @@
+# Stubs for requests.packages.urllib3.util.retry (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+from .. import packages
+
+ConnectTimeoutError = exceptions.ConnectTimeoutError
+MaxRetryError = exceptions.MaxRetryError
+ProtocolError = exceptions.ProtocolError
+ReadTimeoutError = exceptions.ReadTimeoutError
+ResponseError = exceptions.ResponseError
+
+log = ...  # type: Any
+
+class Retry:
+    DEFAULT_METHOD_WHITELIST = ...  # type: Any
+    BACKOFF_MAX = ...  # type: Any
+    total = ...  # type: Any
+    connect = ...  # type: Any
+    read = ...  # type: Any
+    redirect = ...  # type: Any
+    status_forcelist = ...  # type: Any
+    method_whitelist = ...  # type: Any
+    backoff_factor = ...  # type: Any
+    raise_on_redirect = ...  # type: Any
+    def __init__(self, total=..., connect=..., read=..., redirect=..., method_whitelist=..., status_forcelist=..., backoff_factor=..., raise_on_redirect=..., _observed_errors=...) -> None: ...
+    def new(self, **kw): ...
+    @classmethod
+    def from_int(cls, retries, redirect=..., default=...): ...
+    def get_backoff_time(self): ...
+    def sleep(self): ...
+    def is_forced_retry(self, method, status_code): ...
+    def is_exhausted(self): ...
+    def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ...
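Retry is the object behind the retries=... parameters stubbed elsewhere in this
package; constructing one directly looks roughly like the following (hedged example,
not part of the patch):

    from requests.packages.urllib3.util.retry import Retry

    # Up to 3 attempts, exponential backoff, retry only on 502/503 responses.
    retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503])
    relaxed = retries.new(total=5)    # new() copies the policy with overrides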
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/timeout.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/timeout.pyi
new file mode 100644
index 0000000..0a7653c
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/timeout.pyi
@@ -0,0 +1,24 @@
+# Stubs for requests.packages.urllib3.util.timeout (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+
+TimeoutStateError = exceptions.TimeoutStateError
+
+def current_time(): ...
+
+class Timeout:
+    DEFAULT_TIMEOUT = ...  # type: Any
+    total = ...  # type: Any
+    def __init__(self, total=..., connect=..., read=...) -> None: ...
+    @classmethod
+    def from_float(cls, timeout): ...
+    def clone(self): ...
+    def start_connect(self): ...
+    def get_connect_duration(self): ...
+    @property
+    def connect_timeout(self): ...
+    @property
+    def read_timeout(self): ...
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/url.pyi b/typeshed/third_party/2.7/requests/packages/urllib3/util/url.pyi
new file mode 100644
index 0000000..9877b4a
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/packages/urllib3/util/url.pyi
@@ -0,0 +1,26 @@
+# Stubs for requests.packages.urllib3.util.url (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+
+LocationParseError = exceptions.LocationParseError
+
+url_attrs = ...  # type: Any
+
+class Url:
+    slots = ...  # type: Any
+    def __new__(cls, scheme=..., auth=..., host=..., port=..., path=..., query=..., fragment=...): ...
+    @property
+    def hostname(self): ...
+    @property
+    def request_uri(self): ...
+    @property
+    def netloc(self): ...
+    @property
+    def url(self): ...
+
+def split_first(s, delims): ...
+def parse_url(url): ...
+def get_host(url): ...
diff --git a/typeshed/third_party/2.7/requests/sessions.pyi b/typeshed/third_party/2.7/requests/sessions.pyi
new file mode 100644
index 0000000..1791283
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/sessions.pyi
@@ -0,0 +1,92 @@
+# Stubs for requests.sessions (Python 3)
+
+from typing import Any, Union, MutableMapping
+from . import auth
+from . import compat
+from . import cookies
+from . import models
+from .models import Response
+from . import hooks
+from . import utils
+from . import exceptions
+from .packages.urllib3 import _collections
+from . import structures
+from . import adapters
+from . import status_codes
+
+OrderedDict = compat.OrderedDict
+cookiejar_from_dict = cookies.cookiejar_from_dict
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+RequestsCookieJar = cookies.RequestsCookieJar
+merge_cookies = cookies.merge_cookies
+Request = models.Request
+PreparedRequest = models.PreparedRequest
+DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT
+default_hooks = hooks.default_hooks
+dispatch_hook = hooks.dispatch_hook
+to_key_val_list = utils.to_key_val_list
+default_headers = utils.default_headers
+to_native_string = utils.to_native_string
+TooManyRedirects = exceptions.TooManyRedirects
+InvalidSchema = exceptions.InvalidSchema
+ChunkedEncodingError = exceptions.ChunkedEncodingError
+ContentDecodingError = exceptions.ContentDecodingError
+RecentlyUsedContainer = _collections.RecentlyUsedContainer
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+HTTPAdapter = adapters.HTTPAdapter
+requote_uri = utils.requote_uri
+get_environ_proxies = utils.get_environ_proxies
+get_netrc_auth = utils.get_netrc_auth
+should_bypass_proxies = utils.should_bypass_proxies
+get_auth_from_url = utils.get_auth_from_url
+codes = status_codes.codes
+REDIRECT_STATI = models.REDIRECT_STATI
+
+REDIRECT_CACHE_SIZE = ... # type: Any
+
+def merge_setting(request_setting, session_setting, dict_class=...): ...
+def merge_hooks(request_hooks, session_hooks, dict_class=...): ...
+
+class SessionRedirectMixin:
+    def resolve_redirects(self, resp, req, stream=..., timeout=..., verify=..., cert=...,
+                          proxies=...): ...
+    def rebuild_auth(self, prepared_request, response): ...
+    def rebuild_proxies(self, prepared_request, proxies): ...
+
+class Session(SessionRedirectMixin):
+    __attrs__ = ... # type: Any
+    headers = ... # type: MutableMapping[str, str]
+    auth = ... # type: Any
+    proxies = ... # type: Any
+    hooks = ... # type: Any
+    params = ... # type: Any
+    stream = ... # type: Any
+    verify = ... # type: Any
+    cert = ... # type: Any
+    max_redirects = ... # type: Any
+    trust_env = ... # type: Any
+    cookies = ... # type: Any
+    adapters = ... # type: Any
+    redirect_cache = ... # type: Any
+    def __init__(self) -> None: ...
+    def __enter__(self) -> 'Session': ...
+    def __exit__(self, *args) -> None: ...
+    def prepare_request(self, request): ...
+    def request(self, method: str, url: str, params=..., data=..., headers=...,
+                cookies=..., files=..., auth=..., timeout=..., allow_redirects=...,
+                proxies=..., hooks=..., stream=..., verify=..., cert=...,
+                json=...) -> Response: ...
+    def get(self, url: str, **kwargs) -> Response: ...
+    def options(self, url: str, **kwargs) -> Response: ...
+    def head(self, url: str, **kwargs) -> Response: ...
+    def post(self, url: str, data=..., json=..., **kwargs) -> Response: ...
+    def put(self, url: str, data=..., **kwargs) -> Response: ...
+    def patch(self, url: str, data=..., **kwargs) -> Response: ...
+    def delete(self, url: str, **kwargs) -> Response: ...
+    def send(self, request, **kwargs): ...
+    def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
+    def get_adapter(self, url): ...
+    def close(self) -> None: ...
+    def mount(self, prefix, adapter): ...
+
+def session() -> Session: ...
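Session.request and the verb helpers are annotated to return Response, so the attribute
types from models.pyi flow through chained calls. A minimal sketch, again assuming the
real requests package is installed:

    import requests

    with requests.Session() as s:      # __enter__ is typed to return 'Session'
        s.headers['User-Agent'] = 'stub-demo/0.1'
        resp = s.get('https://httpbin.org/headers')  # -> Response, per the stub
        ok = resp.ok                                 # bool, from models.pyi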
diff --git a/typeshed/third_party/2.7/requests/status_codes.pyi b/typeshed/third_party/2.7/requests/status_codes.pyi
new file mode 100644
index 0000000..e3035eb
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/status_codes.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.status_codes (Python 3)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .structures import LookupDict
+
+codes = ...  # type: Any
diff --git a/typeshed/third_party/2.7/requests/structures.pyi b/typeshed/third_party/2.7/requests/structures.pyi
new file mode 100644
index 0000000..837cf25
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/structures.pyi
@@ -0,0 +1,21 @@
+# Stubs for requests.structures (Python 3)
+
+from typing import Any
+import collections
+
+class CaseInsensitiveDict(collections.MutableMapping):
+    def __init__(self, data=..., **kwargs) -> None: ...
+    def __setitem__(self, key, value): ...
+    def __getitem__(self, key): ...
+    def __delitem__(self, key): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+    def lower_items(self): ...
+    def __eq__(self, other): ...
+    def copy(self): ...
+
+class LookupDict(dict):
+    name = ...  # type: Any
+    def __init__(self, name=...) -> None: ...
+    def __getitem__(self, key): ...
+    def get(self, key, default=...): ...
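CaseInsensitiveDict is the runtime type of Response.headers in requests (the stub above
types that attribute as plain MutableMapping[str, str]). A tiny example of the
case-folding behaviour these declarations cover, for illustration only:

    from requests.structures import CaseInsensitiveDict

    headers = CaseInsensitiveDict({'Content-Type': 'application/json'})
    assert headers['content-type'] == 'application/json'   # lookups fold case
    assert 'CONTENT-TYPE' in headers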
diff --git a/typeshed/third_party/2.7/requests/utils.pyi b/typeshed/third_party/2.7/requests/utils.pyi
new file mode 100644
index 0000000..945277a
--- /dev/null
+++ b/typeshed/third_party/2.7/requests/utils.pyi
@@ -0,0 +1,52 @@
+# Stubs for requests.utils (Python 3)
+
+from typing import Any
+from . import compat
+from . import cookies
+from . import structures
+from . import exceptions
+
+OrderedDict = compat.OrderedDict
+RequestsCookieJar = cookies.RequestsCookieJar
+cookiejar_from_dict = cookies.cookiejar_from_dict
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+InvalidURL = exceptions.InvalidURL
+
+NETRC_FILES = ...  # type: Any
+DEFAULT_CA_BUNDLE_PATH = ...  # type: Any
+
+def dict_to_sequence(d): ...
+def super_len(o): ...
+def get_netrc_auth(url): ...
+def guess_filename(obj): ...
+def from_key_val_list(value): ...
+def to_key_val_list(value): ...
+def parse_list_header(value): ...
+def parse_dict_header(value): ...
+def unquote_header_value(value, is_filename=...): ...
+def dict_from_cookiejar(cj): ...
+def add_dict_to_cookiejar(cj, cookie_dict): ...
+def get_encodings_from_content(content): ...
+def get_encoding_from_headers(headers): ...
+def stream_decode_response_unicode(iterator, r): ...
+def iter_slices(string, slice_length): ...
+def get_unicode_from_response(r): ...
+
+UNRESERVED_SET = ...  # type: Any
+
+def unquote_unreserved(uri): ...
+def requote_uri(uri): ...
+def address_in_network(ip, net): ...
+def dotted_netmask(mask): ...
+def is_ipv4_address(string_ip): ...
+def is_valid_cidr(string_network): ...
+def should_bypass_proxies(url): ...
+def get_environ_proxies(url): ...
+def default_user_agent(name=...): ...
+def default_headers(): ...
+def parse_header_links(value): ...
+def guess_json_utf(data): ...
+def prepend_scheme_if_needed(url, new_scheme): ...
+def get_auth_from_url(url): ...
+def to_native_string(string, encoding=...): ...
+def urldefragauth(url): ...
diff --git a/typeshed/third_party/2.7/routes/__init__.pyi b/typeshed/third_party/2.7/routes/__init__.pyi
new file mode 100644
index 0000000..a82bf13
--- /dev/null
+++ b/typeshed/third_party/2.7/routes/__init__.pyi
@@ -0,0 +1,19 @@
+# Stubs for routes (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+import routes.mapper
+import routes.util
+
+class _RequestConfig:
+    def __getattr__(self, name): ...
+    def __setattr__(self, name, value): ...
+    def __delattr__(self, name): ...
+    def load_wsgi_environ(self, environ): ...
+
+def request_config(original=...): ...
+
+Mapper = routes.mapper.Mapper
+redirect_to = routes.util.redirect_to
+url_for = routes.util.url_for
+URLGenerator = routes.util.URLGenerator
diff --git a/typeshed/third_party/2.7/routes/mapper.pyi b/typeshed/third_party/2.7/routes/mapper.pyi
new file mode 100644
index 0000000..5e99364
--- /dev/null
+++ b/typeshed/third_party/2.7/routes/mapper.pyi
@@ -0,0 +1,70 @@
+# Stubs for routes.mapper (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+COLLECTION_ACTIONS = ... # type: Any
+MEMBER_ACTIONS = ... # type: Any
+
+def strip_slashes(name): ...
+
+class SubMapperParent:
+    def submapper(self, **kargs): ...
+    def collection(self, collection_name, resource_name, path_prefix=..., member_prefix=..., controller=..., collection_actions=..., member_actions=..., member_options=..., **kwargs): ...
+
+class SubMapper(SubMapperParent):
+    kwargs = ... # type: Any
+    obj = ... # type: Any
+    collection_name = ... # type: Any
+    member = ... # type: Any
+    resource_name = ... # type: Any
+    formatted = ... # type: Any
+    def __init__(self, obj, resource_name=..., collection_name=..., actions=..., formatted=..., **kwargs) -> None: ...
+    def connect(self, *args, **kwargs): ...
+    def link(self, rel=..., name=..., action=..., method=..., formatted=..., **kwargs): ...
+    def new(self, **kwargs): ...
+    def edit(self, **kwargs): ...
+    def action(self, name=..., action=..., method=..., formatted=..., **kwargs): ...
+    def index(self, name=..., **kwargs): ...
+    def show(self, name=..., **kwargs): ...
+    def create(self, **kwargs): ...
+    def update(self, **kwargs): ...
+    def delete(self, **kwargs): ...
+    def add_actions(self, actions): ...
+    def __enter__(self): ...
+    def __exit__(self, type, value, tb): ...
+
+class Mapper(SubMapperParent):
+    matchlist = ... # type: Any
+    maxkeys = ... # type: Any
+    minkeys = ... # type: Any
+    urlcache = ... # type: Any
+    prefix = ... # type: Any
+    req_data = ... # type: Any
+    directory = ... # type: Any
+    always_scan = ... # type: Any
+    controller_scan = ... # type: Any
+    debug = ... # type: Any
+    append_slash = ... # type: Any
+    sub_domains = ... # type: Any
+    sub_domains_ignore = ... # type: Any
+    domain_match = ... # type: Any
+    explicit = ... # type: Any
+    encoding = ... # type: Any
+    decode_errors = ... # type: Any
+    hardcode_names = ... # type: Any
+    minimization = ... # type: Any
+    create_regs_lock = ... # type: Any
+    def __init__(self, controller_scan=..., directory=..., always_scan=..., register=..., explicit=...) -> None: ...
+    environ = ... # type: Any
+    def extend(self, routes, path_prefix=...): ...
+    def make_route(self, *args, **kargs): ...
+    def connect(self, *args, **kargs): ...
+    def create_regs(self, *args, **kwargs): ...
+    def match(self, url=..., environ=...): ...
+    def routematch(self, url=..., environ=...): ...
+    obj = ... # type: Any
+    def generate(self, *args, **kargs): ...
+    def resource(self, member_name, collection_name, **kwargs): ...
+    def redirect(self, match_path, destination_path, *args, **kwargs): ...
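The Mapper stub is fully dynamic (every attribute is Any), but it still lets a checker
resolve the names used below. A rough example of the routes API these declarations
describe, assuming the routes package itself is installed:

    from routes import Mapper

    route_map = Mapper()
    route_map.connect('home', '/', controller='main', action='index')
    route_map.connect('/error/{action}/{id}', controller='error')
    match = route_map.match('/error/img/404')   # dict of route variables, or None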
diff --git a/typeshed/third_party/2.7/routes/util.pyi b/typeshed/third_party/2.7/routes/util.pyi
new file mode 100644
index 0000000..87af6f3
--- /dev/null
+++ b/typeshed/third_party/2.7/routes/util.pyi
@@ -0,0 +1,24 @@
+# Stubs for routes.util (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class RoutesException(Exception): ...
+class MatchException(RoutesException): ...
+class GenerationException(RoutesException): ...
+
+def url_for(*args, **kargs): ...
+
+class URLGenerator:
+    mapper = ... # type: Any
+    environ = ... # type: Any
+    def __init__(self, mapper, environ) -> None: ...
+    def __call__(self, *args, **kargs): ...
+    def current(self, *args, **kwargs): ...
+
+def redirect_to(*args, **kargs): ...
+def cache_hostinfo(environ): ...
+def controller_scan(directory=...): ...
+def as_unicode(value, encoding, errors=...): ...
+def ascii_characters(string): ...
diff --git a/typeshed/third_party/2.7/scribe/__init__.pyi b/typeshed/third_party/2.7/scribe/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/scribe/scribe.pyi b/typeshed/third_party/2.7/scribe/scribe.pyi
new file mode 100644
index 0000000..0988889
--- /dev/null
+++ b/typeshed/third_party/2.7/scribe/scribe.pyi
@@ -0,0 +1,43 @@
+# Stubs for scribe.scribe (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+import fb303.FacebookService
+from .ttypes import *
+from thrift.Thrift import TProcessor
+
+class Iface(fb303.FacebookService.Iface):
+    def Log(self, messages): ...
+
+class Client(fb303.FacebookService.Client, Iface):
+    def __init__(self, iprot, oprot=...) -> None: ...
+    def Log(self, messages): ...
+    def send_Log(self, messages): ...
+    def recv_Log(self): ...
+
+class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
+    def __init__(self, handler) -> None: ...
+    def process(self, iprot, oprot): ...
+    def process_Log(self, seqid, iprot, oprot): ...
+
+class Log_args:
+    thrift_spec = ... # type: Any
+    messages = ... # type: Any
+    def __init__(self, messages=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+
+class Log_result:
+    thrift_spec = ... # type: Any
+    success = ... # type: Any
+    def __init__(self, success=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
diff --git a/typeshed/third_party/2.7/scribe/ttypes.pyi b/typeshed/third_party/2.7/scribe/ttypes.pyi
new file mode 100644
index 0000000..3629e99
--- /dev/null
+++ b/typeshed/third_party/2.7/scribe/ttypes.pyi
@@ -0,0 +1,22 @@
+# Stubs for scribe.ttypes (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+fastbinary = ... # type: Any
+
+class ResultCode:
+    OK = ... # type: Any
+    TRY_LATER = ... # type: Any
+
+class LogEntry:
+    thrift_spec = ... # type: Any
+    category = ... # type: Any
+    message = ... # type: Any
+    def __init__(self, category=..., message=...) -> None: ...
+    def read(self, iprot): ...
+    def write(self, oprot): ...
+    def validate(self): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
diff --git a/typeshed/third_party/2.7/six/__init__.pyi b/typeshed/third_party/2.7/six/__init__.pyi
new file mode 100644
index 0000000..0f6acfc
--- /dev/null
+++ b/typeshed/third_party/2.7/six/__init__.pyi
@@ -0,0 +1,90 @@
+# Stubs for six (Python 2.7)
+
+from __future__ import print_function
+
+from typing import (
+    Any, AnyStr, Callable, Dict, Iterable, Mapping, Optional,
+    Pattern, Tuple, TypeVar, Union, overload,
+)
+import typing
+
+import unittest
+import types
+
+_T = TypeVar('_T')
+_K = TypeVar('_K')
+_V = TypeVar('_V')
+
+# TODO make constant, then move this stub to 2and3
+# https://github.com/python/typeshed/issues/17
+PY2 = True
+PY3 = False
+PY34 = False
+
+string_types = basestring,
+integer_types = (int, long)
+class_types = (type, types.ClassType)
+text_type = unicode
+binary_type = str
+
+MAXSIZE = ... # type: int
+
+#def add_move
+#def remove_move
+
+def advance_iterator(it: typing.Iterator[_T]) -> _T: ...
+next = advance_iterator
+
+def callable(obj: object) -> bool: ...
+
+def get_unbound_function(unbound: types.MethodType) -> types.FunctionType: ...
+def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ...
+def create_unbound_method(func: types.FunctionType, cls: Union[type, types.ClassType]) -> types.MethodType: ...
+
+class Iterator:
+    def next(self) -> Any: ...
+
+def get_method_function(meth: types.MethodType) -> types.FunctionType: ...
+def get_method_self(meth: types.MethodType) -> Optional[object]: ...
+def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ...
+def get_function_code(fun: types.FunctionType) -> types.CodeType: ...
+def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ...
+def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ...
+
+def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ...
+def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ...
+def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ...
+#def iterlists
+
+# TODO fix return types - python2 typing doesn't include KeysView etc yet.
+def viewkeys(d: Mapping[_K, _V]) -> Iterable[_K]: ...
+def viewvalues(d: Mapping[_K, _V]) -> Iterable[_V]: ...
+def viewitems(d: Mapping[_K, _V]) -> Iterable[Tuple[_K, _V]]: ...
+
+def b(s: str) -> binary_type: ...
+def u(s: str) -> text_type: ...
+from __builtin__ import unichr as unichr
+int2byte = chr
+def byte2int(bs: binary_type) -> int: ...
+def indexbytes(buf: binary_type, i: int) -> int: ...
+def iterbytes(buf: binary_type) -> typing.Iterator[int]: ...
+from StringIO import StringIO as StringIO, StringIO as BytesIO
+
+def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str = ...) -> None: ...
+@overload
+def assertRaisesRegex(self: unittest.TestCase, msg: str = ...) -> Any: ...
+@overload
+def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ...
+def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = ...) -> None: ...
+
+def reraise(tp: type, value: Optional[BaseException], tb: types.TracebackType = ...) -> None: ...
+def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ...
+def raise_from(value: BaseException, from_value: BaseException) -> None: ...
+
+print_ = print
+
+from functools import wraps as wraps
+
+def with_metaclass(meta: type, *bases: type) -> type: ...
+def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ...
+def python_2_unicode_compatible(klass: _T) -> _T: ...
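Most of the typing value in this stub is in the generic dict-iteration helpers and the
text/bytes constructors. A short hedged sketch (illustrative only; assumes six is
installed):

    import six

    counts = {'a': 1, 'b': 2}
    for key, value in six.iteritems(counts):   # Iterator[Tuple[str, int]] per the stub
        total = value + 1

    text = six.u('hello')    # text_type (unicode on Python 2)
    data = six.b('hello')    # binary_type (str on Python 2)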
diff --git a/typeshed/third_party/2.7/six/moves/__init__.pyi b/typeshed/third_party/2.7/six/moves/__init__.pyi
new file mode 100644
index 0000000..1347e75
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/__init__.pyi
@@ -0,0 +1,29 @@
+# Provisional stubs for six.moves (Python 2.7)
+
+from cStringIO import StringIO as cStringIO
+from itertools import ifilter as filter
+from itertools import ifilterfalse as filterfalse
+from __builtin__ import raw_input as input
+from __builtin__ import intern as intern
+from itertools import imap as map
+from os import getcwdu as getcwd
+from os import getcwd as getcwdb
+from __builtin__ import xrange as range
+from __builtin__ import reload as reload_module
+from __builtin__ import reduce as reduce
+from pipes import quote as shlex_quote
+from StringIO import StringIO as StringIO
+from UserDict import UserDict as UserDict
+from UserList import UserList as UserList
+from UserString import UserString as UserString
+from __builtin__ import xrange as xrange
+from itertools import izip as zip
+from itertools import izip_longest as zip_longest
+import six.moves.cPickle as cPickle
+import HTMLParser as html_parser
+import htmlentitydefs as html_entities
+
+import six.moves.urllib_parse as urllib_parse
+import six.moves.urllib_error as urllib_error
+import six.moves.urllib as urllib
+import six.moves.urllib_robotparser as urllib_robotparser
diff --git a/typeshed/third_party/2.7/six/moves/cPickle.pyi b/typeshed/third_party/2.7/six/moves/cPickle.pyi
new file mode 100644
index 0000000..a3c3414
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/cPickle.pyi
@@ -0,0 +1,6 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.cPickle (Python 2.7)
+
+from cPickle import *
diff --git a/typeshed/third_party/2.7/six/moves/urllib/__init__.pyi b/typeshed/third_party/2.7/six/moves/urllib/__init__.pyi
new file mode 100644
index 0000000..71523cd
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/__init__.pyi
@@ -0,0 +1,10 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib (Python 2.7)
+
+import six.moves.urllib.error as error
+import six.moves.urllib.parse as parse
+import six.moves.urllib.request as request
+import six.moves.urllib.response as response
+import six.moves.urllib.robotparser as robotparser
diff --git a/typeshed/third_party/2.7/six/moves/urllib/error.pyi b/typeshed/third_party/2.7/six/moves/urllib/error.pyi
new file mode 100644
index 0000000..05eda84
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/error.pyi
@@ -0,0 +1,8 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.error (Python 2.7)
+
+from urllib2 import URLError as URLError
+from urllib2 import HTTPError as HTTPError
+from urllib import ContentTooShortError as ContentTooShortError
diff --git a/typeshed/third_party/2.7/six/moves/urllib/parse.pyi b/typeshed/third_party/2.7/six/moves/urllib/parse.pyi
new file mode 100644
index 0000000..25351fb
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/parse.pyi
@@ -0,0 +1,30 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.parse (Python 2.7)
+
+from six.moves.urllib_parse import (
+    ParseResult as ParseResult,
+    SplitResult as SplitResult,
+    parse_qs as parse_qs,
+    parse_qsl as parse_qsl,
+    urldefrag as urldefrag,
+    urljoin as urljoin,
+    urlparse as urlparse,
+    urlsplit as urlsplit,
+    urlunparse as urlunparse,
+    urlunsplit as urlunsplit,
+    quote as quote,
+    quote_plus as quote_plus,
+    unquote as unquote,
+    unquote_plus as unquote_plus,
+    urlencode as urlencode,
+    splitquery as splitquery,
+    splittag as splittag,
+    splituser as splituser,
+    uses_fragment as uses_fragment,
+    uses_netloc as uses_netloc,
+    uses_params as uses_params,
+    uses_query as uses_query,
+    uses_relative as uses_relative,
+)
diff --git a/typeshed/third_party/2.7/six/moves/urllib/request.pyi b/typeshed/third_party/2.7/six/moves/urllib/request.pyi
new file mode 100644
index 0000000..109cda8
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/request.pyi
@@ -0,0 +1,38 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.request (Python 2.7)
+
+from urllib2 import urlopen as urlopen
+from urllib2 import install_opener as install_opener
+from urllib2 import build_opener as build_opener
+from urllib import pathname2url as pathname2url
+from urllib import url2pathname as url2pathname
+from urllib import getproxies as getproxies
+from urllib2 import Request as Request
+from urllib2 import OpenerDirector as OpenerDirector
+from urllib2 import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler
+from urllib2 import HTTPRedirectHandler as HTTPRedirectHandler
+from urllib2 import HTTPCookieProcessor as HTTPCookieProcessor
+from urllib2 import ProxyHandler as ProxyHandler
+from urllib2 import BaseHandler as BaseHandler
+from urllib2 import HTTPPasswordMgr as HTTPPasswordMgr
+from urllib2 import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm
+from urllib2 import AbstractBasicAuthHandler as AbstractBasicAuthHandler
+from urllib2 import HTTPBasicAuthHandler as HTTPBasicAuthHandler
+from urllib2 import ProxyBasicAuthHandler as ProxyBasicAuthHandler
+from urllib2 import AbstractDigestAuthHandler as AbstractDigestAuthHandler
+from urllib2 import HTTPDigestAuthHandler as HTTPDigestAuthHandler
+from urllib2 import ProxyDigestAuthHandler as ProxyDigestAuthHandler
+from urllib2 import HTTPHandler as HTTPHandler
+from urllib2 import HTTPSHandler as HTTPSHandler
+from urllib2 import FileHandler as FileHandler
+from urllib2 import FTPHandler as FTPHandler
+from urllib2 import CacheFTPHandler as CacheFTPHandler
+from urllib2 import UnknownHandler as UnknownHandler
+from urllib2 import HTTPErrorProcessor as HTTPErrorProcessor
+from urllib import urlretrieve as urlretrieve
+from urllib import urlcleanup as urlcleanup
+from urllib import URLopener as URLopener
+from urllib import FancyURLopener as FancyURLopener
+from urllib import proxy_bypass as proxy_bypass
diff --git a/typeshed/third_party/2.7/six/moves/urllib/response.pyi b/typeshed/third_party/2.7/six/moves/urllib/response.pyi
new file mode 100644
index 0000000..d778514
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/response.pyi
@@ -0,0 +1,9 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.response (Python 2.7)
+
+from urllib import addbase as addbase
+from urllib import addclosehook as addclosehook
+from urllib import addinfo as addinfo
+from urllib import addinfourl as addinfourl
diff --git a/typeshed/third_party/2.7/six/moves/urllib/robotparser.pyi b/typeshed/third_party/2.7/six/moves/urllib/robotparser.pyi
new file mode 100644
index 0000000..3b33758
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib/robotparser.pyi
@@ -0,0 +1,6 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.robotparser (Python 2.7)
+
+from robotparser import RobotFileParser as RobotFileParser
diff --git a/typeshed/third_party/2.7/six/moves/urllib_error.pyi b/typeshed/third_party/2.7/six/moves/urllib_error.pyi
new file mode 100644
index 0000000..4872659
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib_error.pyi
@@ -0,0 +1,10 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_error (Python 2.7)
+
+from six.moves.urllib.error import (
+    URLError as URLError,
+    HTTPError as HTTPError,
+    ContentTooShortError as ContentTooShortError,
+)
diff --git a/typeshed/third_party/2.7/six/moves/urllib_parse.pyi b/typeshed/third_party/2.7/six/moves/urllib_parse.pyi
new file mode 100644
index 0000000..2416b96
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib_parse.pyi
@@ -0,0 +1,28 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_parse (Python 2.7)
+
+from urlparse import ParseResult as ParseResult
+from urlparse import SplitResult as SplitResult
+from urlparse import parse_qs as parse_qs
+from urlparse import parse_qsl as parse_qsl
+from urlparse import urldefrag as urldefrag
+from urlparse import urljoin as urljoin
+from urlparse import urlparse as urlparse
+from urlparse import urlsplit as urlsplit
+from urlparse import urlunparse as urlunparse
+from urlparse import urlunsplit as urlunsplit
+from urllib import quote as quote
+from urllib import quote_plus as quote_plus
+from urllib import unquote as unquote
+from urllib import unquote_plus as unquote_plus
+from urllib import urlencode as urlencode
+from urllib import splitquery as splitquery
+from urllib import splittag as splittag
+from urllib import splituser as splituser
+from urlparse import uses_fragment as uses_fragment
+from urlparse import uses_netloc as uses_netloc
+from urlparse import uses_params as uses_params
+from urlparse import uses_query as uses_query
+from urlparse import uses_relative as uses_relative
diff --git a/typeshed/third_party/2.7/six/moves/urllib_request.pyi b/typeshed/third_party/2.7/six/moves/urllib_request.pyi
new file mode 100644
index 0000000..832055a
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib_request.pyi
@@ -0,0 +1,40 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_request (Python 2.7)
+
+from six.moves.urllib.request import (
+    urlopen as urlopen,
+    install_opener as install_opener,
+    build_opener as build_opener,
+    pathname2url as pathname2url,
+    url2pathname as url2pathname,
+    getproxies as getproxies,
+    Request as Request,
+    OpenerDirector as OpenerDirector,
+    HTTPDefaultErrorHandler as HTTPDefaultErrorHandler,
+    HTTPRedirectHandler as HTTPRedirectHandler,
+    HTTPCookieProcessor as HTTPCookieProcessor,
+    ProxyHandler as ProxyHandler,
+    BaseHandler as BaseHandler,
+    HTTPPasswordMgr as HTTPPasswordMgr,
+    HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm,
+    AbstractBasicAuthHandler as AbstractBasicAuthHandler,
+    HTTPBasicAuthHandler as HTTPBasicAuthHandler,
+    ProxyBasicAuthHandler as ProxyBasicAuthHandler,
+    AbstractDigestAuthHandler as AbstractDigestAuthHandler,
+    HTTPDigestAuthHandler as HTTPDigestAuthHandler,
+    ProxyDigestAuthHandler as ProxyDigestAuthHandler,
+    HTTPHandler as HTTPHandler,
+    HTTPSHandler as HTTPSHandler,
+    FileHandler as FileHandler,
+    FTPHandler as FTPHandler,
+    CacheFTPHandler as CacheFTPHandler,
+    UnknownHandler as UnknownHandler,
+    HTTPErrorProcessor as HTTPErrorProcessor,
+    urlretrieve as urlretrieve,
+    urlcleanup as urlcleanup,
+    URLopener as URLopener,
+    FancyURLopener as FancyURLopener,
+    proxy_bypass as proxy_bypass,
+)
diff --git a/typeshed/third_party/2.7/six/moves/urllib_response.pyi b/typeshed/third_party/2.7/six/moves/urllib_response.pyi
new file mode 100644
index 0000000..ca00492
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib_response.pyi
@@ -0,0 +1,11 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_response (Python 2.7)
+
+from six.moves.urllib.response import (
+    addbase as addbase,
+    addclosehook as addclosehook,
+    addinfo as addinfo,
+    addinfourl as addinfourl,
+)
diff --git a/typeshed/third_party/2.7/six/moves/urllib_robotparser.pyi b/typeshed/third_party/2.7/six/moves/urllib_robotparser.pyi
new file mode 100644
index 0000000..d990bb5
--- /dev/null
+++ b/typeshed/third_party/2.7/six/moves/urllib_robotparser.pyi
@@ -0,0 +1,8 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_robotparser (Python 2.7)
+
+from six.moves.urllib.robotparser import (
+    RobotFileParser as RobotFileParser,
+)
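
For orientation only (not part of the upstream patch): under these stubs, Python 2 code that routes through six.moves.urllib is checked against the urllib/urllib2/urlparse names re-exported above. A minimal sketch, with a made-up URL:

from six.moves.urllib.parse import urlencode, urlparse
from six.moves.urllib.request import urlopen

query = urlencode({'q': 'mypy'})
parts = urlparse('https://example.org/search?' + query)  # example.org is illustrative
response = urlopen(parts.geturl())
print(response.read())
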
diff --git a/typeshed/third_party/2.7/sqlalchemy/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/__init__.pyi
new file mode 100644
index 0000000..717082c
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/__init__.pyi
@@ -0,0 +1,124 @@
+# Stubs for sqlalchemy (Python 2)
+
+from .sql import (
+    alias,
+    and_,
+    asc,
+    between,
+    bindparam,
+    case,
+    cast,
+    collate,
+    column,
+    delete,
+    desc,
+    distinct,
+    except_,
+    except_all,
+    exists,
+    extract,
+    false,
+    func,
+    funcfilter,
+    insert,
+    intersect,
+    intersect_all,
+    join,
+    literal,
+    literal_column,
+    modifier,
+    not_,
+    null,
+    or_,
+    outerjoin,
+    outparam,
+    over,
+    select,
+    subquery,
+    table,
+    text,
+    true,
+    tuple_,
+    type_coerce,
+    union,
+    union_all,
+    update,
+    )
+
+from .types import (
+    BIGINT,
+    BINARY,
+    BLOB,
+    BOOLEAN,
+    BigInteger,
+    Binary,
+    Boolean,
+    CHAR,
+    CLOB,
+    DATE,
+    DATETIME,
+    DECIMAL,
+    Date,
+    DateTime,
+    Enum,
+    FLOAT,
+    Float,
+    INT,
+    INTEGER,
+    Integer,
+    Interval,
+    LargeBinary,
+    NCHAR,
+    NVARCHAR,
+    NUMERIC,
+    Numeric,
+    PickleType,
+    REAL,
+    SMALLINT,
+    SmallInteger,
+    String,
+    TEXT,
+    TIME,
+    TIMESTAMP,
+    Text,
+    Time,
+    TypeDecorator,
+    Unicode,
+    UnicodeText,
+    VARBINARY,
+    VARCHAR,
+    )
+
+from .schema import (
+    CheckConstraint,
+    Column,
+    ColumnDefault,
+    Constraint,
+    DefaultClause,
+    FetchedValue,
+    ForeignKey,
+    ForeignKeyConstraint,
+    Index,
+    MetaData,
+    PassiveDefault,
+    PrimaryKeyConstraint,
+    Sequence,
+    Table,
+    ThreadLocalMetaData,
+    UniqueConstraint,
+    DDL,
+)
+
+from . import sql as sql
+from . import schema as schema
+from . import types as types
+from . import exc as exc
+from . import dialects as dialects
+from . import pool as pool
+# This should re-export orm but orm is totally broken right now
+# from . import orm as orm
+
+from .inspection import inspect
+from .engine import create_engine, engine_from_config
+
+__version__ = ... # type: str
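
Purely illustrative, not part of the patch: the kind of core usage the sqlalchemy/__init__.pyi stub above is meant to cover (create_engine, MetaData, Table, Column, the type objects, select). The in-memory SQLite URL and the table layout are made up.

from sqlalchemy import (create_engine, MetaData, Table, Column,
                        Integer, String, select)

engine = create_engine('sqlite:///:memory:')
metadata = MetaData()
users = Table('users', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String(50)))
metadata.create_all(engine)
with engine.connect() as conn:
    conn.execute(users.insert().values(name='alice'))
    for row in conn.execute(select([users])):
        print(row)
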
diff --git a/typeshed/third_party/2.7/sqlalchemy/databases/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/databases/__init__.pyi
new file mode 100644
index 0000000..b1ac4a4
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/databases/__init__.pyi
@@ -0,0 +1,12 @@
+# Stubs for sqlalchemy.databases (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# Names in __all__ with no definition:
+#   firebird
+#   mssql
+#   mysql
+#   oracle
+#   postgresql
+#   sqlite
+#   sybase
diff --git a/typeshed/third_party/2.7/sqlalchemy/databases/mysql.pyi b/typeshed/third_party/2.7/sqlalchemy/databases/mysql.pyi
new file mode 100644
index 0000000..c218c50
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/databases/mysql.pyi
@@ -0,0 +1 @@
+from sqlalchemy.dialects.mysql.base import *
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/dialects/__init__.pyi
new file mode 100644
index 0000000..2d261de
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/dialects/__init__.pyi
@@ -0,0 +1,12 @@
+# Stubs for sqlalchemy.dialects (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# Names in __all__ with no definition:
+#   firebird
+#   mssql
+#   mysql
+#   oracle
+#   postgresql
+#   sqlite
+#   sybase
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
new file mode 100644
index 0000000..2faf87d
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
@@ -0,0 +1,42 @@
+# Stubs for sqlalchemy.dialects.mysql (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import base
+
+BIGINT = base.BIGINT
+BINARY = base.BINARY
+BIT = base.BIT
+BLOB = base.BLOB
+BOOLEAN = base.BOOLEAN
+CHAR = base.CHAR
+DATE = base.DATE
+DATETIME = base.DATETIME
+DECIMAL = base.DECIMAL
+DOUBLE = base.DOUBLE
+ENUM = base.ENUM
+FLOAT = base.FLOAT
+INTEGER = base.INTEGER
+LONGBLOB = base.LONGBLOB
+LONGTEXT = base.LONGTEXT
+MEDIUMBLOB = base.MEDIUMBLOB
+MEDIUMINT = base.MEDIUMINT
+MEDIUMTEXT = base.MEDIUMTEXT
+NCHAR = base.NCHAR
+NVARCHAR = base.NVARCHAR
+NUMERIC = base.NUMERIC
+SET = base.SET
+SMALLINT = base.SMALLINT
+REAL = base.REAL
+TEXT = base.TEXT
+TIME = base.TIME
+TIMESTAMP = base.TIMESTAMP
+TINYBLOB = base.TINYBLOB
+TINYINT = base.TINYINT
+TINYTEXT = base.TINYTEXT
+VARBINARY = base.VARBINARY
+VARCHAR = base.VARCHAR
+YEAR = base.YEAR
+dialect = base.dialect
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
new file mode 100644
index 0000000..1443c26
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
@@ -0,0 +1,350 @@
+# Stubs for sqlalchemy.dialects.mysql.base (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from ... import sql
+from ... import engine
+from ... import util
+from ... import types
+
+sqltypes = sql.sqltypes
+compiler = sql.compiler
+reflection = engine.reflection
+default = engine.default
+topological = util.topological
+DATE = types.DATE
+BOOLEAN = types.BOOLEAN
+BLOB = types.BLOB
+BINARY = types.BINARY
+VARBINARY = types.VARBINARY
+
+RESERVED_WORDS = ... # type: Any
+AUTOCOMMIT_RE = ... # type: Any
+SET_RE = ... # type: Any
+
+class _NumericType:
+    unsigned = ... # type: Any
+    zerofill = ... # type: Any
+    def __init__(self, unsigned=..., zerofill=..., **kw) -> None: ...
+
+class _FloatType(_NumericType, sqltypes.Float):
+    scale = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+
+class _IntegerType(_NumericType, sqltypes.Integer):
+    display_width = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class _StringType(sqltypes.String):
+    charset = ... # type: Any
+    ascii = ... # type: Any
+    unicode = ... # type: Any
+    binary = ... # type: Any
+    national = ... # type: Any
+    def __init__(self, charset=..., collation=..., ascii=..., binary=..., unicode=..., national=..., **kw) -> None: ...
+
+class _MatchType(sqltypes.Float, sqltypes.MatchType):
+    def __init__(self, **kw) -> None: ...
+
+class NUMERIC(_NumericType, sqltypes.NUMERIC):
+    __visit_name__ = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+
+class DECIMAL(_NumericType, sqltypes.DECIMAL):
+    __visit_name__ = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+
+class DOUBLE(_FloatType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+
+class REAL(_FloatType, sqltypes.REAL):
+    __visit_name__ = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+
+class FLOAT(_FloatType, sqltypes.FLOAT):
+    __visit_name__ = ... # type: Any
+    def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
+    def bind_processor(self, dialect): ...
+
+class INTEGER(_IntegerType, sqltypes.INTEGER):
+    __visit_name__ = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class BIGINT(_IntegerType, sqltypes.BIGINT):
+    __visit_name__ = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class MEDIUMINT(_IntegerType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class TINYINT(_IntegerType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class SMALLINT(_IntegerType, sqltypes.SMALLINT):
+    __visit_name__ = ... # type: Any
+    def __init__(self, display_width=..., **kw) -> None: ...
+
+class BIT(sqltypes.TypeEngine):
+    __visit_name__ = ... # type: Any
+    length = ... # type: Any
+    def __init__(self, length=...) -> None: ...
+    def result_processor(self, dialect, coltype): ...
+
+class TIME(sqltypes.TIME):
+    __visit_name__ = ... # type: Any
+    fsp = ... # type: Any
+    def __init__(self, timezone=..., fsp=...) -> None: ...
+    def result_processor(self, dialect, coltype): ...
+
+class TIMESTAMP(sqltypes.TIMESTAMP):
+    __visit_name__ = ... # type: Any
+    fsp = ... # type: Any
+    def __init__(self, timezone=..., fsp=...) -> None: ...
+
+class DATETIME(sqltypes.DATETIME):
+    __visit_name__ = ... # type: Any
+    fsp = ... # type: Any
+    def __init__(self, timezone=..., fsp=...) -> None: ...
+
+class YEAR(sqltypes.TypeEngine):
+    __visit_name__ = ... # type: Any
+    display_width = ... # type: Any
+    def __init__(self, display_width=...) -> None: ...
+
+class TEXT(_StringType, sqltypes.TEXT):
+    __visit_name__ = ... # type: Any
+    def __init__(self, length=..., **kw) -> None: ...
+
+class TINYTEXT(_StringType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, **kwargs) -> None: ...
+
+class MEDIUMTEXT(_StringType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, **kwargs) -> None: ...
+
+class LONGTEXT(_StringType):
+    __visit_name__ = ... # type: Any
+    def __init__(self, **kwargs) -> None: ...
+
+class VARCHAR(_StringType, sqltypes.VARCHAR):
+    __visit_name__ = ... # type: Any
+    def __init__(self, length=..., **kwargs) -> None: ...
+
+class CHAR(_StringType, sqltypes.CHAR):
+    __visit_name__ = ... # type: Any
+    def __init__(self, length=..., **kwargs) -> None: ...
+
+class NVARCHAR(_StringType, sqltypes.NVARCHAR):
+    __visit_name__ = ... # type: Any
+    def __init__(self, length=..., **kwargs) -> None: ...
+
+class NCHAR(_StringType, sqltypes.NCHAR):
+    __visit_name__ = ... # type: Any
+    def __init__(self, length=..., **kwargs) -> None: ...
+
+class TINYBLOB(sqltypes._Binary):
+    __visit_name__ = ... # type: Any
+
+class MEDIUMBLOB(sqltypes._Binary):
+    __visit_name__ = ... # type: Any
+
+class LONGBLOB(sqltypes._Binary):
+    __visit_name__ = ... # type: Any
+
+class _EnumeratedValues(_StringType): ...
+
+class ENUM(sqltypes.Enum, _EnumeratedValues):
+    __visit_name__ = ... # type: Any
+    strict = ... # type: Any
+    def __init__(self, *enums, **kw) -> None: ...
+    def bind_processor(self, dialect): ...
+    def adapt(self, cls, **kw): ...
+
+class SET(_EnumeratedValues):
+    __visit_name__ = ... # type: Any
+    retrieve_as_bitwise = ... # type: Any
+    values = ... # type: Any
+    def __init__(self, *values, **kw) -> None: ...
+    def column_expression(self, colexpr): ...
+    def result_processor(self, dialect, coltype): ...
+    def bind_processor(self, dialect): ...
+    def adapt(self, impltype, **kw): ...
+
+MSTime = ... # type: Any
+MSSet = ... # type: Any
+MSEnum = ... # type: Any
+MSLongBlob = ... # type: Any
+MSMediumBlob = ... # type: Any
+MSTinyBlob = ... # type: Any
+MSBlob = ... # type: Any
+MSBinary = ... # type: Any
+MSVarBinary = ... # type: Any
+MSNChar = ... # type: Any
+MSNVarChar = ... # type: Any
+MSChar = ... # type: Any
+MSString = ... # type: Any
+MSLongText = ... # type: Any
+MSMediumText = ... # type: Any
+MSTinyText = ... # type: Any
+MSText = ... # type: Any
+MSYear = ... # type: Any
+MSTimeStamp = ... # type: Any
+MSBit = ... # type: Any
+MSSmallInteger = ... # type: Any
+MSTinyInteger = ... # type: Any
+MSMediumInteger = ... # type: Any
+MSBigInteger = ... # type: Any
+MSNumeric = ... # type: Any
+MSDecimal = ... # type: Any
+MSDouble = ... # type: Any
+MSReal = ... # type: Any
+MSFloat = ... # type: Any
+MSInteger = ... # type: Any
+colspecs = ... # type: Any
+ischema_names = ... # type: Any
+
+class MySQLExecutionContext(default.DefaultExecutionContext):
+    def should_autocommit_text(self, statement): ...
+
+class MySQLCompiler(compiler.SQLCompiler):
+    render_table_with_column_in_update_from = ... # type: Any
+    extract_map = ... # type: Any
+    def visit_random_func(self, fn, **kw): ...
+    def visit_utc_timestamp_func(self, fn, **kw): ...
+    def visit_sysdate_func(self, fn, **kw): ...
+    def visit_concat_op_binary(self, binary, operator, **kw): ...
+    def visit_match_op_binary(self, binary, operator, **kw): ...
+    def get_from_hint_text(self, table, text): ...
+    def visit_typeclause(self, typeclause, type_=...): ...
+    def visit_cast(self, cast, **kwargs): ...
+    def render_literal_value(self, value, type_): ...
+    def visit_true(self, element, **kw): ...
+    def visit_false(self, element, **kw): ...
+    def get_select_precolumns(self, select, **kw): ...
+    def visit_join(self, join, asfrom=..., **kwargs): ...
+    def for_update_clause(self, select, **kw): ...
+    def limit_clause(self, select, **kw): ...
+    def update_limit_clause(self, update_stmt): ...
+    def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ...
+    def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ...
+
+class MySQLDDLCompiler(compiler.DDLCompiler):
+    def create_table_constraints(self, table, **kw): ...
+    def get_column_specification(self, column, **kw): ...
+    def post_create_table(self, table): ...
+    def visit_create_index(self, create): ...
+    def visit_primary_key_constraint(self, constraint): ...
+    def visit_drop_index(self, drop): ...
+    def visit_drop_constraint(self, drop): ...
+    def define_constraint_match(self, constraint): ...
+
+class MySQLTypeCompiler(compiler.GenericTypeCompiler):
+    def visit_NUMERIC(self, type_, **kw): ...
+    def visit_DECIMAL(self, type_, **kw): ...
+    def visit_DOUBLE(self, type_, **kw): ...
+    def visit_REAL(self, type_, **kw): ...
+    def visit_FLOAT(self, type_, **kw): ...
+    def visit_INTEGER(self, type_, **kw): ...
+    def visit_BIGINT(self, type_, **kw): ...
+    def visit_MEDIUMINT(self, type_, **kw): ...
+    def visit_TINYINT(self, type_, **kw): ...
+    def visit_SMALLINT(self, type_, **kw): ...
+    def visit_BIT(self, type_, **kw): ...
+    def visit_DATETIME(self, type_, **kw): ...
+    def visit_DATE(self, type_, **kw): ...
+    def visit_TIME(self, type_, **kw): ...
+    def visit_TIMESTAMP(self, type_, **kw): ...
+    def visit_YEAR(self, type_, **kw): ...
+    def visit_TEXT(self, type_, **kw): ...
+    def visit_TINYTEXT(self, type_, **kw): ...
+    def visit_MEDIUMTEXT(self, type_, **kw): ...
+    def visit_LONGTEXT(self, type_, **kw): ...
+    def visit_VARCHAR(self, type_, **kw): ...
+    def visit_CHAR(self, type_, **kw): ...
+    def visit_NVARCHAR(self, type_, **kw): ...
+    def visit_NCHAR(self, type_, **kw): ...
+    def visit_VARBINARY(self, type_, **kw): ...
+    def visit_large_binary(self, type_, **kw): ...
+    def visit_enum(self, type_, **kw): ...
+    def visit_BLOB(self, type_, **kw): ...
+    def visit_TINYBLOB(self, type_, **kw): ...
+    def visit_MEDIUMBLOB(self, type_, **kw): ...
+    def visit_LONGBLOB(self, type_, **kw): ...
+    def visit_ENUM(self, type_, **kw): ...
+    def visit_SET(self, type_, **kw): ...
+    def visit_BOOLEAN(self, type_, **kw): ...
+
+class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
+    reserved_words = ... # type: Any
+    def __init__(self, dialect, server_ansiquotes=..., **kw) -> None: ...
+
+class MySQLDialect(default.DefaultDialect):
+    name = ... # type: Any
+    supports_alter = ... # type: Any
+    supports_native_boolean = ... # type: Any
+    max_identifier_length = ... # type: Any
+    max_index_name_length = ... # type: Any
+    supports_native_enum = ... # type: Any
+    supports_sane_rowcount = ... # type: Any
+    supports_sane_multi_rowcount = ... # type: Any
+    supports_multivalues_insert = ... # type: Any
+    default_paramstyle = ... # type: Any
+    colspecs = ... # type: Any
+    statement_compiler = ... # type: Any
+    ddl_compiler = ... # type: Any
+    type_compiler = ... # type: Any
+    ischema_names = ... # type: Any
+    preparer = ... # type: Any
+    construct_arguments = ... # type: Any
+    isolation_level = ... # type: Any
+    def __init__(self, isolation_level=..., **kwargs) -> None: ...
+    def on_connect(self): ...
+    def set_isolation_level(self, connection, level): ...
+    def get_isolation_level(self, connection): ...
+    def do_commit(self, dbapi_connection): ...
+    def do_rollback(self, dbapi_connection): ...
+    def do_begin_twophase(self, connection, xid): ...
+    def do_prepare_twophase(self, connection, xid): ...
+    def do_rollback_twophase(self, connection, xid, is_prepared=..., recover=...): ...
+    def do_commit_twophase(self, connection, xid, is_prepared=..., recover=...): ...
+    def do_recover_twophase(self, connection): ...
+    def is_disconnect(self, e, connection, cursor): ...
+    def has_table(self, connection, table_name, schema=...): ...
+    identifier_preparer = ... # type: Any
+    def initialize(self, connection): ...
+    def get_schema_names(self, connection, **kw): ...
+    def get_table_names(self, connection, schema=..., **kw): ...
+    def get_view_names(self, connection, schema=..., **kw): ...
+    def get_table_options(self, connection, table_name, schema=..., **kw): ...
+    def get_columns(self, connection, table_name, schema=..., **kw): ...
+    def get_pk_constraint(self, connection, table_name, schema=..., **kw): ...
+    def get_foreign_keys(self, connection, table_name, schema=..., **kw): ...
+    def get_indexes(self, connection, table_name, schema=..., **kw): ...
+    def get_unique_constraints(self, connection, table_name, schema=..., **kw): ...
+    def get_view_definition(self, connection, view_name, schema=..., **kw): ...
+
+class ReflectedState:
+    columns = ... # type: Any
+    table_options = ... # type: Any
+    table_name = ... # type: Any
+    keys = ... # type: Any
+    constraints = ... # type: Any
+    def __init__(self) -> None: ...
+
+class MySQLTableDefinitionParser:
+    dialect = ... # type: Any
+    preparer = ... # type: Any
+    def __init__(self, dialect, preparer) -> None: ...
+    def parse(self, show_create, charset): ...
+
+class _DecodingRowProxy:
+    rowproxy = ... # type: Any
+    charset = ... # type: Any
+    def __init__(self, rowproxy, charset) -> None: ...
+    def __getitem__(self, index): ...
+    def __getattr__(self, attr): ...
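
Not part of the patch, just for context: column definitions using the MySQL-specific types stubbed above can be built without a live server; the table and column names here are invented.

from sqlalchemy import Column, MetaData, Table
from sqlalchemy.dialects.mysql import MEDIUMTEXT, TINYINT

notes = Table('notes', MetaData(),
              Column('flag', TINYINT(display_width=1)),
              Column('body', MEDIUMTEXT(charset='utf8')))
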
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
new file mode 100644
index 0000000..18ceed9
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
@@ -0,0 +1,6 @@
+# Stubs for sqlalchemy.engine (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def create_engine(*args, **kwargs): ...
+def engine_from_config(configuration, prefix=..., **kwargs): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
new file mode 100644
index 0000000..726741d
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
@@ -0,0 +1,39 @@
+# Stubs for sqlalchemy.engine.strategies (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import base
+
+strategies = ... # type: Any
+
+class EngineStrategy:
+    def __init__(self) -> None: ...
+    def create(self, *args, **kwargs): ...
+
+class DefaultEngineStrategy(EngineStrategy):
+    def create(self, name_or_url, **kwargs): ...
+
+class PlainEngineStrategy(DefaultEngineStrategy):
+    name = ... # type: Any
+    engine_cls = ... # type: Any
+
+class ThreadLocalEngineStrategy(DefaultEngineStrategy):
+    name = ... # type: Any
+    engine_cls = ... # type: Any
+
+class MockEngineStrategy(EngineStrategy):
+    name = ... # type: Any
+    def create(self, name_or_url, executor, **kwargs): ...
+    class MockConnection(base.Connectable):
+        execute = ... # type: Any
+        def __init__(self, dialect, execute) -> None: ...
+        engine = ... # type: Any
+        dialect = ... # type: Any
+        name = ... # type: Any
+        def contextual_connect(self, **kwargs): ...
+        def execution_options(self, **kw): ...
+        def compiler(self, statement, parameters, **kwargs): ...
+        def create(self, entity, **kwargs): ...
+        def drop(self, entity, **kwargs): ...
+        def execute(self, object, *multiparams, **params): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
new file mode 100644
index 0000000..fa68a57
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
@@ -0,0 +1,27 @@
+# Stubs for sqlalchemy.engine.url (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import dialects
+
+registry = dialects.registry
+
+class URL:
+    drivername = ... # type: Any
+    username = ... # type: Any
+    password = ... # type: Any
+    host = ... # type: Any
+    port = ... # type: Any
+    database = ... # type: Any
+    query = ... # type: Any
+    def __init__(self, drivername, username=..., password=..., host=..., port=..., database=..., query=...) -> None: ...
+    def __to_string__(self, hide_password=...): ...
+    def __hash__(self): ...
+    def __eq__(self, other): ...
+    def get_backend_name(self): ...
+    def get_driver_name(self): ...
+    def get_dialect(self): ...
+    def translate_connect_args(self, names=..., **kw): ...
+
+def make_url(name_or_url): ...
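
Illustrative only (not part of the patch): make_url and the URL attributes and methods stubbed above; the credentials below are placeholders.

from sqlalchemy.engine.url import make_url

url = make_url('postgresql://scott:tiger@localhost:5432/test')
print(url.drivername, url.host, url.database)
print(url.translate_connect_args())
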
diff --git a/typeshed/third_party/2.7/sqlalchemy/exc.pyi b/typeshed/third_party/2.7/sqlalchemy/exc.pyi
new file mode 100644
index 0000000..310770d
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/exc.pyi
@@ -0,0 +1,77 @@
+# Stubs for sqlalchemy.exc (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class SQLAlchemyError(Exception): ...
+class ArgumentError(SQLAlchemyError): ...
+class NoSuchModuleError(ArgumentError): ...
+class NoForeignKeysError(ArgumentError): ...
+class AmbiguousForeignKeysError(ArgumentError): ...
+
+class CircularDependencyError(SQLAlchemyError):
+    cycles = ... # type: Any
+    edges = ... # type: Any
+    def __init__(self, message, cycles, edges, msg=...) -> None: ...
+    def __reduce__(self): ...
+
+class CompileError(SQLAlchemyError): ...
+
+class UnsupportedCompilationError(CompileError):
+    def __init__(self, compiler, element_type) -> None: ...
+
+class IdentifierError(SQLAlchemyError): ...
+class DisconnectionError(SQLAlchemyError): ...
+class TimeoutError(SQLAlchemyError): ...
+class InvalidRequestError(SQLAlchemyError): ...
+class NoInspectionAvailable(InvalidRequestError): ...
+class ResourceClosedError(InvalidRequestError): ...
+class NoSuchColumnError(KeyError, InvalidRequestError): ...
+class NoReferenceError(InvalidRequestError): ...
+
+class NoReferencedTableError(NoReferenceError):
+    table_name = ... # type: Any
+    def __init__(self, message, tname) -> None: ...
+    def __reduce__(self): ...
+
+class NoReferencedColumnError(NoReferenceError):
+    table_name = ... # type: Any
+    column_name = ... # type: Any
+    def __init__(self, message, tname, cname) -> None: ...
+    def __reduce__(self): ...
+
+class NoSuchTableError(InvalidRequestError): ...
+class UnboundExecutionError(InvalidRequestError): ...
+class DontWrapMixin: ...
+
+UnmappedColumnError = ... # type: Any
+
+class StatementError(SQLAlchemyError):
+    statement = ... # type: Any
+    params = ... # type: Any
+    orig = ... # type: Any
+    detail = ... # type: Any
+    def __init__(self, message, statement, params, orig) -> None: ...
+    def add_detail(self, msg): ...
+    def __reduce__(self): ...
+    def __unicode__(self): ...
+
+class DBAPIError(StatementError):
+    @classmethod
+    def instance(cls, statement, params, orig, dbapi_base_err, connection_invalidated=..., dialect=...): ...
+    def __reduce__(self): ...
+    connection_invalidated = ... # type: Any
+    def __init__(self, statement, params, orig, connection_invalidated=...) -> None: ...
+
+class InterfaceError(DBAPIError): ...
+class DatabaseError(DBAPIError): ...
+class DataError(DatabaseError): ...
+class OperationalError(DatabaseError): ...
+class IntegrityError(DatabaseError): ...
+class InternalError(DatabaseError): ...
+class ProgrammingError(DatabaseError): ...
+class NotSupportedError(DatabaseError): ...
+class SADeprecationWarning(DeprecationWarning): ...
+class SAPendingDeprecationWarning(PendingDeprecationWarning): ...
+class SAWarning(RuntimeWarning): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/inspection.pyi b/typeshed/third_party/2.7/sqlalchemy/inspection.pyi
new file mode 100644
index 0000000..2d550cd
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/inspection.pyi
@@ -0,0 +1,5 @@
+# Stubs for sqlalchemy.inspection (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def inspect(subject, raiseerr=...): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/log.pyi b/typeshed/third_party/2.7/sqlalchemy/log.pyi
new file mode 100644
index 0000000..8a5b3bf
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/log.pyi
@@ -0,0 +1,14 @@
+import logging
+from typing import Any
+
+rootlogger = ... # type: Any
+
+class Identified(object):
+    def _should_log_debug(self) -> bool: ...
+    def _should_log_info(self) -> bool: ...
+
+class InstanceLogger(object): ...
+
+def instance_logger(instance, echoflag) -> None: ...
+
+class echo_property(object): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
new file mode 100644
index 0000000..5a76703
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
@@ -0,0 +1,95 @@
+# Stubs for sqlalchemy.orm (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import mapper
+from . import interfaces
+from . import deprecated_interfaces
+from . import util
+from . import properties
+from . import relationships
+from . import descriptor_props
+from . import session
+from . import scoping
+from . import query
+from ..util import langhelpers
+from . import strategy_options
+
+Mapper = mapper.Mapper
+class_mapper = mapper.class_mapper
+configure_mappers = mapper.configure_mappers
+reconstructor = mapper.reconstructor
+validates = mapper.validates
+EXT_CONTINUE = interfaces.EXT_CONTINUE
+EXT_STOP = interfaces.EXT_STOP
+PropComparator = interfaces.PropComparator
+MapperExtension = deprecated_interfaces.MapperExtension
+SessionExtension = deprecated_interfaces.SessionExtension
+AttributeExtension = deprecated_interfaces.AttributeExtension
+aliased = util.aliased
+join = util.join
+object_mapper = util.object_mapper
+outerjoin = util.outerjoin
+polymorphic_union = util.polymorphic_union
+was_deleted = util.was_deleted
+with_parent = util.with_parent
+with_polymorphic = util.with_polymorphic
+ColumnProperty = properties.ColumnProperty
+RelationshipProperty = relationships.RelationshipProperty
+ComparableProperty = descriptor_props.ComparableProperty
+CompositeProperty = descriptor_props.CompositeProperty
+SynonymProperty = descriptor_props.SynonymProperty
+foreign = relationships.foreign
+remote = relationships.remote
+Session = session.Session
+object_session = session.object_session
+sessionmaker = session.sessionmaker
+make_transient = session.make_transient
+make_transient_to_detached = session.make_transient_to_detached
+scoped_session = scoping.scoped_session
+AliasOption = query.AliasOption
+Query = query.Query
+Bundle = query.Bundle
+public_factory = langhelpers.public_factory
+
+def create_session(bind=..., **kwargs): ...
+
+relationship = ... # type: Any
+
+def relation(*arg, **kw): ...
+def dynamic_loader(argument, **kw): ...
+
+column_property = ... # type: Any
+composite = ... # type: Any
+
+def backref(name, **kwargs): ...
+def deferred(*columns, **kw): ...
+
+synonym = ... # type: Any
+comparable_property = ... # type: Any
+
+def compile_mappers(): ...
+def clear_mappers(): ...
+
+joinedload = ... # type: Any
+joinedload_all = ... # type: Any
+contains_eager = ... # type: Any
+defer = ... # type: Any
+undefer = ... # type: Any
+undefer_group = ... # type: Any
+load_only = ... # type: Any
+lazyload = ... # type: Any
+lazyload_all = ... # type: Any
+subqueryload = ... # type: Any
+subqueryload_all = ... # type: Any
+immediateload = ... # type: Any
+noload = ... # type: Any
+defaultload = ... # type: Any
+
+Load = strategy_options.Load
+
+def eagerload(*args, **kwargs): ...
+def eagerload_all(*args, **kwargs): ...
+
+contains_alias = ... # type: Any
diff --git a/typeshed/third_party/2.7/sqlalchemy/orm/session.pyi b/typeshed/third_party/2.7/sqlalchemy/orm/session.pyi
new file mode 100644
index 0000000..dcdf8b0
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/orm/session.pyi
@@ -0,0 +1,93 @@
+# Stubs for sqlalchemy.orm.session (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _SessionClassMethods:
+    @classmethod
+    def close_all(cls): ...
+    @classmethod
+    def identity_key(cls, orm_util, *args, **kwargs): ...
+    @classmethod
+    def object_session(cls, instance): ...
+
+class SessionTransaction:
+    session = ... # type: Any
+    nested = ... # type: Any
+    def __init__(self, session, parent=..., nested=...) -> None: ...
+    @property
+    def is_active(self): ...
+    def connection(self, bindkey, execution_options=..., **kwargs): ...
+    def prepare(self): ...
+    def commit(self): ...
+    def rollback(self, _capture_exception=...): ...
+    def close(self, invalidate=...): ...
+    def __enter__(self): ...
+    def __exit__(self, type, value, traceback): ...
+
+class Session(_SessionClassMethods):
+    public_methods = ... # type: Any
+    identity_map = ... # type: Any
+    bind = ... # type: Any
+    transaction = ... # type: Any
+    hash_key = ... # type: Any
+    autoflush = ... # type: Any
+    autocommit = ... # type: Any
+    expire_on_commit = ... # type: Any
+    twophase = ... # type: Any
+    def __init__(self, bind=..., autoflush=..., expire_on_commit=..., _enable_transaction_accounting=..., autocommit=..., twophase=..., weak_identity_map=..., binds=..., extension=..., info=..., query_cls=...) -> None: ...
+    connection_callable = ... # type: Any
+    def info(self): ...
+    def begin(self, subtransactions=..., nested=...): ...
+    def begin_nested(self): ...
+    def rollback(self): ...
+    def commit(self): ...
+    def prepare(self): ...
+    def connection(self, mapper=..., clause=..., bind=..., close_with_result=..., execution_options=..., **kw): ...
+    def execute(self, clause, params=..., mapper=..., bind=..., **kw): ...
+    def scalar(self, clause, params=..., mapper=..., bind=..., **kw): ...
+    def close(self): ...
+    def invalidate(self): ...
+    def expunge_all(self): ...
+    def bind_mapper(self, mapper, bind): ...
+    def bind_table(self, table, bind): ...
+    def get_bind(self, mapper=..., clause=...): ...
+    def query(self, *entities, **kwargs): ...
+    @property
+    def no_autoflush(self): ...
+    def refresh(self, instance, attribute_names=..., lockmode=...): ...
+    def expire_all(self): ...
+    def expire(self, instance, attribute_names=...): ...
+    def prune(self): ...
+    def expunge(self, instance): ...
+    def add(self, instance, _warn=...): ...
+    def add_all(self, instances): ...
+    def delete(self, instance): ...
+    def merge(self, instance, load=...): ...
+    def enable_relationship_loading(self, obj): ...
+    def __contains__(self, instance): ...
+    def __iter__(self): ...
+    def flush(self, objects=...): ...
+    def bulk_save_objects(self, objects, return_defaults=..., update_changed_only=...): ...
+    def bulk_insert_mappings(self, mapper, mappings, return_defaults=...): ...
+    def bulk_update_mappings(self, mapper, mappings): ...
+    def is_modified(self, instance, include_collections=..., passive=...): ...
+    @property
+    def is_active(self): ...
+    @property
+    def dirty(self): ...
+    @property
+    def deleted(self): ...
+    @property
+    def new(self): ...
+
+class sessionmaker(_SessionClassMethods):
+    kw = ... # type: Any
+    class_ = ... # type: Any
+    def __init__(self, bind=..., class_=..., autoflush=..., autocommit=..., expire_on_commit=..., info=..., **kw) -> None: ...
+    def __call__(self, **local_kw): ...
+    def configure(self, **new_kw): ...
+
+# Names in __all__ with no definition:
+#   SessionExtension
diff --git a/typeshed/third_party/2.7/sqlalchemy/pool.pyi b/typeshed/third_party/2.7/sqlalchemy/pool.pyi
new file mode 100644
index 0000000..7161dc5
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/pool.pyi
@@ -0,0 +1,118 @@
+# Stubs for sqlalchemy.pool (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import log
+from . import util
+
+threading = util.threading
+memoized_property = util.memoized_property
+chop_traceback = util.chop_traceback
+
+proxies = ... # type: Any
+
+def manage(module, **params): ...
+def clear_managers(): ...
+
+reset_rollback = ... # type: Any
+reset_commit = ... # type: Any
+reset_none = ... # type: Any
+
+class _ConnDialect:
+    def do_rollback(self, dbapi_connection): ...
+    def do_commit(self, dbapi_connection): ...
+    def do_close(self, dbapi_connection): ...
+
+class Pool(log.Identified):
+    logging_name = ... # type: Any
+    echo = ... # type: Any
+    def __init__(self, creator, recycle=..., echo=..., use_threadlocal=..., logging_name=..., reset_on_return=..., listeners=..., events=..., _dispatch=..., _dialect=...) -> None: ...
+    def add_listener(self, listener): ...
+    def unique_connection(self): ...
+    def recreate(self): ...
+    def dispose(self): ...
+    def connect(self): ...
+    def status(self): ...
+
+    _threadconns = ... # type: Any
+    _creator = ... # type: Any
+    _recycle = ... # type: Any
+    _invalidate_time = ... # type: Any
+    dispatch = ... # type: Any
+    _dialect = ... # type: Any
+    _orig_logging_name = ... # type: Any
+    _reset_on_return = ... # type: Any
+    _use_threadlocal = ... # type: Any
+
+class _ConnectionRecord:
+    connection = ... # type: Any
+    finalize_callback = ... # type: Any
+    def __init__(self, pool) -> None: ...
+    def info(self): ...
+    @classmethod
+    def checkout(cls, pool): ...
+    fairy_ref = ... # type: Any
+    def checkin(self): ...
+    def close(self): ...
+    def invalidate(self, e=..., soft=...): ...
+    def get_connection(self): ...
+
+class _ConnectionFairy:
+    connection = ... # type: Any
+    def __init__(self, dbapi_connection, connection_record, echo) -> None: ...
+    @property
+    def is_valid(self): ...
+    def info(self): ...
+    def invalidate(self, e=..., soft=...): ...
+    def cursor(self, *args, **kwargs): ...
+    def __getattr__(self, key): ...
+    def detach(self): ...
+    def close(self): ...
+
+class SingletonThreadPool(Pool):
+    size = ... # type: Any
+    def __init__(self, creator, pool_size=..., **kw) -> None: ...
+    def recreate(self): ...
+    def dispose(self): ...
+    def status(self): ...
+
+class QueuePool(Pool):
+    def __init__(self, creator, pool_size=..., max_overflow=..., timeout=..., **kw) -> None: ...
+    def recreate(self): ...
+    def dispose(self): ...
+    def status(self): ...
+    def size(self): ...
+    def checkedin(self): ...
+    def overflow(self): ...
+    def checkedout(self): ...
+
+class NullPool(Pool):
+    def status(self): ...
+    def recreate(self): ...
+    def dispose(self): ...
+
+class StaticPool(Pool):
+    def connection(self): ...
+    def status(self): ...
+    def dispose(self): ...
+    def recreate(self): ...
+
+class AssertionPool(Pool):
+    def __init__(self, *args, **kw) -> None: ...
+    def status(self): ...
+    def dispose(self): ...
+    def recreate(self): ...
+
+class _DBProxy:
+    module = ... # type: Any
+    kw = ... # type: Any
+    poolclass = ... # type: Any
+    pools = ... # type: Any
+    def __init__(self, module, poolclass=..., **kw) -> None: ...
+    def close(self): ...
+    def __del__(self): ...
+    def __getattr__(self, key): ...
+    def get_pool(self, *args, **kw): ...
+    def connect(self, *args, **kw): ...
+    def dispose(self, *args, **kw): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/schema.pyi b/typeshed/third_party/2.7/sqlalchemy/schema.pyi
new file mode 100644
index 0000000..f788897
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/schema.pyi
@@ -0,0 +1,50 @@
+# Stubs for sqlalchemy.schema (Python 2)
+
+from .sql import base
+from .sql import schema
+from .sql import naming
+from .sql import ddl
+from .sql import elements
+
+SchemaVisitor = base.SchemaVisitor
+CheckConstraint = schema.CheckConstraint
+Column = schema.Column
+ColumnDefault = schema.ColumnDefault
+Constraint = schema.Constraint
+DefaultClause = schema.DefaultClause
+DefaultGenerator = schema.DefaultGenerator
+FetchedValue = schema.FetchedValue
+ForeignKey = schema.ForeignKey
+ForeignKeyConstraint = schema.ForeignKeyConstraint
+Index = schema.Index
+MetaData = schema.MetaData
+PassiveDefault = schema.PassiveDefault
+PrimaryKeyConstraint = schema.PrimaryKeyConstraint
+SchemaItem = schema.SchemaItem
+Sequence = schema.Sequence
+Table = schema.Table
+ThreadLocalMetaData = schema.ThreadLocalMetaData
+UniqueConstraint = schema.UniqueConstraint
+_get_table_key = schema._get_table_key
+ColumnCollectionConstraint = schema.ColumnCollectionConstraint
+ColumnCollectionMixin = schema.ColumnCollectionMixin
+conv = elements.conv
+DDL = ddl.DDL
+CreateTable = ddl.CreateTable
+DropTable = ddl.DropTable
+CreateSequence = ddl.CreateSequence
+DropSequence = ddl.DropSequence
+CreateIndex = ddl.CreateIndex
+DropIndex = ddl.DropIndex
+CreateSchema = ddl.CreateSchema
+DropSchema = ddl.DropSchema
+_DropView = ddl._DropView
+CreateColumn = ddl.CreateColumn
+AddConstraint = ddl.AddConstraint
+DropConstraint = ddl.DropConstraint
+DDLBase = ddl.DDLBase
+DDLElement = ddl.DDLElement
+_CreateDropBase = ddl._CreateDropBase
+_DDLCompiles = ddl._DDLCompiles
+sort_tables = ddl.sort_tables
+sort_tables_and_constraints = ddl.sort_tables_and_constraints
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/__init__.pyi
new file mode 100644
index 0000000..91d06d8
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/__init__.pyi
@@ -0,0 +1,66 @@
+# Stubs for sqlalchemy.sql (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import expression
+from . import visitors
+
+Alias = expression.Alias
+ClauseElement = expression.ClauseElement
+ColumnCollection = expression.ColumnCollection
+ColumnElement = expression.ColumnElement
+CompoundSelect = expression.CompoundSelect
+Delete = expression.Delete
+FromClause = expression.FromClause
+Insert = expression.Insert
+Join = expression.Join
+Select = expression.Select
+Selectable = expression.Selectable
+TableClause = expression.TableClause
+Update = expression.Update
+alias = expression.alias
+and_ = expression.and_
+asc = expression.asc
+between = expression.between
+bindparam = expression.bindparam
+case = expression.case
+cast = expression.cast
+collate = expression.collate
+column = expression.column
+delete = expression.delete
+desc = expression.desc
+distinct = expression.distinct
+except_ = expression.except_
+except_all = expression.except_all
+exists = expression.exists
+extract = expression.extract
+false = expression.false
+False_ = expression.False_
+func = expression.func
+funcfilter = expression.funcfilter
+insert = expression.insert
+intersect = expression.intersect
+intersect_all = expression.intersect_all
+join = expression.join
+label = expression.label
+literal = expression.literal
+literal_column = expression.literal_column
+modifier = expression.modifier
+not_ = expression.not_
+null = expression.null
+or_ = expression.or_
+outerjoin = expression.outerjoin
+outparam = expression.outparam
+over = expression.over
+select = expression.select
+subquery = expression.subquery
+table = expression.table
+text = expression.text
+true = expression.true
+True_ = expression.True_
+tuple_ = expression.tuple_
+type_coerce = expression.type_coerce
+union = expression.union
+union_all = expression.union_all
+update = expression.update
+ClauseVisitor = visitors.ClauseVisitor
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/annotation.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/annotation.pyi
new file mode 100644
index 0000000..ba0aba4
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/annotation.pyi
@@ -0,0 +1,11 @@
+class Annotated(object):
+    def __new__(cls, *args): ...
+    def __init__(self, element, values): ...
+    def _annotate(self, values): ...
+    def _with_annotations(self, values): ...
+    def _deannotate(self, values=..., clone: bool=...): ...
+    def _compiler_dispatch(self, visitor, **kw): ...
+    def _constructor(self): ...
+    def _clone(self): ...
+    def __hash__(self): ...
+    def __eq__(self, other): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/base.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/base.pyi
new file mode 100644
index 0000000..48e68c7
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/base.pyi
@@ -0,0 +1,42 @@
+from typing import Any, Iterable
+
+from .visitors import ClauseVisitor
+from .. import util
+
+class Immutable(object):
+    def unique_params(self, *optionaldict, **kwargs): ...
+    def params(self, *optionaldict, **kwargs): ...
+    def _clone(self) -> Immutable: ...
+
+class DialectKWArgs(object):
+    def argument_for(cls, dialect_name, argument_name, default): ...
+    def kwargs(self): ...
+    def dialect_options(self): ...
+
+class Generative(object): ...
+
+class Executable(Generative):
+    def execution_options(self, **kw): ...
+    def execute(self, *multiparams, **params): ...
+    def scalar(self, *multiparams, **params): ...
+
+    @property
+    def bind(self): ...
+
+class SchemaEventTarget(object): ...
+class SchemaVisitor(ClauseVisitor): ...
+class ColumnCollection(util.OrderedProperties):
+    def replace(self, column): ...
+    def add(self, column): ...
+    def clear(self): ...
+    def remove(self, column): ...
+    def update(self, iter: Iterable[Any]): ...
+    def extend(self, iter: Iterable[Any]): ...
+    def contains_column(self, col): ...
+    def as_immutable(self): ...
+
+class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): ...
+
+class ColumnSet(util.ordered_column_set): ...
+
+def _bind_or_error(schemaitem, msg): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi
new file mode 100644
index 0000000..06ac96a
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi
@@ -0,0 +1,25 @@
+from .elements import ClauseElement
+from .base import Executable, SchemaVisitor
+
+class _DDLCompiles(ClauseElement): ...
+class DDLElement(Executable, _DDLCompiles): ...
+class DDL(DDLElement): ...
+class _CreateDropBase(DDLElement): ...
+class CreateSchema(_CreateDropBase): ...
+class DropSchema(_CreateDropBase): ...
+class CreateTable(_CreateDropBase): ...
+class _DropView(_CreateDropBase): ...
+class CreateColumn(_DDLCompiles): ...
+class DropTable(_CreateDropBase): ...
+class CreateSequence(_CreateDropBase): ...
+class DropSequence(_CreateDropBase): ...
+class CreateIndex(_CreateDropBase): ...
+class DropIndex(_CreateDropBase): ...
+class AddConstraint(_CreateDropBase): ...
+class DropConstraint(_CreateDropBase): ...
+class DDLBase(SchemaVisitor): ...
+class SchemaGenerator(DDLBase): ...
+class SchemaDropper(DDLBase): ...
+
+def sort_tables(tables, skip_fn=..., extra_dependencies=...): ...
+def sort_tables_and_constraints(tables, filter_fn=..., extra_dependencies=...): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi
new file mode 100644
index 0000000..79cb201
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi
@@ -0,0 +1,20 @@
+from typing import AnyStr
+
+from .base import Executable, DialectKWArgs
+from .elements import ClauseElement
+from .selectable import HasPrefixes
+
+class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
+    def params(self, *arg, **kw): ...
+    @property
+    def bind(self): ...
+    def returning(self, *cols): ...
+    def with_hint(self, text, selectable=..., dialect_name: AnyStr=...): ...
+
+class ValuesBase(UpdateBase):
+    def values(self, *args, **kwargs): ...
+    def return_defaults(self, *cols): ...
+
+class Insert(ValuesBase): ...
+class Update(ValuesBase): ...
+class Delete(UpdateBase): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/elements.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/elements.pyi
new file mode 100644
index 0000000..e2a8d38
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/elements.pyi
@@ -0,0 +1,60 @@
+
+from .visitors import Visitable
+from .annotation import Annotated
+from .base import Executable, Immutable
+from .operators import ColumnOperators
+from .. import util
+
+class ClauseElement(Visitable): ...
+
+class ColumnElement(ColumnOperators, ClauseElement): ...
+
+class BindParameter(ColumnElement): ...
+class BinaryExpression(ColumnElement): ...
+
+class TypeClause(ClauseElement): ...
+class TextClause(Executable, ClauseElement): ...
+
+class Null(ColumnElement): ...
+class False_(ColumnElement): ...
+class True_(ColumnElement): ...
+
+class ClauseList(ClauseElement): ...
+class BooleanClauseList(ClauseList, ColumnElement): ...
+class Tuple(ClauseList, ColumnElement): ...
+class Case(ColumnElement): ...
+class Cast(ColumnElement): ...
+class Extract(ColumnElement): ...
+class _label_reference(ColumnElement): ...
+
+class _textual_label_reference(ColumnElement): ...
+class UnaryExpression(ColumnElement): ...
+class AsBoolean(UnaryExpression): ...
+class Grouping(ColumnElement): ...
+class Over(ColumnElement): ...
+class FunctionFilter(ColumnElement): ...
+class Label(ColumnElement): ...
+class ColumnClause(Immutable, ColumnElement): ...
+class _IdentifiedClause(Executable, ClauseElement): ...
+class SavepointClause(_IdentifiedClause): ...
+class RollbackToSavepointClause(_IdentifiedClause): ...
+class ReleaseSavepointClause(_IdentifiedClause): ...
+class quoted_name(util.MemoizedSlots, util.text_type): ...
+class _truncated_label(quoted_name): ...
+class conv(_truncated_label): ...
+class _defer_name(_truncated_label): ...
+class _defer_none_name(_defer_name): ...
+class _anonymous_label(_truncated_label): ...
+class AnnotatedColumnElement(Annotated): ...
+
+def _clone(element, **kw): ...
+def _type_from_args(args): ...
+def _literal_as_binds(element, name, type_=None): ...
+
+def collate(expression, collation) -> BinaryExpression: ...
+def between(expr, lower_bound, upper_bound, symmetric: bool=...): ...
+def literal(value, type_=None) -> BindParameter: ...
+def outparam(key, type_=None) -> BindParameter: ...
+def type_coerce(expression, type_): ...
+def not_(clause): ...
+def literal_column(text, type_=None): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/expression.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/expression.pyi
new file mode 100644
index 0000000..fd24299
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/expression.pyi
@@ -0,0 +1,87 @@
+# Stubs for sqlalchemy.sql.expression (Python 2)
+
+from typing import Any
+from . import functions
+from . import elements
+from . import base
+from . import selectable
+from . import dml
+
+func = functions.func # type: functions._FunctionGenerator
+modifier = functions.modifier # type: functions._FunctionGenerator
+
+from .visitors import Visitable
+
+from .elements import ClauseElement, ColumnElement,\
+    BindParameter, UnaryExpression, BooleanClauseList, \
+    Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
+    True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
+    Grouping, not_, \
+    collate, literal_column, between,\
+    literal, outparam, type_coerce, ClauseList, FunctionFilter
+from .elements import SavepointClause, RollbackToSavepointClause, \
+    ReleaseSavepointClause
+from .base import ColumnCollection, Generative, Executable
+from .selectable import Alias, Join, Select, Selectable, TableClause, \
+    CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
+    alias, GenerativeSelect, \
+    subquery, HasPrefixes, HasSuffixes, Exists, ScalarSelect, TextAsFrom
+from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
+
+and_ = ... # type: Any
+or_ = ... # type: Any
+bindparam = ... # type: Any
+select = ... # type: Any
+text = ... # type: Any
+table = ... # type: Any
+column = ... # type: Any
+over = ... # type: Any
+label = ... # type: Any
+case = ... # type: Any
+cast = ... # type: Any
+extract = ... # type: Any
+tuple_ = ... # type: Any
+except_ = ... # type: Any
+except_all = ... # type: Any
+intersect = ... # type: Any
+intersect_all = ... # type: Any
+union = ... # type: Any
+union_all = ... # type: Any
+exists = ... # type: Any
+nullsfirst = ... # type: Any
+nullslast = ... # type: Any
+asc = ... # type: Any
+desc = ... # type: Any
+distinct = ... # type: Any
+true = ... # type: Any
+false = ... # type: Any
+null = ... # type: Any
+join = ... # type: Any
+outerjoin = ... # type: Any
+insert = ... # type: Any
+update = ... # type: Any
+delete = ... # type: Any
+funcfilter = ... # type: Any
+
+# old names for compatibility
+_Executable = Executable
+_BindParamClause = BindParameter
+_Label = Label
+_SelectBase = SelectBase
+_BinaryExpression = BinaryExpression
+_Cast = Cast
+_Null = Null
+_False = False_
+_True = True_
+_TextClause = TextClause
+_UnaryExpression = UnaryExpression
+_Case = Case
+_Tuple = Tuple
+_Over = Over
+_Generative = Generative
+_TypeClause = TypeClause
+_Extract = Extract
+_Exists = Exists
+_Grouping = Grouping
+_FromGrouping = FromGrouping
+_ScalarSelect = ScalarSelect
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/functions.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/functions.pyi
new file mode 100644
index 0000000..4be9907
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/functions.pyi
@@ -0,0 +1,47 @@
+
+from .base import Executable, ColumnCollection
+from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
+    literal_column, _type_from_args, ColumnElement, _clone,\
+    Over, BindParameter, FunctionFilter
+from .selectable import FromClause, Select, Alias
+from .visitors import VisitableType
+
+class FunctionElement(Executable, ColumnElement, FromClause): ...
+
+class _FunctionGenerator(object):
+    def __init__(self, **opts): ...
+    def __getattr__(self, name): ...
+    def __call__(self, *c, **kwargs) -> Function: ...
+
+func = ... # type: _FunctionGenerator
+modifier = ... # type: _FunctionGenerator
+
+class Function(FunctionElement): ...
+
+class _GenericMeta(VisitableType): ...
+# TODO: Use GenericFunction(util.with_metaclass(_GenericMeta, Function))
+class GenericFunction(_GenericMeta, Function): ...
+class next_value(GenericFunction): ...
+
+class AnsiFunction(GenericFunction): ...
+class ReturnTypeFromArgs(GenericFunction): ...
+
+class coalesce(ReturnTypeFromArgs): ...
+class max(ReturnTypeFromArgs): ...
+class min(ReturnTypeFromArgs): ...
+class sum(ReturnTypeFromArgs): ...
+class now(GenericFunction): ...
+class concat(GenericFunction): ...
+
+class char_length(GenericFunction): ...
+class random(GenericFunction): ...
+class count(GenericFunction): ...
+class current_date(AnsiFunction): ...
+class current_time(AnsiFunction): ...
+class current_timestamp(AnsiFunction): ...
+class current_user(AnsiFunction): ...
+class localtime(AnsiFunction): ...
+class localtimestamp(AnsiFunction): ...
+class session_user(AnsiFunction): ...
+class sysdate(AnsiFunction): ...
+class user(AnsiFunction): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi
new file mode 100644
index 0000000..d9172c4
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi
@@ -0,0 +1 @@
+class ConventionDict(object): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
new file mode 100644
index 0000000..1b33d00
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
@@ -0,0 +1,99 @@
+from typing import Any, AnyStr, Callable
+
+class Operators(object):
+    def op(self, opstring: AnyStr, precedence: int=..., is_comparison: bool=...): ...
+    def operate(self, op: Callable[[Any], Any], *other, **kwargs): ...
+    def reverse_operate(self, op: Callable[[Any], Any], *other, **kwargs): ...
+    def __and__(self, other): ...
+    def __or__(self, other): ...
+    def __invert__(self): ...
+
+
+class ColumnOperators(Operators):
+    def concat(self, other): ...
+    def like(self, other, escape=None): ...
+    def ilike(self, other, escape=None): ...
+    def notlike(self, other, escape=None): ...
+    def notilike(self, other, escape=None): ...
+    def in_(self, other): ...
+    def notin_(self, other): ...
+    def is_(self, other): ...
+    def startswith(self, other, **kwargs): ...
+    def endswith(self, other, **kwargs): ...
+    def contains(self, other, **kwargs): ...
+    def match(self, other, **kwargs): ...
+    def desc(self): ...
+    def asc(self): ...
+    def nullsfirst(self): ...
+    def nullslast(self): ...
+    def collate(self, collation): ...
+    def between(self, cleft, cright, symmetric: bool=...): ...
+    def distinct(self): ...
+
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def __gt__(self, other): ...
+    def __ge__(self, other): ...
+    def __neg__(self): ...
+    def __getitem__(self, index): ...
+    def __lshift__(self, other): ...
+    def __rshift__(self, other): ...
+
+    def __radd__(self, other): ...
+    def __rsub__(self, other): ...
+    def __rmul__(self, other): ...
+    def __rdiv__(self, other): ...
+    def __rmod__(self, other): ...
+    def __add__(self, other): ...
+    def __sub__(self, other): ...
+    def __mul__(self, other): ...
+    def __div__(self, other): ...
+    def __mod__(self, other): ...
+    def __truediv__(self, other): ...
+    def __rtruediv__(self, other): ...
+
+def from_(): ...
+def as_(): ...
+def exists(): ...
+def istrue(a): ...
+def isfalse(a): ...
+def is_(a, b): ...
+def isnot(a, b): ...
+def collate(a, b): ...
+def op(a, opstring, b): ...
+
+def like_op(a, b, escape=None): ...
+def notlike_op(a, b, escape=None): ...
+def ilike_op(a, b, escape=None): ...
+def notilike_op(a, b, escape=None): ...
+def between_op(a, b, symmetric: bool=...): ...
+def notbetween_op(a, b, symmetric: bool=...): ...
+
+def in_op(a, b): ...
+def notin_op(a, b): ...
+def distinct_op(a): ...
+
+def startswith_op(a, b, escape=None): ...
+def notstartswith_op(a, b, escape=None): ...
+def endswith_op(a, b, escape=None): ...
+def notendswith_op(a, b, escape=None): ...
+def contains_op(a, b, escape=None): ...
+def notcontains_op(a, b, escape=None): ...
+
+def match_op(a, b, **kw): ...
+def notmatch_op(a, b, **kw): ...
+
+def comma_op(a, b): ...
+def concat_op(a, b): ...
+
+def desc_op(a): ...
+def asc_op(a): ...
+def nullsfirst_op(a): ...
+def nullslast_op(a): ...
+
+def is_comparison(op): ...
+def is_commutative(op): ...
+def is_ordering_modifier(op): ...
+def is_precedent(operator, against): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
new file mode 100644
index 0000000..403795e
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
@@ -0,0 +1,98 @@
+from typing import Any, AnyStr
+
+from .base import SchemaEventTarget, DialectKWArgs
+from .base import ColumnCollection
+from .elements import ClauseElement, ColumnClause, TextClause, \
+    ColumnElement
+from .selectable import TableClause
+
+from . import visitors
+
+class SchemaItem(SchemaEventTarget, visitors.Visitable):
+    def _execute_on_connection(self, connection, multiparams, params): ...
+    @property
+    def info(self): ...
+    @property
+    def quote(self): ...
+    def get_children(self, **kwargs): ...
+    def _init_items(self, *args): ...
+    def _schema_item_copy(self, schema_item): ...
+    def __repr__(self): ...
+
+
+class Table(DialectKWArgs, SchemaItem, TableClause): ...
+
+class Column(SchemaItem, ColumnClause):
+    primary_key = ... # type: Any
+    def __init__(self, *args, **kwargs): ...
+    def references(self, column): ...
+    def append_foreign_key(self, fk): ...
+    def __repr__(self): ...
+    def _set_parent(self, table): ...
+    def _setup_on_memoized_fks(self, fn): ...
+    def _on_table_attach(self, fn): ...
+    def copy(self, **kw): ...
+    def _make_proxy(self, selectable, name=None, key=None,
+                    name_is_truncatable=False, **kw): ...
+    def get_children(self, schema_visitor=False, **kwargs): ...
+
+
+class ForeignKey(DialectKWArgs, SchemaItem):
+    def __init__(self, column, _constraint=None, use_alter=False, name=None,
+                 onupdate=None, ondelete=None, deferrable=None,
+                 initially=None, link_to_name=False, match=None,
+                 info=None, **dialect_kw) -> None: ...
+    def __repr__(self): ...
+    def copy(self, schema=None): ...
+    def _get_colspec(self, schema=None, table_name=None): ...
+    @property
+    def _referred_schema(self): ...
+    def _table_key(self): ...
+    def references(self, table): ...
+    def get_referent(self, table): ...
+    @property
+    def _column_tokens(self): ...
+    def _resolve_col_tokens(self): ...
+    def _link_to_col_by_colstring(self, parenttable, table, colname): ...
+    def _set_target_column(self, column): ...
+    @property
+    def column(self): ...
+    def _set_parent(self, column): ...
+    def _set_remote_table(self, table): ...
+    def _remove_from_metadata(self, metadata): ...
+    def _set_table(self, column, table): ...
+
+class _NotAColumnExpr(object): ...
+class DefaultGenerator(_NotAColumnExpr, SchemaItem): ...
+class ColumnDefault(DefaultGenerator): ...
+class Sequence(DefaultGenerator): ...
+class FetchedValue(_NotAColumnExpr, SchemaEventTarget): ...
+class DefaultClause(FetchedValue): ...
+class PassiveDefault(DefaultClause): ...
+class Constraint(DialectKWArgs, SchemaItem): ...
+class ColumnCollectionMixin(object):
+    columns = None # type: Any
+    def __init__(self, *columns, **kw): ...
+    @classmethod
+    def _extract_col_expression_collection(cls, expressions): ...
+    def _check_attach(self, evt=False): ...
+    def _set_parent(self, table): ...
+
+class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
+    def __init__(self, *columns, **kw): ...
+    def _set_parent(self, table): ...
+    def __contains__(self, x): ...
+    def copy(self, **kw): ...
+    def contains_column(self, col): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+
+class CheckConstraint(ColumnCollectionConstraint): ...
+class ForeignKeyConstraint(ColumnCollectionConstraint): ...
+class PrimaryKeyConstraint(ColumnCollectionConstraint): ...
+class UniqueConstraint(ColumnCollectionConstraint): ...
+class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): ...
+class MetaData(SchemaItem): ...
+class ThreadLocalMetaData(MetaData): ...
+
+def _get_table_key(name: AnyStr, schema: AnyStr): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi
new file mode 100644
index 0000000..045170b
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi
@@ -0,0 +1,60 @@
+from .base import Immutable, Executable, \
+    ColumnCollection, ColumnSet, Generative
+from .elements import ClauseElement, TextClause, ClauseList, \
+    Grouping, UnaryExpression, BindParameter
+from .annotation import Annotated
+from .visitors import Visitable
+
+def subquery(alias, *args, **kwargs): ...
+def alias(selectable, name=..., flat: bool=...): ...
+
+class Selectable(ClauseElement):
+    def selectable(self): ...
+
+class HasPrefixes(object):
+    def prefix_with(self, *expr, **kw): ...
+
+class HasSuffixes(object):
+    def suffix_with(self, *expr, **kw): ...
+
+class FromClause(Selectable):
+    def count(self, functions, whereclause=None, **params): ...
+    def select(self, whereclause=None, **params): ...
+    def join(self, right, onclause=None, isouter: bool=False): ...
+    def outerjoin(self, right, onclause=None): ...
+    def alias(self, name=None, flat: bool=False): ...
+    def is_derived_from(self, fromclause): ...
+    def _is_lexical_equivalent(self, other): ...
+    def replace_selectable(self, sqlutil, old, alias): ...
+    def correspond_on_equivalents(self, column, equivalents): ...
+    def corresponding_column(self, column, require_embedded: bool=False): ...
+    @property
+    def description(self): ...
+    def _reset_exported(self): ...
+    @property
+    def columns(self): ...
+    @property
+    def primary_key(self): ...
+    @property
+    def foreign_keys(self): ...
+    def _init_collections(self): ...
+    @property
+    def _cols_populated(self): ...
+    def _populate_column_collection(self): ...
+    def _refresh_for_new_column(self, column): ...
+
+class Join(FromClause): ...
+class Alias(FromClause): ...
+class CTE(Generative, HasSuffixes, Alias): ...
+class FromGrouping(FromClause): ...
+class TableClause(Immutable, FromClause): ...
+
+class ForUpdateArg(ClauseElement): ...
+class SelectBase(Executable, FromClause): ...
+class GenerativeSelect(SelectBase): ...
+class CompoundSelect(GenerativeSelect): ...
+class Select(HasPrefixes, HasSuffixes, GenerativeSelect): ...
+class ScalarSelect(Generative, Grouping): ...
+class Exists(UnaryExpression): ...
+class TextAsFrom(SelectBase): ...
+class AnnotatedFromClause(Annotated): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/sqltypes.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/sqltypes.pyi
new file mode 100644
index 0000000..9c9806a
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/sqltypes.pyi
@@ -0,0 +1,62 @@
+from .type_api import TypeEngine, TypeDecorator
+from .base import SchemaEventTarget
+
+class _DateAffinity(object): ...
+class Concatenable(object): ...
+class String(TypeEngine, Concatenable):
+    def __init__(self, length=None, collation=None,
+                 convert_unicode=False,
+                 unicode_error=None,
+                 _warn_on_bytestring=False): ...
+    def literal_processor(self, dialect): ...
+    def bind_processor(self, dialect): ...
+    def result_processor(self, dialect, coltype): ...
+    @property
+    def python_type(self): ...
+    def get_dbapi_type(self, dbapi): ...
+
+class Text(String): ...
+class Unicode(String): ...
+class UnicodeText(Text): ...
+class Integer(TypeEngine, _DateAffinity): ...
+class SmallInteger(Integer): ...
+class BigInteger(Integer): ...
+class Numeric(TypeEngine, _DateAffinity): ...
+class Float(Numeric): ...
+class DateTime(TypeEngine, _DateAffinity): ...
+class Date(TypeEngine, _DateAffinity): ...
+class Time(TypeEngine, _DateAffinity): ...
+class _Binary(TypeEngine): ...
+class LargeBinary(_Binary): ...
+class Binary(LargeBinary): ...
+class SchemaType(SchemaEventTarget): ...
+class Enum(String, SchemaType): ...
+class PickleType(TypeDecorator): ...
+class Boolean(TypeEngine, SchemaType): ...
+class Interval(_DateAffinity, TypeDecorator): ...
+
+class REAL(Float): ...
+class FLOAT(Float): ...
+class NUMERIC(Numeric): ...
+class DECIMAL(Numeric): ...
+class INTEGER(Integer): ...
+# In the actual module, INT is simply an alias (INT = INTEGER); it is declared as a subclass here.
+class INT(Integer): ...
+class SMALLINT(SmallInteger): ...
+class BIGINT(BigInteger): ...
+class TIMESTAMP(DateTime): ...
+class DATETIME(DateTime): ...
+class DATE(Date): ...
+class TIME(Time): ...
+class TEXT(Text): ...
+class CLOB(Text): ...
+class VARCHAR(String): ...
+class NVARCHAR(Unicode): ...
+class CHAR(String): ...
+class NCHAR(Unicode): ...
+class BLOB(LargeBinary): ...
+class BINARY(_Binary): ...
+class VARBINARY(_Binary): ...
+class BOOLEAN(Boolean): ...
+class NullType(TypeEngine): ...
+class MatchType(Boolean): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/type_api.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/type_api.pyi
new file mode 100644
index 0000000..ccc0870
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/type_api.pyi
@@ -0,0 +1,9 @@
+from .. import util
+from .visitors import Visitable, VisitableType
+
+class TypeEngine(Visitable): ...
+class VisitableCheckKWArg(util.EnsureKWArgType, VisitableType): ...
+# TODO: class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)):
+class UserDefinedType(VisitableCheckKWArg, TypeEngine): ...
+class TypeDecorator(TypeEngine): ...
+class Variant(TypeDecorator): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/visitors.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/visitors.pyi
new file mode 100644
index 0000000..3b549b4
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/visitors.pyi
@@ -0,0 +1,33 @@
+# Stubs for sqlalchemy.sql.visitors (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class VisitableType(type):
+    def __init__(cls, clsname, bases, clsdict) -> None: ...
+
+class Visitable: ...
+
+class ClauseVisitor:
+    __traverse_options__ = ... # type: Any
+    def traverse_single(self, obj, **kw): ...
+    def iterate(self, obj): ...
+    def traverse(self, obj): ...
+    def chain(self, visitor): ...
+
+class CloningVisitor(ClauseVisitor):
+    def copy_and_process(self, list_): ...
+    def traverse(self, obj): ...
+
+class ReplacingCloningVisitor(CloningVisitor):
+    def replace(self, elem): ...
+    def traverse(self, obj): ...
+
+def iterate(obj, opts): ...
+def iterate_depthfirst(obj, opts): ...
+def traverse_using(iterator, obj, visitors): ...
+def traverse(obj, opts, visitors): ...
+def traverse_depthfirst(obj, opts, visitors): ...
+def cloned_traverse(obj, opts, visitors): ...
+def replacement_traverse(obj, opts, replace): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/types.pyi b/typeshed/third_party/2.7/sqlalchemy/types.pyi
new file mode 100644
index 0000000..7aa160c
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/types.pyi
@@ -0,0 +1,51 @@
+# Stubs for sqlalchemy.types (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from .sql import type_api
+from .sql import sqltypes
+
+TypeEngine = type_api.TypeEngine
+TypeDecorator = type_api.TypeDecorator
+UserDefinedType = type_api.UserDefinedType
+BIGINT = sqltypes.BIGINT
+BINARY = sqltypes.BINARY
+BLOB = sqltypes.BLOB
+BOOLEAN = sqltypes.BOOLEAN
+BigInteger = sqltypes.BigInteger
+Binary = sqltypes.Binary
+Boolean = sqltypes.Boolean
+CHAR = sqltypes.CHAR
+CLOB = sqltypes.CLOB
+Concatenable = sqltypes.Concatenable
+DATE = sqltypes.DATE
+DATETIME = sqltypes.DATETIME
+DECIMAL = sqltypes.DECIMAL
+Date = sqltypes.Date
+DateTime = sqltypes.DateTime
+Enum = sqltypes.Enum
+FLOAT = sqltypes.FLOAT
+Float = sqltypes.Float
+INT = sqltypes.INT
+INTEGER = sqltypes.INTEGER
+Integer = sqltypes.Integer
+Interval = sqltypes.Interval
+LargeBinary = sqltypes.LargeBinary
+NCHAR = sqltypes.NCHAR
+NVARCHAR = sqltypes.NVARCHAR
+NUMERIC = sqltypes.NUMERIC
+Numeric = sqltypes.Numeric
+PickleType = sqltypes.PickleType
+REAL = sqltypes.REAL
+SMALLINT = sqltypes.SMALLINT
+SmallInteger = sqltypes.SmallInteger
+String = sqltypes.String
+TEXT = sqltypes.TEXT
+TIME = sqltypes.TIME
+TIMESTAMP = sqltypes.TIMESTAMP
+Text = sqltypes.Text
+Time = sqltypes.Time
+Unicode = sqltypes.Unicode
+UnicodeText = sqltypes.UnicodeText
+VARBINARY = sqltypes.VARBINARY
+VARCHAR = sqltypes.VARCHAR
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/util/__init__.pyi
new file mode 100644
index 0000000..a42c2ce
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/util/__init__.pyi
@@ -0,0 +1,133 @@
+# Stubs for sqlalchemy.util (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import compat
+from . import _collections
+from . import langhelpers
+from . import deprecations
+
+callable = compat.callable
+cmp = compat.cmp
+reduce = compat.reduce
+threading = compat.threading
+py3k = compat.py3k
+py33 = compat.py33
+py2k = compat.py2k
+jython = compat.jython
+pypy = compat.pypy
+cpython = compat.cpython
+win32 = compat.win32
+pickle = compat.pickle
+dottedgetter = compat.dottedgetter
+parse_qsl = compat.parse_qsl
+namedtuple = compat.namedtuple
+next = compat.next
+reraise = compat.reraise
+raise_from_cause = compat.raise_from_cause
+text_type = compat.text_type
+safe_kwarg = compat.safe_kwarg
+string_types = compat.string_types
+int_types = compat.int_types
+binary_type = compat.binary_type
+nested = compat.nested
+quote_plus = compat.quote_plus
+with_metaclass = compat.with_metaclass
+print_ = compat.print_
+itertools_filterfalse = compat.itertools_filterfalse
+u = compat.u
+ue = compat.ue
+b = compat.b
+unquote_plus = compat.unquote_plus
+unquote = compat.unquote
+b64decode = compat.b64decode
+b64encode = compat.b64encode
+byte_buffer = compat.byte_buffer
+itertools_filter = compat.itertools_filter
+iterbytes = compat.iterbytes
+StringIO = compat.StringIO
+inspect_getargspec = compat.inspect_getargspec
+zip_longest = compat.zip_longest
+KeyedTuple = _collections.KeyedTuple
+ImmutableContainer = _collections.ImmutableContainer
+immutabledict = _collections.immutabledict
+Properties = _collections.Properties
+OrderedProperties = _collections.OrderedProperties
+ImmutableProperties = _collections.ImmutableProperties
+OrderedDict = _collections.OrderedDict
+OrderedSet = _collections.OrderedSet
+IdentitySet = _collections.IdentitySet
+OrderedIdentitySet = _collections.OrderedIdentitySet
+column_set = _collections.column_set
+column_dict = _collections.column_dict
+ordered_column_set = _collections.ordered_column_set
+populate_column_dict = _collections.populate_column_dict
+unique_list = _collections.unique_list
+UniqueAppender = _collections.UniqueAppender
+PopulateDict = _collections.PopulateDict
+EMPTY_SET = _collections.EMPTY_SET
+to_list = _collections.to_list
+to_set = _collections.to_set
+to_column_set = _collections.to_column_set
+update_copy = _collections.update_copy
+flatten_iterator = _collections.flatten_iterator
+has_intersection = _collections.has_intersection
+LRUCache = _collections.LRUCache
+ScopedRegistry = _collections.ScopedRegistry
+ThreadLocalRegistry = _collections.ThreadLocalRegistry
+WeakSequence = _collections.WeakSequence
+coerce_generator_arg = _collections.coerce_generator_arg
+lightweight_named_tuple = _collections.lightweight_named_tuple
+iterate_attributes = langhelpers.iterate_attributes
+class_hierarchy = langhelpers.class_hierarchy
+portable_instancemethod = langhelpers.portable_instancemethod
+unbound_method_to_callable = langhelpers.unbound_method_to_callable
+getargspec_init = langhelpers.getargspec_init
+format_argspec_init = langhelpers.format_argspec_init
+format_argspec_plus = langhelpers.format_argspec_plus
+get_func_kwargs = langhelpers.get_func_kwargs
+get_cls_kwargs = langhelpers.get_cls_kwargs
+decorator = langhelpers.decorator
+as_interface = langhelpers.as_interface
+memoized_property = langhelpers.memoized_property
+memoized_instancemethod = langhelpers.memoized_instancemethod
+md5_hex = langhelpers.md5_hex
+group_expirable_memoized_property = langhelpers.group_expirable_memoized_property
+dependencies = langhelpers.dependencies
+decode_slice = langhelpers.decode_slice
+monkeypatch_proxied_specials = langhelpers.monkeypatch_proxied_specials
+asbool = langhelpers.asbool
+bool_or_str = langhelpers.bool_or_str
+coerce_kw_type = langhelpers.coerce_kw_type
+duck_type_collection = langhelpers.duck_type_collection
+assert_arg_type = langhelpers.assert_arg_type
+symbol = langhelpers.symbol
+dictlike_iteritems = langhelpers.dictlike_iteritems
+classproperty = langhelpers.classproperty
+set_creation_order = langhelpers.set_creation_order
+warn_exception = langhelpers.warn_exception
+warn = langhelpers.warn
+NoneType = langhelpers.NoneType
+constructor_copy = langhelpers.constructor_copy
+methods_equivalent = langhelpers.methods_equivalent
+chop_traceback = langhelpers.chop_traceback
+asint = langhelpers.asint
+generic_repr = langhelpers.generic_repr
+counter = langhelpers.counter
+PluginLoader = langhelpers.PluginLoader
+hybridproperty = langhelpers.hybridproperty
+hybridmethod = langhelpers.hybridmethod
+safe_reraise = langhelpers.safe_reraise
+get_callable_argspec = langhelpers.get_callable_argspec
+only_once = langhelpers.only_once
+attrsetter = langhelpers.attrsetter
+ellipses_string = langhelpers.ellipses_string
+warn_limited = langhelpers.warn_limited
+map_bits = langhelpers.map_bits
+MemoizedSlots = langhelpers.MemoizedSlots
+EnsureKWArgType = langhelpers.EnsureKWArgType
+warn_deprecated = deprecations.warn_deprecated
+warn_pending_deprecation = deprecations.warn_pending_deprecation
+deprecated = deprecations.deprecated
+pending_deprecation = deprecations.pending_deprecation
+inject_docstring_text = deprecations.inject_docstring_text
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/_collections.pyi b/typeshed/third_party/2.7/sqlalchemy/util/_collections.pyi
new file mode 100644
index 0000000..a25c7e9
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/util/_collections.pyi
@@ -0,0 +1,214 @@
+# Stubs for sqlalchemy.util._collections (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import compat
+
+threading = compat.threading
+itertools_filterfalse = compat.itertools_filterfalse
+string_types = compat.string_types
+
+EMPTY_SET = ... # type: Any
+
+class AbstractKeyedTuple(tuple):
+    def keys(self): ...
+
+class KeyedTuple(AbstractKeyedTuple):
+    def __new__(cls, vals, labels=...): ...
+    def __setattr__(self, key, value): ...
+
+class _LW(AbstractKeyedTuple):
+    def __new__(cls, vals): ...
+    def __reduce__(self): ...
+
+class ImmutableContainer:
+    __delitem__ = ... # type: Any
+
+class immutabledict(ImmutableContainer, dict):
+    clear = ... # type: Any
+    def __new__(cls, *args): ...
+    def __init__(self, *args) -> None: ...
+    def __reduce__(self): ...
+    def union(self, d): ...
+
+class Properties:
+    def __init__(self, data) -> None: ...
+    def __len__(self): ...
+    def __iter__(self): ...
+    def __add__(self, other): ...
+    def __setitem__(self, key, object): ...
+    def __getitem__(self, key): ...
+    def __delitem__(self, key): ...
+    def __setattr__(self, key, obj): ...
+    def __getattr__(self, key): ...
+    def __contains__(self, key): ...
+    def as_immutable(self): ...
+    def update(self, value): ...
+    def get(self, key, default=...): ...
+    def keys(self): ...
+    def values(self): ...
+    def items(self): ...
+    def has_key(self, key): ...
+    def clear(self): ...
+
+class OrderedProperties(Properties):
+    def __init__(self) -> None: ...
+
+class ImmutableProperties(ImmutableContainer, Properties): ...
+
+class OrderedDict(dict):
+    def __reduce__(self): ...
+    def __init__(self, ____sequence=..., **kwargs) -> None: ...
+    def clear(self): ...
+    def copy(self): ...
+    def __copy__(self): ...
+    def sort(self, *arg, **kw): ...
+    def update(self, ____sequence=..., **kwargs): ...
+    def setdefault(self, key, value): ...
+    def __iter__(self): ...
+    def keys(self): ...
+    def values(self): ...
+    def items(self): ...
+    def itervalues(self): ...
+    def iterkeys(self): ...
+    def iteritems(self): ...
+    def __setitem__(self, key, object): ...
+    def __delitem__(self, key): ...
+    def pop(self, key, *default): ...
+    def popitem(self): ...
+
+class OrderedSet(set):
+    def __init__(self, d=...) -> None: ...
+    def add(self, element): ...
+    def remove(self, element): ...
+    def insert(self, pos, element): ...
+    def discard(self, element): ...
+    def clear(self): ...
+    def __getitem__(self, key): ...
+    def __iter__(self): ...
+    def __add__(self, other): ...
+    def update(self, iterable): ...
+    __ior__ = ... # type: Any
+    def union(self, other): ...
+    __or__ = ... # type: Any
+    def intersection(self, other): ...
+    __and__ = ... # type: Any
+    def symmetric_difference(self, other): ...
+    __xor__ = ... # type: Any
+    def difference(self, other): ...
+    __sub__ = ... # type: Any
+    def intersection_update(self, other): ...
+    __iand__ = ... # type: Any
+    def symmetric_difference_update(self, other): ...
+    __ixor__ = ... # type: Any
+    def difference_update(self, other): ...
+    __isub__ = ... # type: Any
+
+class IdentitySet:
+    def __init__(self, iterable=...) -> None: ...
+    def add(self, value): ...
+    def __contains__(self, value): ...
+    def remove(self, value): ...
+    def discard(self, value): ...
+    def pop(self): ...
+    def clear(self): ...
+    def __cmp__(self, other): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    def issubset(self, iterable): ...
+    def __le__(self, other): ...
+    def __lt__(self, other): ...
+    def issuperset(self, iterable): ...
+    def __ge__(self, other): ...
+    def __gt__(self, other): ...
+    def union(self, iterable): ...
+    def __or__(self, other): ...
+    def update(self, iterable): ...
+    def __ior__(self, other): ...
+    def difference(self, iterable): ...
+    def __sub__(self, other): ...
+    def difference_update(self, iterable): ...
+    def __isub__(self, other): ...
+    def intersection(self, iterable): ...
+    def __and__(self, other): ...
+    def intersection_update(self, iterable): ...
+    def __iand__(self, other): ...
+    def symmetric_difference(self, iterable): ...
+    def __xor__(self, other): ...
+    def symmetric_difference_update(self, iterable): ...
+    def __ixor__(self, other): ...
+    def copy(self): ...
+    __copy__ = ... # type: Any
+    def __len__(self): ...
+    def __iter__(self): ...
+    def __hash__(self): ...
+
+class WeakSequence:
+    def __init__(self, __elements=...) -> None: ...
+    def append(self, item): ...
+    def __len__(self): ...
+    def __iter__(self): ...
+    def __getitem__(self, index): ...
+
+class OrderedIdentitySet(IdentitySet):
+    class _working_set(OrderedSet):
+        __sa_hash_exempt__ = ... # type: Any
+    def __init__(self, iterable=...) -> None: ...
+
+class PopulateDict(dict):
+    creator = ... # type: Any
+    def __init__(self, creator) -> None: ...
+    def __missing__(self, key): ...
+
+column_set = set
+column_dict = dict
+ordered_column_set = OrderedSet
+populate_column_dict = PopulateDict
+
+def unique_list(seq, hashfunc=...): ...
+
+class UniqueAppender:
+    data = ... # type: Any
+    def __init__(self, data, via=...) -> None: ...
+    def append(self, item): ...
+    def __iter__(self): ...
+
+def coerce_generator_arg(arg): ...
+def to_list(x, default=...): ...
+def has_intersection(set_, iterable): ...
+def to_set(x): ...
+def to_column_set(x): ...
+def update_copy(d, _new=..., **kw): ...
+def flatten_iterator(x): ...
+
+class LRUCache(dict):
+    capacity = ... # type: Any
+    threshold = ... # type: Any
+    def __init__(self, capacity=..., threshold=...) -> None: ...
+    def get(self, key, default=...): ...
+    def __getitem__(self, key): ...
+    def values(self): ...
+    def setdefault(self, key, value): ...
+    def __setitem__(self, key, value): ...
+
+def lightweight_named_tuple(name, fields): ...
+
+class ScopedRegistry:
+    createfunc = ... # type: Any
+    scopefunc = ... # type: Any
+    registry = ... # type: Any
+    def __init__(self, createfunc, scopefunc) -> None: ...
+    def __call__(self): ...
+    def has(self): ...
+    def set(self, obj): ...
+    def clear(self): ...
+
+class ThreadLocalRegistry(ScopedRegistry):
+    createfunc = ... # type: Any
+    registry = ... # type: Any
+    def __init__(self, createfunc) -> None: ...
+    def __call__(self): ...
+    def has(self): ...
+    def set(self, obj): ...
+    def clear(self): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/compat.pyi b/typeshed/third_party/2.7/sqlalchemy/util/compat.pyi
new file mode 100644
index 0000000..ee10636
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/util/compat.pyi
@@ -0,0 +1,67 @@
+# Stubs for sqlalchemy.util.compat (Python 2)
+
+from typing import Any
+from collections import namedtuple
+
+import threading
+import pickle
+from six.moves.urllib.parse import (quote_plus, unquote_plus,
+                                    parse_qsl, quote, unquote)
+# import configparser
+from six.moves import StringIO
+
+from io import BytesIO as byte_buffer
+
+from operator import attrgetter as dottedgetter
+
+from six.moves import zip_longest
+
+py33 = ... # type: Any
+py32 = ... # type: Any
+py3k = ... # type: Any
+py2k = ... # type: Any
+py265 = ... # type: Any
+jython = ... # type: Any
+pypy = ... # type: Any
+win32 = ... # type: Any
+cpython = ... # type: Any
+next = ... # type: Any
+safe_kwarg = ... # type: Any
+
+ArgSpec = namedtuple('ArgSpec', ['args', 'varargs', 'keywords', 'defaults'])
+
+def inspect_getargspec(func): ...
+
+string_types = ... # type: Any
+binary_type = ... # type: Any
+text_type = unicode
+int_types = ... # type: Any
+
+def callable(fn): ...
+def cmp(a, b): ...
+
+itertools_filterfalse = ... # type: Any
+itertools_filter = ... # type: Any
+itertools_imap = ... # type: Any
+
+def b64encode(x): ...
+def b64decode(x): ...
+
+def iterbytes(buf): ...
+def u(s): ...
+def ue(s): ...
+def b(s): ...
+def import_(*args): ...
+
+reduce = ... # type: Any
+
+def print_(*args, **kwargs): ...
+
+time_func = ... # type: Any
+
+def reraise(tp, value, tb=..., cause=...): ...
+def raise_from_cause(exception, exc_info=...): ...
+
+def exec_(func_text, globals_, lcl=...): ...
+def with_metaclass(meta, *bases): ...
+def nested(*managers): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/deprecations.pyi b/typeshed/third_party/2.7/sqlalchemy/util/deprecations.pyi
new file mode 100644
index 0000000..49940cf
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/util/deprecations.pyi
@@ -0,0 +1,13 @@
+# Stubs for sqlalchemy.util.deprecations (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import langhelpers
+
+decorator = langhelpers.decorator
+
+def warn_deprecated(msg, stacklevel=...): ...
+def warn_pending_deprecation(msg, stacklevel=...): ...
+def deprecated(version, message=..., add_deprecation_to_docstring=...): ...
+def pending_deprecation(version, message=..., add_deprecation_to_docstring=...): ...
+def inject_docstring_text(doctext, injecttext, pos): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi b/typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi
new file mode 100644
index 0000000..c16b1f9
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi
@@ -0,0 +1,136 @@
+# Stubs for sqlalchemy.util.langhelpers (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import compat
+
+def md5_hex(x): ...
+
+class safe_reraise:
+    def __enter__(self): ...
+    def __exit__(self, type_, value, traceback): ...
+
+def decode_slice(slc): ...
+def map_bits(fn, n): ...
+def decorator(target): ...
+def public_factory(target, location): ...
+
+class PluginLoader:
+    group = ... # type: Any
+    impls = ... # type: Any
+    auto_fn = ... # type: Any
+    def __init__(self, group, auto_fn=...) -> None: ...
+    def load(self, name): ...
+    def register(self, name, modulepath, objname): ...
+
+def get_cls_kwargs(cls, _set=...): ...
+def inspect_func_args(fn): ...
+def get_func_kwargs(func): ...
+def get_callable_argspec(fn, no_self=..., _is_init=...): ...
+def format_argspec_plus(fn, grouped=...): ...
+def format_argspec_init(method, grouped=...): ...
+def getargspec_init(method): ...
+def unbound_method_to_callable(func_or_cls): ...
+def generic_repr(obj, additional_kw=..., to_inspect=..., omit_kwarg=...): ...
+
+class portable_instancemethod:
+    target = ... # type: Any
+    name = ... # type: Any
+    def __init__(self, meth) -> None: ...
+    def __call__(self, *arg, **kw): ...
+
+def class_hierarchy(cls): ...
+def iterate_attributes(cls): ...
+def monkeypatch_proxied_specials(into_cls, from_cls, skip=..., only=..., name=..., from_instance=...): ...
+def methods_equivalent(meth1, meth2): ...
+def as_interface(obj, cls=..., methods=..., required=...): ...
+
+class memoized_property:
+    fget = ... # type: Any
+    __doc__ = ... # type: Any
+    __name__ = ... # type: Any
+    def __init__(self, fget, doc=...) -> None: ...
+    def __get__(self, obj, cls): ...
+    @classmethod
+    def reset(cls, obj, name): ...
+
+def memoized_instancemethod(fn): ...
+
+class group_expirable_memoized_property:
+    attributes = ... # type: Any
+    def __init__(self, attributes=...) -> None: ...
+    def expire_instance(self, instance): ...
+    def __call__(self, fn): ...
+    def method(self, fn): ...
+
+class MemoizedSlots:
+    def __getattr__(self, key): ...
+
+def dependency_for(modulename): ...
+
+class dependencies:
+    import_deps = ... # type: Any
+    def __init__(self, *deps) -> None: ...
+    def __call__(self, fn): ...
+    @classmethod
+    def resolve_all(cls, path): ...
+    class _importlater:
+        def __new__(cls, path, addtl): ...
+        def __init__(self, path, addtl) -> None: ...
+        def module(self): ...
+        def __getattr__(self, key): ...
+
+def asbool(obj): ...
+def bool_or_str(*text): ...
+def asint(value): ...
+def coerce_kw_type(kw, key, type_, flexi_bool=...): ...
+def constructor_copy(obj, cls, *args, **kw): ...
+def counter(): ...
+def duck_type_collection(specimen, default=...): ...
+def assert_arg_type(arg, argtype, name): ...
+def dictlike_iteritems(dictlike): ...
+
+class classproperty:
+    __doc__ = ... # type: Any
+    def __init__(self, fget, *arg, **kw) -> None: ...
+    def __get__(desc, self, cls): ...
+
+class hybridproperty:
+    func = ... # type: Any
+    def __init__(self, func) -> None: ...
+    def __get__(self, instance, owner): ...
+
+class hybridmethod:
+    func = ... # type: Any
+    def __init__(self, func) -> None: ...
+    def __get__(self, instance, owner): ...
+
+class _symbol(int):
+    def __new__(self, name, doc=..., canonical=...): ...
+    def __reduce__(self): ...
+
+class symbol:
+    symbols = ... # type: Any
+    def __new__(cls, name, doc=..., canonical=...): ...
+
+def set_creation_order(instance): ...
+def warn_exception(func, *args, **kwargs): ...
+def ellipses_string(value, len_=...): ...
+
+class _hash_limit_string(compat.text_type):
+    def __new__(cls, value, num, args): ...
+    def __hash__(self): ...
+    def __eq__(self, other): ...
+
+def warn(msg): ...
+def warn_limited(msg, args): ...
+def only_once(fn): ...
+def chop_traceback(tb, exclude_prefix=..., exclude_suffix=...): ...
+
+NoneType = ... # type: Any
+
+def attrsetter(attrname): ...
+
+class EnsureKWArgType(type):
+    def __init__(cls, clsname, bases, clsdict) -> None: ...
diff --git a/typeshed/third_party/2.7/thrift/Thrift.pyi b/typeshed/third_party/2.7/thrift/Thrift.pyi
new file mode 100644
index 0000000..78a61d6
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/Thrift.pyi
@@ -0,0 +1,55 @@
+# Stubs for thrift.Thrift (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class TType:
+    STOP = ... # type: Any
+    VOID = ... # type: Any
+    BOOL = ... # type: Any
+    BYTE = ... # type: Any
+    I08 = ... # type: Any
+    DOUBLE = ... # type: Any
+    I16 = ... # type: Any
+    I32 = ... # type: Any
+    I64 = ... # type: Any
+    STRING = ... # type: Any
+    UTF7 = ... # type: Any
+    STRUCT = ... # type: Any
+    MAP = ... # type: Any
+    SET = ... # type: Any
+    LIST = ... # type: Any
+    UTF8 = ... # type: Any
+    UTF16 = ... # type: Any
+
+class TMessageType:
+    CALL = ... # type: Any
+    REPLY = ... # type: Any
+    EXCEPTION = ... # type: Any
+    ONEWAY = ... # type: Any
+
+class TProcessor:
+    def process(iprot, oprot): ...
+
+class TException(Exception):
+    message = ... # type: Any
+    def __init__(self, message=...) -> None: ...
+
+class TApplicationException(TException):
+    UNKNOWN = ... # type: Any
+    UNKNOWN_METHOD = ... # type: Any
+    INVALID_MESSAGE_TYPE = ... # type: Any
+    WRONG_METHOD_NAME = ... # type: Any
+    BAD_SEQUENCE_ID = ... # type: Any
+    MISSING_RESULT = ... # type: Any
+    INTERNAL_ERROR = ... # type: Any
+    PROTOCOL_ERROR = ... # type: Any
+    INVALID_TRANSFORM = ... # type: Any
+    INVALID_PROTOCOL = ... # type: Any
+    UNSUPPORTED_CLIENT_TYPE = ... # type: Any
+    type = ... # type: Any
+    def __init__(self, type=..., message=...) -> None: ...
+    message = ... # type: Any
+    def read(self, iprot): ...
+    def write(self, oprot): ...
diff --git a/typeshed/third_party/2.7/thrift/__init__.pyi b/typeshed/third_party/2.7/thrift/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/thrift/protocol/TBinaryProtocol.pyi b/typeshed/third_party/2.7/thrift/protocol/TBinaryProtocol.pyi
new file mode 100644
index 0000000..1815457
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/protocol/TBinaryProtocol.pyi
@@ -0,0 +1,65 @@
+# Stubs for thrift.protocol.TBinaryProtocol (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+from .TProtocol import *
+
+class TBinaryProtocol(TProtocolBase):
+    VERSION_MASK = ... # type: Any
+    VERSION_1 = ... # type: Any
+    TYPE_MASK = ... # type: Any
+    strictRead = ... # type: Any
+    strictWrite = ... # type: Any
+    def __init__(self, trans, strictRead=..., strictWrite=...) -> None: ...
+    def writeMessageBegin(self, name, type, seqid): ...
+    def writeMessageEnd(self): ...
+    def writeStructBegin(self, name): ...
+    def writeStructEnd(self): ...
+    def writeFieldBegin(self, name, type, id): ...
+    def writeFieldEnd(self): ...
+    def writeFieldStop(self): ...
+    def writeMapBegin(self, ktype, vtype, size): ...
+    def writeMapEnd(self): ...
+    def writeListBegin(self, etype, size): ...
+    def writeListEnd(self): ...
+    def writeSetBegin(self, etype, size): ...
+    def writeSetEnd(self): ...
+    def writeBool(self, bool): ...
+    def writeByte(self, byte): ...
+    def writeI16(self, i16): ...
+    def writeI32(self, i32): ...
+    def writeI64(self, i64): ...
+    def writeDouble(self, dub): ...
+    def writeString(self, str): ...
+    def readMessageBegin(self): ...
+    def readMessageEnd(self): ...
+    def readStructBegin(self): ...
+    def readStructEnd(self): ...
+    def readFieldBegin(self): ...
+    def readFieldEnd(self): ...
+    def readMapBegin(self): ...
+    def readMapEnd(self): ...
+    def readListBegin(self): ...
+    def readListEnd(self): ...
+    def readSetBegin(self): ...
+    def readSetEnd(self): ...
+    def readBool(self): ...
+    def readByte(self): ...
+    def readI16(self): ...
+    def readI32(self): ...
+    def readI64(self): ...
+    def readDouble(self): ...
+    def readString(self): ...
+
+class TBinaryProtocolFactory:
+    strictRead = ... # type: Any
+    strictWrite = ... # type: Any
+    def __init__(self, strictRead=..., strictWrite=...) -> None: ...
+    def getProtocol(self, trans): ...
+
+class TBinaryProtocolAccelerated(TBinaryProtocol): ...
+
+class TBinaryProtocolAcceleratedFactory:
+    def getProtocol(self, trans): ...
diff --git a/typeshed/third_party/2.7/thrift/protocol/TProtocol.pyi b/typeshed/third_party/2.7/thrift/protocol/TProtocol.pyi
new file mode 100644
index 0000000..8ec1029
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/protocol/TProtocol.pyi
@@ -0,0 +1,79 @@
+# Stubs for thrift.protocol.TProtocol (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+from thrift.Thrift import *
+
+class TProtocolException(TException):
+    UNKNOWN = ... # type: Any
+    INVALID_DATA = ... # type: Any
+    NEGATIVE_SIZE = ... # type: Any
+    SIZE_LIMIT = ... # type: Any
+    BAD_VERSION = ... # type: Any
+    NOT_IMPLEMENTED = ... # type: Any
+    DEPTH_LIMIT = ... # type: Any
+    type = ... # type: Any
+    def __init__(self, type=..., message=...) -> None: ...
+
+class TProtocolBase:
+    trans = ... # type: Any
+    def __init__(self, trans) -> None: ...
+    def writeMessageBegin(self, name, ttype, seqid): ...
+    def writeMessageEnd(self): ...
+    def writeStructBegin(self, name): ...
+    def writeStructEnd(self): ...
+    def writeFieldBegin(self, name, ttype, fid): ...
+    def writeFieldEnd(self): ...
+    def writeFieldStop(self): ...
+    def writeMapBegin(self, ktype, vtype, size): ...
+    def writeMapEnd(self): ...
+    def writeListBegin(self, etype, size): ...
+    def writeListEnd(self): ...
+    def writeSetBegin(self, etype, size): ...
+    def writeSetEnd(self): ...
+    def writeBool(self, bool_val): ...
+    def writeByte(self, byte): ...
+    def writeI16(self, i16): ...
+    def writeI32(self, i32): ...
+    def writeI64(self, i64): ...
+    def writeDouble(self, dub): ...
+    def writeString(self, str_val): ...
+    def readMessageBegin(self): ...
+    def readMessageEnd(self): ...
+    def readStructBegin(self): ...
+    def readStructEnd(self): ...
+    def readFieldBegin(self): ...
+    def readFieldEnd(self): ...
+    def readMapBegin(self): ...
+    def readMapEnd(self): ...
+    def readListBegin(self): ...
+    def readListEnd(self): ...
+    def readSetBegin(self): ...
+    def readSetEnd(self): ...
+    def readBool(self): ...
+    def readByte(self): ...
+    def readI16(self): ...
+    def readI32(self): ...
+    def readI64(self): ...
+    def readDouble(self): ...
+    def readString(self): ...
+    def skip(self, ttype): ...
+    def readFieldByTType(self, ttype, spec): ...
+    def readContainerList(self, spec): ...
+    def readContainerSet(self, spec): ...
+    def readContainerStruct(self, spec): ...
+    def readContainerMap(self, spec): ...
+    def readStruct(self, obj, thrift_spec): ...
+    def writeContainerStruct(self, val, spec): ...
+    def writeContainerList(self, val, spec): ...
+    def writeContainerSet(self, val, spec): ...
+    def writeContainerMap(self, val, spec): ...
+    def writeStruct(self, obj, thrift_spec): ...
+    def writeFieldByTType(self, ttype, val, spec): ...
+
+def checkIntegerLimits(i, bits): ...
+
+class TProtocolFactory:
+    def getProtocol(self, trans): ...
diff --git a/typeshed/third_party/2.7/thrift/protocol/__init__.pyi b/typeshed/third_party/2.7/thrift/protocol/__init__.pyi
new file mode 100644
index 0000000..f98118b
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/protocol/__init__.pyi
@@ -0,0 +1,11 @@
+# Stubs for thrift.protocol (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# Names in __all__ with no definition:
+#   TBase
+#   TBinaryProtocol
+#   TCompactProtocol
+#   TJSONProtocol
+#   TProtocol
+#   fastbinary
diff --git a/typeshed/third_party/2.7/thrift/transport/TSocket.pyi b/typeshed/third_party/2.7/thrift/transport/TSocket.pyi
new file mode 100644
index 0000000..b26c04e
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/transport/TSocket.pyi
@@ -0,0 +1,32 @@
+# Stubs for thrift.transport.TSocket (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+from .TTransport import *
+
+class TSocketBase(TTransportBase):
+    handle = ... # type: Any
+    def close(self): ...
+
+class TSocket(TSocketBase):
+    host = ... # type: Any
+    port = ... # type: Any
+    handle = ... # type: Any
+    def __init__(self, host=..., port=..., unix_socket=..., socket_family=...) -> None: ...
+    def setHandle(self, h): ...
+    def isOpen(self): ...
+    def setTimeout(self, ms): ...
+    def open(self): ...
+    def read(self, sz): ...
+    def write(self, buff): ...
+    def flush(self): ...
+
+class TServerSocket(TSocketBase, TServerTransportBase):
+    host = ... # type: Any
+    port = ... # type: Any
+    handle = ... # type: Any
+    def __init__(self, host=..., port=..., unix_socket=..., socket_family=...) -> None: ...
+    def listen(self): ...
+    def accept(self): ...
diff --git a/typeshed/third_party/2.7/thrift/transport/TTransport.pyi b/typeshed/third_party/2.7/thrift/transport/TTransport.pyi
new file mode 100644
index 0000000..c2ffa9f
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/transport/TTransport.pyi
@@ -0,0 +1,111 @@
+# Stubs for thrift.transport.TTransport (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from thrift.Thrift import TException
+
+class TTransportException(TException):
+    UNKNOWN = ... # type: Any
+    NOT_OPEN = ... # type: Any
+    ALREADY_OPEN = ... # type: Any
+    TIMED_OUT = ... # type: Any
+    END_OF_FILE = ... # type: Any
+    type = ... # type: Any
+    def __init__(self, type=..., message=...) -> None: ...
+
+class TTransportBase:
+    def isOpen(self): ...
+    def open(self): ...
+    def close(self): ...
+    def read(self, sz): ...
+    def readAll(self, sz): ...
+    def write(self, buf): ...
+    def flush(self): ...
+
+class CReadableTransport:
+    @property
+    def cstringio_buf(self): ...
+    def cstringio_refill(self, partialread, reqlen): ...
+
+class TServerTransportBase:
+    def listen(self): ...
+    def accept(self): ...
+    def close(self): ...
+
+class TTransportFactoryBase:
+    def getTransport(self, trans): ...
+
+class TBufferedTransportFactory:
+    def getTransport(self, trans): ...
+
+class TBufferedTransport(TTransportBase, CReadableTransport):
+    DEFAULT_BUFFER = ... # type: Any
+    def __init__(self, trans, rbuf_size=...) -> None: ...
+    def isOpen(self): ...
+    def open(self): ...
+    def close(self): ...
+    def read(self, sz): ...
+    def write(self, buf): ...
+    def flush(self): ...
+    @property
+    def cstringio_buf(self): ...
+    def cstringio_refill(self, partialread, reqlen): ...
+
+class TMemoryBuffer(TTransportBase, CReadableTransport):
+    def __init__(self, value=...) -> None: ...
+    def isOpen(self): ...
+    def open(self): ...
+    def close(self): ...
+    def read(self, sz): ...
+    def write(self, buf): ...
+    def flush(self): ...
+    def getvalue(self): ...
+    @property
+    def cstringio_buf(self): ...
+    def cstringio_refill(self, partialread, reqlen): ...
+
+class TFramedTransportFactory:
+    def getTransport(self, trans): ...
+
+class TFramedTransport(TTransportBase, CReadableTransport):
+    def __init__(self, trans) -> None: ...
+    def isOpen(self): ...
+    def open(self): ...
+    def close(self): ...
+    def read(self, sz): ...
+    def readFrame(self): ...
+    def write(self, buf): ...
+    def flush(self): ...
+    @property
+    def cstringio_buf(self): ...
+    def cstringio_refill(self, prefix, reqlen): ...
+
+class TFileObjectTransport(TTransportBase):
+    fileobj = ... # type: Any
+    def __init__(self, fileobj) -> None: ...
+    def isOpen(self): ...
+    def close(self): ...
+    def read(self, sz): ...
+    def write(self, buf): ...
+    def flush(self): ...
+
+class TSaslClientTransport(TTransportBase, CReadableTransport):
+    START = ... # type: Any
+    OK = ... # type: Any
+    BAD = ... # type: Any
+    ERROR = ... # type: Any
+    COMPLETE = ... # type: Any
+    transport = ... # type: Any
+    sasl = ... # type: Any
+    def __init__(self, transport, host, service, mechanism=..., **sasl_kwargs) -> None: ...
+    def open(self): ...
+    def send_sasl_msg(self, status, body): ...
+    def recv_sasl_msg(self): ...
+    def write(self, data): ...
+    def flush(self): ...
+    def read(self, sz): ...
+    def close(self): ...
+    @property
+    def cstringio_buf(self): ...
+    def cstringio_refill(self, prefix, reqlen): ...
diff --git a/typeshed/third_party/2.7/thrift/transport/__init__.pyi b/typeshed/third_party/2.7/thrift/transport/__init__.pyi
new file mode 100644
index 0000000..ce02c61
--- /dev/null
+++ b/typeshed/third_party/2.7/thrift/transport/__init__.pyi
@@ -0,0 +1,9 @@
+# Stubs for thrift.transport (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+# Names in __all__ with no definition:
+#   THttpClient
+#   TSocket
+#   TTransport
+#   TZlibTransport
diff --git a/typeshed/third_party/2.7/tornado/__init__.pyi b/typeshed/third_party/2.7/tornado/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/tornado/concurrent.pyi b/typeshed/third_party/2.7/tornado/concurrent.pyi
new file mode 100644
index 0000000..f807f06
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/concurrent.pyi
@@ -0,0 +1,47 @@
+# Stubs for tornado.concurrent (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+futures = ... # type: Any
+
+class ReturnValueIgnoredError(Exception): ...
+
+class _TracebackLogger:
+    exc_info = ... # type: Any
+    formatted_tb = ... # type: Any
+    def __init__(self, exc_info) -> None: ...
+    def activate(self): ...
+    def clear(self): ...
+    def __del__(self): ...
+
+class Future:
+    def __init__(self) -> None: ...
+    def cancel(self): ...
+    def cancelled(self): ...
+    def running(self): ...
+    def done(self): ...
+    def result(self, timeout=...): ...
+    def exception(self, timeout=...): ...
+    def add_done_callback(self, fn): ...
+    def set_result(self, result): ...
+    def set_exception(self, exception): ...
+    def exc_info(self): ...
+    def set_exc_info(self, exc_info): ...
+    def __del__(self): ...
+
+TracebackFuture = ... # type: Any
+FUTURES = ... # type: Any
+
+def is_future(x): ...
+
+class DummyExecutor:
+    def submit(self, fn, *args, **kwargs): ...
+    def shutdown(self, wait=...): ...
+
+dummy_executor = ... # type: Any
+
+def run_on_executor(*args, **kwargs): ...
+def return_future(f): ...
+def chain_future(a, b): ...
diff --git a/typeshed/third_party/2.7/tornado/gen.pyi b/typeshed/third_party/2.7/tornado/gen.pyi
new file mode 100644
index 0000000..1e16521
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/gen.pyi
@@ -0,0 +1,113 @@
+# Stubs for tornado.gen (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import namedtuple
+
+singledispatch = ... # type: Any
+
+class KeyReuseError(Exception): ...
+class UnknownKeyError(Exception): ...
+class LeakedCallbackError(Exception): ...
+class BadYieldError(Exception): ...
+class ReturnValueIgnoredError(Exception): ...
+class TimeoutError(Exception): ...
+
+def engine(func): ...
+def coroutine(func, replace_callback=...): ...
+
+class Return(Exception):
+    value = ... # type: Any
+    def __init__(self, value=...) -> None: ...
+
+class WaitIterator:
+    current_index = ... # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+    def done(self): ...
+    def next(self): ...
+
+class YieldPoint:
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+class Callback(YieldPoint):
+    key = ... # type: Any
+    def __init__(self, key) -> None: ...
+    runner = ... # type: Any
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+class Wait(YieldPoint):
+    key = ... # type: Any
+    def __init__(self, key) -> None: ...
+    runner = ... # type: Any
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+class WaitAll(YieldPoint):
+    keys = ... # type: Any
+    def __init__(self, keys) -> None: ...
+    runner = ... # type: Any
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+def Task(func, *args, **kwargs): ...
+
+class YieldFuture(YieldPoint):
+    future = ... # type: Any
+    io_loop = ... # type: Any
+    def __init__(self, future, io_loop=...) -> None: ...
+    runner = ... # type: Any
+    key = ... # type: Any
+    result_fn = ... # type: Any
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+class Multi(YieldPoint):
+    keys = ... # type: Any
+    children = ... # type: Any
+    unfinished_children = ... # type: Any
+    quiet_exceptions = ... # type: Any
+    def __init__(self, children, quiet_exceptions=...) -> None: ...
+    def start(self, runner): ...
+    def is_ready(self): ...
+    def get_result(self): ...
+
+def multi_future(children, quiet_exceptions=...): ...
+def maybe_future(x): ...
+def with_timeout(timeout, future, io_loop=..., quiet_exceptions=...): ...
+def sleep(duration): ...
+
+moment = ... # type: Any
+
+class Runner:
+    gen = ... # type: Any
+    result_future = ... # type: Any
+    future = ... # type: Any
+    yield_point = ... # type: Any
+    pending_callbacks = ... # type: Any
+    results = ... # type: Any
+    running = ... # type: Any
+    finished = ... # type: Any
+    had_exception = ... # type: Any
+    io_loop = ... # type: Any
+    stack_context_deactivate = ... # type: Any
+    def __init__(self, gen, result_future, first_yielded) -> None: ...
+    def register_callback(self, key): ...
+    def is_ready(self, key): ...
+    def set_result(self, key, result): ...
+    def pop_result(self, key): ...
+    def run(self): ...
+    def handle_yield(self, yielded): ...
+    def result_callback(self, key): ...
+    def handle_exception(self, typ, value, tb): ...
+
+Arguments = namedtuple('Arguments', ['args', 'kwargs'])
+
+def convert_yielded(yielded): ...
diff --git a/typeshed/third_party/2.7/tornado/httpclient.pyi b/typeshed/third_party/2.7/tornado/httpclient.pyi
new file mode 100644
index 0000000..e24b528
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/httpclient.pyi
@@ -0,0 +1,112 @@
+# Stubs for tornado.httpclient (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado.util import Configurable
+
+class HTTPClient:
+    def __init__(self, async_client_class=..., **kwargs) -> None: ...
+    def __del__(self): ...
+    def close(self): ...
+    def fetch(self, request, **kwargs): ...
+
+class AsyncHTTPClient(Configurable):
+    @classmethod
+    def configurable_base(cls): ...
+    @classmethod
+    def configurable_default(cls): ...
+    def __new__(cls, io_loop=..., force_instance=..., **kwargs): ...
+    io_loop = ... # type: Any
+    defaults = ... # type: Any
+    def initialize(self, io_loop, defaults=...): ...
+    def close(self): ...
+    def fetch(self, request, callback=..., raise_error=..., **kwargs): ...
+    def fetch_impl(self, request, callback): ...
+    @classmethod
+    def configure(cls, impl, **kwargs): ...
+
+class HTTPRequest:
+    headers = ... # type: Any
+    proxy_host = ... # type: Any
+    proxy_port = ... # type: Any
+    proxy_username = ... # type: Any
+    proxy_password = ... # type: Any
+    url = ... # type: Any
+    method = ... # type: Any
+    body = ... # type: Any
+    body_producer = ... # type: Any
+    auth_username = ... # type: Any
+    auth_password = ... # type: Any
+    auth_mode = ... # type: Any
+    connect_timeout = ... # type: Any
+    request_timeout = ... # type: Any
+    follow_redirects = ... # type: Any
+    max_redirects = ... # type: Any
+    user_agent = ... # type: Any
+    decompress_response = ... # type: Any
+    network_interface = ... # type: Any
+    streaming_callback = ... # type: Any
+    header_callback = ... # type: Any
+    prepare_curl_callback = ... # type: Any
+    allow_nonstandard_methods = ... # type: Any
+    validate_cert = ... # type: Any
+    ca_certs = ... # type: Any
+    allow_ipv6 = ... # type: Any
+    client_key = ... # type: Any
+    client_cert = ... # type: Any
+    ssl_options = ... # type: Any
+    expect_100_continue = ... # type: Any
+    start_time = ... # type: Any
+    def __init__(self, url, method=..., headers=..., body=..., auth_username=..., auth_password=..., auth_mode=..., connect_timeout=..., request_timeout=..., if_modified_since=..., follow_redirects=..., max_redirects=..., user_agent=..., use_gzip=..., network_interface=..., streaming_callback=..., header_callback=..., prepare_curl_callback=..., proxy_host=..., proxy_port=..., proxy_username=..., proxy_password=..., allow_nonstandard_methods=..., validate_cert=..., ca_certs=..., allow_ipv [...]
+    @property
+    def headers(self): ...
+    @headers.setter
+    def headers(self, value): ...
+    @property
+    def body(self): ...
+    @body.setter
+    def body(self, value): ...
+    @property
+    def body_producer(self): ...
+    @body_producer.setter
+    def body_producer(self, value): ...
+    @property
+    def streaming_callback(self): ...
+    @streaming_callback.setter
+    def streaming_callback(self, value): ...
+    @property
+    def header_callback(self): ...
+    @header_callback.setter
+    def header_callback(self, value): ...
+    @property
+    def prepare_curl_callback(self): ...
+    @prepare_curl_callback.setter
+    def prepare_curl_callback(self, value): ...
+
+class HTTPResponse:
+    request = ... # type: Any
+    code = ... # type: Any
+    reason = ... # type: Any
+    headers = ... # type: Any
+    buffer = ... # type: Any
+    effective_url = ... # type: Any
+    error = ... # type: Any
+    request_time = ... # type: Any
+    time_info = ... # type: Any
+    def __init__(self, request, code, headers=..., buffer=..., effective_url=..., error=..., request_time=..., time_info=..., reason=...) -> None: ...
+    body = ... # type: Any
+    def rethrow(self): ...
+
+class HTTPError(Exception):
+    code = ... # type: Any
+    response = ... # type: Any
+    def __init__(self, code, message=..., response=...) -> None: ...
+
+class _RequestProxy:
+    request = ... # type: Any
+    defaults = ... # type: Any
+    def __init__(self, request, defaults) -> None: ...
+    def __getattr__(self, name): ...
+
+def main(): ...
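
A short sketch of the blocking HTTPClient interface stubbed above (the URL is illustrative); AsyncHTTPClient exposes the same fetch signature but returns a future instead of blocking.

from tornado.httpclient import HTTPClient, HTTPError

client = HTTPClient()
try:
    response = client.fetch("http://example.com/")  # returns an HTTPResponse
    print(response.code, len(response.body))
except HTTPError as err:
    # raised when the server returns a non-2xx status
    print("fetch failed:", err)
finally:
    client.close()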
diff --git a/typeshed/third_party/2.7/tornado/httpserver.pyi b/typeshed/third_party/2.7/tornado/httpserver.pyi
new file mode 100644
index 0000000..eb20bf3
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/httpserver.pyi
@@ -0,0 +1,45 @@
+# Stubs for tornado.httpserver (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado import httputil
+from tornado.tcpserver import TCPServer
+from tornado.util import Configurable
+
+class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate):
+    def __init__(self, *args, **kwargs) -> None: ...
+    request_callback = ... # type: Any
+    no_keep_alive = ... # type: Any
+    xheaders = ... # type: Any
+    protocol = ... # type: Any
+    conn_params = ... # type: Any
+    def initialize(self, request_callback, no_keep_alive=..., io_loop=..., xheaders=..., ssl_options=..., protocol=..., decompress_request=..., chunk_size=..., max_header_size=..., idle_connection_timeout=..., body_timeout=..., max_body_size=..., max_buffer_size=...): ...
+    @classmethod
+    def configurable_base(cls): ...
+    @classmethod
+    def configurable_default(cls): ...
+    def close_all_connections(self): ...
+    def handle_stream(self, stream, address): ...
+    def start_request(self, server_conn, request_conn): ...
+    def on_close(self, server_conn): ...
+
+class _HTTPRequestContext:
+    address = ... # type: Any
+    protocol = ... # type: Any
+    address_family = ... # type: Any
+    remote_ip = ... # type: Any
+    def __init__(self, stream, address, protocol) -> None: ...
+
+class _ServerRequestAdapter(httputil.HTTPMessageDelegate):
+    server = ... # type: Any
+    connection = ... # type: Any
+    request = ... # type: Any
+    delegate = ... # type: Any
+    def __init__(self, server, server_conn, request_conn) -> None: ...
+    def headers_received(self, start_line, headers): ...
+    def data_received(self, chunk): ...
+    def finish(self): ...
+    def on_connection_close(self): ...
+
+HTTPRequest = ... # type: Any
diff --git a/typeshed/third_party/2.7/tornado/httputil.pyi b/typeshed/third_party/2.7/tornado/httputil.pyi
new file mode 100644
index 0000000..5d89414
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/httputil.pyi
@@ -0,0 +1,93 @@
+# Stubs for tornado.httputil (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado.util import ObjectDict
+from collections import namedtuple
+
+class SSLError(Exception): ...
+
+class _NormalizedHeaderCache(dict):
+    size = ... # type: Any
+    queue = ... # type: Any
+    def __init__(self, size) -> None: ...
+    def __missing__(self, key): ...
+
+class HTTPHeaders(dict):
+    def __init__(self, *args, **kwargs) -> None: ...
+    def add(self, name, value): ...
+    def get_list(self, name): ...
+    def get_all(self): ...
+    def parse_line(self, line): ...
+    @classmethod
+    def parse(cls, headers): ...
+    def __setitem__(self, name, value): ...
+    def __getitem__(self, name): ...
+    def __delitem__(self, name): ...
+    def __contains__(self, name): ...
+    def get(self, name, default=...): ...
+    def update(self, *args, **kwargs): ...
+    def copy(self): ...
+    __copy__ = ... # type: Any
+    def __deepcopy__(self, memo_dict): ...
+
+class HTTPServerRequest:
+    method = ... # type: Any
+    uri = ... # type: Any
+    version = ... # type: Any
+    headers = ... # type: Any
+    body = ... # type: Any
+    remote_ip = ... # type: Any
+    protocol = ... # type: Any
+    host = ... # type: Any
+    files = ... # type: Any
+    connection = ... # type: Any
+    arguments = ... # type: Any
+    query_arguments = ... # type: Any
+    body_arguments = ... # type: Any
+    def __init__(self, method=..., uri=..., version=..., headers=..., body=..., host=..., files=..., connection=..., start_line=...) -> None: ...
+    def supports_http_1_1(self): ...
+    @property
+    def cookies(self): ...
+    def write(self, chunk, callback=...): ...
+    def finish(self): ...
+    def full_url(self): ...
+    def request_time(self): ...
+    def get_ssl_certificate(self, binary_form=...): ...
+
+class HTTPInputError(Exception): ...
+class HTTPOutputError(Exception): ...
+
+class HTTPServerConnectionDelegate:
+    def start_request(self, server_conn, request_conn): ...
+    def on_close(self, server_conn): ...
+
+class HTTPMessageDelegate:
+    def headers_received(self, start_line, headers): ...
+    def data_received(self, chunk): ...
+    def finish(self): ...
+    def on_connection_close(self): ...
+
+class HTTPConnection:
+    def write_headers(self, start_line, headers, chunk=..., callback=...): ...
+    def write(self, chunk, callback=...): ...
+    def finish(self): ...
+
+def url_concat(url, args): ...
+
+class HTTPFile(ObjectDict): ...
+
+def parse_body_arguments(content_type, body, arguments, files, headers=...): ...
+def parse_multipart_form_data(boundary, data, arguments, files): ...
+def format_timestamp(ts): ...
+
+RequestStartLine = namedtuple('RequestStartLine', ['method', 'path', 'version'])
+
+def parse_request_start_line(line): ...
+
+ResponseStartLine = namedtuple('ResponseStartLine', ['version', 'code', 'reason'])
+
+def parse_response_start_line(line): ...
+def doctests(): ...
+def split_host_and_port(netloc): ...
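
A small sketch of the header and start-line helpers stubbed here: HTTPHeaders keeps repeated header values, and parse_request_start_line returns the RequestStartLine namedtuple declared above. The example URL is illustrative.

from tornado.httputil import HTTPHeaders, parse_request_start_line, url_concat

headers = HTTPHeaders.parse("Content-Type: text/html\r\nSet-Cookie: a=1\r\nSet-Cookie: b=2\r\n")
print(headers["Content-Type"])          # 'text/html'
print(headers.get_list("Set-Cookie"))   # ['a=1', 'b=2']

start = parse_request_start_line("GET /ping HTTP/1.1")
print(start.method, start.path, start.version)

print(url_concat("http://example.com/search", {"q": "mypy"}))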
diff --git a/typeshed/third_party/2.7/tornado/ioloop.pyi b/typeshed/third_party/2.7/tornado/ioloop.pyi
new file mode 100644
index 0000000..667d98f
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/ioloop.pyi
@@ -0,0 +1,88 @@
+# Stubs for tornado.ioloop (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado.util import Configurable
+
+signal = ... # type: Any
+
+class TimeoutError(Exception): ...
+
+class IOLoop(Configurable):
+    NONE = ... # type: Any
+    READ = ... # type: Any
+    WRITE = ... # type: Any
+    ERROR = ... # type: Any
+    @staticmethod
+    def instance(): ...
+    @staticmethod
+    def initialized(): ...
+    def install(self): ...
+    @staticmethod
+    def clear_instance(): ...
+    @staticmethod
+    def current(instance=...): ...
+    def make_current(self): ...
+    @staticmethod
+    def clear_current(): ...
+    @classmethod
+    def configurable_base(cls): ...
+    @classmethod
+    def configurable_default(cls): ...
+    def initialize(self, make_current=...): ...
+    def close(self, all_fds=...): ...
+    def add_handler(self, fd, handler, events): ...
+    def update_handler(self, fd, events): ...
+    def remove_handler(self, fd): ...
+    def set_blocking_signal_threshold(self, seconds, action): ...
+    def set_blocking_log_threshold(self, seconds): ...
+    def log_stack(self, signal, frame): ...
+    def start(self): ...
+    def stop(self): ...
+    def run_sync(self, func, timeout=...): ...
+    def time(self): ...
+    def add_timeout(self, deadline, callback, *args, **kwargs): ...
+    def call_later(self, delay, callback, *args, **kwargs): ...
+    def call_at(self, when, callback, *args, **kwargs): ...
+    def remove_timeout(self, timeout): ...
+    def add_callback(self, callback, *args, **kwargs): ...
+    def add_callback_from_signal(self, callback, *args, **kwargs): ...
+    def spawn_callback(self, callback, *args, **kwargs): ...
+    def add_future(self, future, callback): ...
+    def handle_callback_exception(self, callback): ...
+    def split_fd(self, fd): ...
+    def close_fd(self, fd): ...
+
+class PollIOLoop(IOLoop):
+    time_func = ... # type: Any
+    def initialize(self, impl, time_func=..., **kwargs): ...
+    def close(self, all_fds=...): ...
+    def add_handler(self, fd, handler, events): ...
+    def update_handler(self, fd, events): ...
+    def remove_handler(self, fd): ...
+    def set_blocking_signal_threshold(self, seconds, action): ...
+    def start(self): ...
+    def stop(self): ...
+    def time(self): ...
+    def call_at(self, deadline, callback, *args, **kwargs): ...
+    def remove_timeout(self, timeout): ...
+    def add_callback(self, callback, *args, **kwargs): ...
+    def add_callback_from_signal(self, callback, *args, **kwargs): ...
+
+class _Timeout:
+    deadline = ... # type: Any
+    callback = ... # type: Any
+    tiebreaker = ... # type: Any
+    def __init__(self, deadline, callback, io_loop) -> None: ...
+    def __lt__(self, other): ...
+    def __le__(self, other): ...
+
+class PeriodicCallback:
+    callback = ... # type: Any
+    callback_time = ... # type: Any
+    io_loop = ... # type: Any
+    def __init__(self, callback, callback_time, io_loop=...) -> None: ...
+    def start(self): ...
+    def stop(self): ...
+    def is_running(self): ...
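
A sketch of the scheduling calls these IOLoop stubs expose; PeriodicCallback takes its interval in milliseconds, call_later in seconds.

from tornado.ioloop import IOLoop, PeriodicCallback

def tick():
    print("tick")

loop = IOLoop.current()
ticker = PeriodicCallback(tick, 500)   # fire every 500 ms
ticker.start()
loop.call_later(2, loop.stop)          # stop the loop after ~2 seconds
loop.start()
ticker.stop()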
diff --git a/typeshed/third_party/2.7/tornado/netutil.pyi b/typeshed/third_party/2.7/tornado/netutil.pyi
new file mode 100644
index 0000000..135450e
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/netutil.pyi
@@ -0,0 +1,49 @@
+# Stubs for tornado.netutil (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado.util import Configurable
+
+ssl = ... # type: Any
+certifi = ... # type: Any
+xrange = ... # type: Any
+ssl_match_hostname = ... # type: Any
+SSLCertificateError = ... # type: Any
+
+def bind_sockets(port, address=..., family=..., backlog=..., flags=...): ...
+def bind_unix_socket(file, mode=..., backlog=...): ...
+def add_accept_handler(sock, callback, io_loop=...): ...
+def is_valid_ip(ip): ...
+
+class Resolver(Configurable):
+    @classmethod
+    def configurable_base(cls): ...
+    @classmethod
+    def configurable_default(cls): ...
+    def resolve(self, host, port, family=..., callback=...): ...
+    def close(self): ...
+
+class ExecutorResolver(Resolver):
+    io_loop = ... # type: Any
+    executor = ... # type: Any
+    close_executor = ... # type: Any
+    def initialize(self, io_loop=..., executor=..., close_executor=...): ...
+    def close(self): ...
+    def resolve(self, host, port, family=...): ...
+
+class BlockingResolver(ExecutorResolver):
+    def initialize(self, io_loop=...): ...
+
+class ThreadedResolver(ExecutorResolver):
+    def initialize(self, io_loop=..., num_threads=...): ...
+
+class OverrideResolver(Resolver):
+    resolver = ... # type: Any
+    mapping = ... # type: Any
+    def initialize(self, resolver, mapping): ...
+    def close(self): ...
+    def resolve(self, host, port, *args, **kwargs): ...
+
+def ssl_options_to_context(ssl_options): ...
+def ssl_wrap_socket(socket, ssl_options, server_hostname=..., **kwargs): ...
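
A minimal sketch of the socket helpers stubbed here; binding to port 0 asks the OS for a free port.

from tornado.netutil import bind_sockets, is_valid_ip

print(is_valid_ip("127.0.0.1"))   # True
print(is_valid_ip("not-an-ip"))   # False

sockets = bind_sockets(0, address="127.0.0.1")  # port 0 picks a free port
for sock in sockets:
    print(sock.getsockname())
    sock.close()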
diff --git a/typeshed/third_party/2.7/tornado/tcpserver.pyi b/typeshed/third_party/2.7/tornado/tcpserver.pyi
new file mode 100644
index 0000000..3c032e1
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/tcpserver.pyi
@@ -0,0 +1,21 @@
+# Stubs for tornado.tcpserver (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+ssl = ... # type: Any
+
+class TCPServer:
+    io_loop = ... # type: Any
+    ssl_options = ... # type: Any
+    max_buffer_size = ... # type: Any
+    read_chunk_size = ... # type: Any
+    def __init__(self, io_loop=..., ssl_options=..., max_buffer_size=..., read_chunk_size=...) -> None: ...
+    def listen(self, port, address=...): ...
+    def add_sockets(self, sockets): ...
+    def add_socket(self, socket): ...
+    def bind(self, port, address=..., family=..., backlog=...): ...
+    def start(self, num_processes=...): ...
+    def stop(self): ...
+    def handle_stream(self, stream, address): ...
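
A sketch of subclassing TCPServer, close to the example in tornado's own documentation; handle_stream receives an IOStream (from tornado.iostream, which is not stubbed in this diff) and the peer address, and the EchoServer name is illustrative.

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer

class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        while True:
            try:
                line = yield stream.read_until(b"\n")
                yield stream.write(line)       # echo the line back
            except StreamClosedError:
                break

server = EchoServer()
server.listen(8888)
IOLoop.current().start()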
diff --git a/typeshed/third_party/2.7/tornado/util.pyi b/typeshed/third_party/2.7/tornado/util.pyi
new file mode 100644
index 0000000..fdc48c9
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/util.pyi
@@ -0,0 +1,50 @@
+# Stubs for tornado.util (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+xrange = ... # type: Any
+
+class ObjectDict(dict):
+    def __getattr__(self, name): ...
+    def __setattr__(self, name, value): ...
+
+class GzipDecompressor:
+    decompressobj = ... # type: Any
+    def __init__(self) -> None: ...
+    def decompress(self, value, max_length=...): ...
+    @property
+    def unconsumed_tail(self): ...
+    def flush(self): ...
+
+unicode_type = ... # type: Any
+basestring_type = ... # type: Any
+
+def import_object(name): ...
+
+bytes_type = ... # type: Any
+
+def errno_from_exception(e): ...
+
+class Configurable:
+    def __new__(cls, *args, **kwargs): ...
+    @classmethod
+    def configurable_base(cls): ...
+    @classmethod
+    def configurable_default(cls): ...
+    def initialize(self): ...
+    @classmethod
+    def configure(cls, impl, **kwargs): ...
+    @classmethod
+    def configured_class(cls): ...
+
+class ArgReplacer:
+    name = ... # type: Any
+    arg_pos = ... # type: Any
+    def __init__(self, func, name) -> None: ...
+    def get_old_value(self, args, kwargs, default=...): ...
+    def replace(self, new_value, args, kwargs): ...
+
+def timedelta_to_seconds(td): ...
+def doctests(): ...
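
A small sketch of two helpers stubbed above: ObjectDict allows attribute access to dict keys, and import_object resolves a dotted name to the object it names.

from tornado.util import ObjectDict, import_object

conf = ObjectDict(host="localhost", port=8888)
print(conf.host, conf["port"])   # attribute and item access are interchangeable
conf.debug = True

OrderedDict = import_object("collections.OrderedDict")
print(OrderedDict([("a", 1)]))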
diff --git a/typeshed/third_party/2.7/tornado/web.pyi b/typeshed/third_party/2.7/tornado/web.pyi
new file mode 100644
index 0000000..ab61ff4
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/web.pyi
@@ -0,0 +1,261 @@
+# Stubs for tornado.web (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from tornado import httputil
+
+MIN_SUPPORTED_SIGNED_VALUE_VERSION = ... # type: Any
+MAX_SUPPORTED_SIGNED_VALUE_VERSION = ... # type: Any
+DEFAULT_SIGNED_VALUE_VERSION = ... # type: Any
+DEFAULT_SIGNED_VALUE_MIN_VERSION = ... # type: Any
+
+class RequestHandler:
+    SUPPORTED_METHODS = ... # type: Any
+    application = ... # type: Any
+    request = ... # type: Any
+    path_args = ... # type: Any
+    path_kwargs = ... # type: Any
+    ui = ... # type: Any
+    def __init__(self, application, request, **kwargs) -> None: ...
+    def initialize(self): ...
+    @property
+    def settings(self): ...
+    def head(self, *args, **kwargs): ...
+    def get(self, *args, **kwargs): ...
+    def post(self, *args, **kwargs): ...
+    def delete(self, *args, **kwargs): ...
+    def patch(self, *args, **kwargs): ...
+    def put(self, *args, **kwargs): ...
+    def options(self, *args, **kwargs): ...
+    def prepare(self): ...
+    def on_finish(self): ...
+    def on_connection_close(self): ...
+    def clear(self): ...
+    def set_default_headers(self): ...
+    def set_status(self, status_code, reason=...): ...
+    def get_status(self): ...
+    def set_header(self, name, value): ...
+    def add_header(self, name, value): ...
+    def clear_header(self, name): ...
+    def get_argument(self, name, default=..., strip=...): ...
+    def get_arguments(self, name, strip=...): ...
+    def get_body_argument(self, name, default=..., strip=...): ...
+    def get_body_arguments(self, name, strip=...): ...
+    def get_query_argument(self, name, default=..., strip=...): ...
+    def get_query_arguments(self, name, strip=...): ...
+    def decode_argument(self, value, name=...): ...
+    @property
+    def cookies(self): ...
+    def get_cookie(self, name, default=...): ...
+    def set_cookie(self, name, value, domain=..., expires=..., path=..., expires_days=..., **kwargs): ...
+    def clear_cookie(self, name, path=..., domain=...): ...
+    def clear_all_cookies(self, path=..., domain=...): ...
+    def set_secure_cookie(self, name, value, expires_days=..., version=..., **kwargs): ...
+    def create_signed_value(self, name, value, version=...): ...
+    def get_secure_cookie(self, name, value=..., max_age_days=..., min_version=...): ...
+    def get_secure_cookie_key_version(self, name, value=...): ...
+    def redirect(self, url, permanent=..., status=...): ...
+    def write(self, chunk): ...
+    def render(self, template_name, **kwargs): ...
+    def render_string(self, template_name, **kwargs): ...
+    def get_template_namespace(self): ...
+    def create_template_loader(self, template_path): ...
+    def flush(self, include_footers=..., callback=...): ...
+    def finish(self, chunk=...): ...
+    def send_error(self, status_code=..., **kwargs): ...
+    def write_error(self, status_code, **kwargs): ...
+    @property
+    def locale(self): ...
+    @locale.setter
+    def locale(self, value): ...
+    def get_user_locale(self): ...
+    def get_browser_locale(self, default=...): ...
+    @property
+    def current_user(self): ...
+    @current_user.setter
+    def current_user(self, value): ...
+    def get_current_user(self): ...
+    def get_login_url(self): ...
+    def get_template_path(self): ...
+    @property
+    def xsrf_token(self): ...
+    def check_xsrf_cookie(self): ...
+    def xsrf_form_html(self): ...
+    def static_url(self, path, include_host=..., **kwargs): ...
+    def require_setting(self, name, feature=...): ...
+    def reverse_url(self, name, *args): ...
+    def compute_etag(self): ...
+    def set_etag_header(self): ...
+    def check_etag_header(self): ...
+    def data_received(self, chunk): ...
+    def log_exception(self, typ, value, tb): ...
+
+def asynchronous(method): ...
+def stream_request_body(cls): ...
+def removeslash(method): ...
+def addslash(method): ...
+
+class Application(httputil.HTTPServerConnectionDelegate):
+    transforms = ... # type: Any
+    handlers = ... # type: Any
+    named_handlers = ... # type: Any
+    default_host = ... # type: Any
+    settings = ... # type: Any
+    ui_modules = ... # type: Any
+    ui_methods = ... # type: Any
+    def __init__(self, handlers=..., default_host=..., transforms=..., **settings) -> None: ...
+    def listen(self, port, address=..., **kwargs): ...
+    def add_handlers(self, host_pattern, host_handlers): ...
+    def add_transform(self, transform_class): ...
+    def start_request(self, server_conn, request_conn): ...
+    def __call__(self, request): ...
+    def reverse_url(self, name, *args): ...
+    def log_request(self, handler): ...
+
+class _RequestDispatcher(httputil.HTTPMessageDelegate):
+    application = ... # type: Any
+    connection = ... # type: Any
+    request = ... # type: Any
+    chunks = ... # type: Any
+    handler_class = ... # type: Any
+    handler_kwargs = ... # type: Any
+    path_args = ... # type: Any
+    path_kwargs = ... # type: Any
+    def __init__(self, application, connection) -> None: ...
+    def headers_received(self, start_line, headers): ...
+    stream_request_body = ... # type: Any
+    def set_request(self, request): ...
+    def data_received(self, data): ...
+    def finish(self): ...
+    def on_connection_close(self): ...
+    handler = ... # type: Any
+    def execute(self): ...
+
+class HTTPError(Exception):
+    status_code = ... # type: Any
+    log_message = ... # type: Any
+    args = ... # type: Any
+    reason = ... # type: Any
+    def __init__(self, status_code, log_message=..., *args, **kwargs) -> None: ...
+
+class Finish(Exception): ...
+
+class MissingArgumentError(HTTPError):
+    arg_name = ... # type: Any
+    def __init__(self, arg_name) -> None: ...
+
+class ErrorHandler(RequestHandler):
+    def initialize(self, status_code): ...
+    def prepare(self): ...
+    def check_xsrf_cookie(self): ...
+
+class RedirectHandler(RequestHandler):
+    def initialize(self, url, permanent=...): ...
+    def get(self): ...
+
+class StaticFileHandler(RequestHandler):
+    CACHE_MAX_AGE = ... # type: Any
+    root = ... # type: Any
+    default_filename = ... # type: Any
+    def initialize(self, path, default_filename=...): ...
+    @classmethod
+    def reset(cls): ...
+    def head(self, path): ...
+    path = ... # type: Any
+    absolute_path = ... # type: Any
+    modified = ... # type: Any
+    def get(self, path, include_body=...): ...
+    def compute_etag(self): ...
+    def set_headers(self): ...
+    def should_return_304(self): ...
+    @classmethod
+    def get_absolute_path(cls, root, path): ...
+    def validate_absolute_path(self, root, absolute_path): ...
+    @classmethod
+    def get_content(cls, abspath, start=..., end=...): ...
+    @classmethod
+    def get_content_version(cls, abspath): ...
+    def get_content_size(self): ...
+    def get_modified_time(self): ...
+    def get_content_type(self): ...
+    def set_extra_headers(self, path): ...
+    def get_cache_time(self, path, modified, mime_type): ...
+    @classmethod
+    def make_static_url(cls, settings, path, include_version=...): ...
+    def parse_url_path(self, url_path): ...
+    @classmethod
+    def get_version(cls, settings, path): ...
+
+class FallbackHandler(RequestHandler):
+    fallback = ... # type: Any
+    def initialize(self, fallback): ...
+    def prepare(self): ...
+
+class OutputTransform:
+    def __init__(self, request) -> None: ...
+    def transform_first_chunk(self, status_code, headers, chunk, finishing): ...
+    def transform_chunk(self, chunk, finishing): ...
+
+class GZipContentEncoding(OutputTransform):
+    CONTENT_TYPES = ... # type: Any
+    MIN_LENGTH = ... # type: Any
+    def __init__(self, request) -> None: ...
+    def transform_first_chunk(self, status_code, headers, chunk, finishing): ...
+    def transform_chunk(self, chunk, finishing): ...
+
+def authenticated(method): ...
+
+class UIModule:
+    handler = ... # type: Any
+    request = ... # type: Any
+    ui = ... # type: Any
+    locale = ... # type: Any
+    def __init__(self, handler) -> None: ...
+    @property
+    def current_user(self): ...
+    def render(self, *args, **kwargs): ...
+    def embedded_javascript(self): ...
+    def javascript_files(self): ...
+    def embedded_css(self): ...
+    def css_files(self): ...
+    def html_head(self): ...
+    def html_body(self): ...
+    def render_string(self, path, **kwargs): ...
+
+class _linkify(UIModule):
+    def render(self, text, **kwargs): ...
+
+class _xsrf_form_html(UIModule):
+    def render(self): ...
+
+class TemplateModule(UIModule):
+    def __init__(self, handler) -> None: ...
+    def render(self, path, **kwargs): ...
+    def embedded_javascript(self): ...
+    def javascript_files(self): ...
+    def embedded_css(self): ...
+    def css_files(self): ...
+    def html_head(self): ...
+    def html_body(self): ...
+
+class _UIModuleNamespace:
+    handler = ... # type: Any
+    ui_modules = ... # type: Any
+    def __init__(self, handler, ui_modules) -> None: ...
+    def __getitem__(self, key): ...
+    def __getattr__(self, key): ...
+
+class URLSpec:
+    regex = ... # type: Any
+    handler_class = ... # type: Any
+    kwargs = ... # type: Any
+    name = ... # type: Any
+    def __init__(self, pattern, handler, kwargs=..., name=...) -> None: ...
+    def reverse(self, *args): ...
+
+url = ... # type: Any
+
+def create_signed_value(secret, name, value, version=..., clock=..., key_version=...): ...
+def decode_signed_value(secret, name, value, max_age_days=..., clock=..., min_version=...): ...
+def get_signature_key_version(value): ...
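
A hello-world sketch of the Application/RequestHandler API these stubs describe (handler and route names are illustrative); since the stub is dynamically typed, mypy accepts any arguments to these methods.

from tornado import web
from tornado.ioloop import IOLoop

class HelloHandler(web.RequestHandler):
    def get(self):
        name = self.get_argument("name", default="world")
        self.set_header("Content-Type", "text/plain")
        self.write("Hello, %s" % name)

app = web.Application([
    web.url(r"/hello", HelloHandler, name="hello"),
])
app.listen(8888)
print(app.reverse_url("hello"))   # '/hello'
IOLoop.current().start()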
diff --git a/typeshed/third_party/2.7/yaml/__init__.pyi b/typeshed/third_party/2.7/yaml/__init__.pyi
new file mode 100644
index 0000000..9d0e915
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/__init__.pyi
@@ -0,0 +1,51 @@
+# Stubs for yaml (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+#from yaml.error import *
+#from yaml.tokens import *
+#from yaml.events import *
+#from yaml.nodes import *
+#from yaml.loader import *
+#from yaml.dumper import *
+# TODO: stubs for cyaml?
+# from cyaml import *
+
+__with_libyaml__ = ... # type: Any
+
+def scan(stream, Loader=...): ...
+def parse(stream, Loader=...): ...
+def compose(stream, Loader=...): ...
+def compose_all(stream, Loader=...): ...
+def load(stream, Loader=...): ...
+def load_all(stream, Loader=...): ...
+def safe_load(stream): ...
+def safe_load_all(stream): ...
+def emit(events, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=...): ...
+def serialize_all(nodes, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...): ...
+def serialize(node, stream=..., Dumper=..., **kwds): ...
+def dump_all(documents, stream=..., Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...): ...
+def dump(data, stream=..., Dumper=..., **kwds): ...
+def safe_dump_all(documents, stream=..., **kwds): ...
+def safe_dump(data, stream=..., **kwds): ...
+def add_implicit_resolver(tag, regexp, first=..., Loader=..., Dumper=...): ...
+def add_path_resolver(tag, path, kind=..., Loader=..., Dumper=...): ...
+def add_constructor(tag, constructor, Loader=...): ...
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=...): ...
+def add_representer(data_type, representer, Dumper=...): ...
+def add_multi_representer(data_type, multi_representer, Dumper=...): ...
+
+class YAMLObjectMetaclass(type):
+    def __init__(cls, name, bases, kwds) -> None: ...
+
+class YAMLObject:
+    __metaclass__ = ... # type: Any
+    yaml_loader = ... # type: Any
+    yaml_dumper = ... # type: Any
+    yaml_tag = ... # type: Any
+    yaml_flow_style = ... # type: Any
+    @classmethod
+    def from_yaml(cls, loader, node): ...
+    @classmethod
+    def to_yaml(cls, dumper, data): ...
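
A minimal round-trip sketch of the PyYAML entry points stubbed here; safe_load and safe_dump restrict themselves to plain Python types, while load/dump can construct arbitrary objects.

import yaml

text = "name: mypy\nversion: 0.3.1\ntags: [typing, static-analysis]\n"
doc = yaml.safe_load(text)
print(doc["name"], doc["tags"])

print(yaml.safe_dump(doc, default_flow_style=False))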
diff --git a/typeshed/third_party/2.7/yaml/composer.pyi b/typeshed/third_party/2.7/yaml/composer.pyi
new file mode 100644
index 0000000..a6f21b4
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/composer.pyi
@@ -0,0 +1,21 @@
+# Stubs for yaml.composer (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import Mark, YAMLError, MarkedYAMLError
+from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode
+
+class ComposerError(MarkedYAMLError): ...
+
+class Composer:
+    anchors = ... # type: Any
+    def __init__(self) -> None: ...
+    def check_node(self): ...
+    def get_node(self): ...
+    def get_single_node(self): ...
+    def compose_document(self): ...
+    def compose_node(self, parent, index): ...
+    def compose_scalar_node(self, anchor): ...
+    def compose_sequence_node(self, anchor): ...
+    def compose_mapping_node(self, anchor): ...
diff --git a/typeshed/third_party/2.7/yaml/constructor.pyi b/typeshed/third_party/2.7/yaml/constructor.pyi
new file mode 100644
index 0000000..cb043a5
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/constructor.pyi
@@ -0,0 +1,70 @@
+# Stubs for yaml.constructor (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from yaml.error import Mark, YAMLError, MarkedYAMLError
+from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode
+
+from typing import Any
+
+class ConstructorError(MarkedYAMLError): ...
+
+class BaseConstructor:
+    yaml_constructors = ... # type: Any
+    yaml_multi_constructors = ... # type: Any
+    constructed_objects = ... # type: Any
+    recursive_objects = ... # type: Any
+    state_generators = ... # type: Any
+    deep_construct = ... # type: Any
+    def __init__(self) -> None: ...
+    def check_data(self): ...
+    def get_data(self): ...
+    def get_single_data(self): ...
+    def construct_document(self, node): ...
+    def construct_object(self, node, deep=...): ...
+    def construct_scalar(self, node): ...
+    def construct_sequence(self, node, deep=...): ...
+    def construct_mapping(self, node, deep=...): ...
+    def construct_pairs(self, node, deep=...): ...
+    def add_constructor(cls, tag, constructor): ...
+    def add_multi_constructor(cls, tag_prefix, multi_constructor): ...
+
+class SafeConstructor(BaseConstructor):
+    def construct_scalar(self, node): ...
+    def flatten_mapping(self, node): ...
+    def construct_mapping(self, node, deep=...): ...
+    def construct_yaml_null(self, node): ...
+    bool_values = ... # type: Any
+    def construct_yaml_bool(self, node): ...
+    def construct_yaml_int(self, node): ...
+    inf_value = ... # type: Any
+    nan_value = ... # type: Any
+    def construct_yaml_float(self, node): ...
+    def construct_yaml_binary(self, node): ...
+    timestamp_regexp = ... # type: Any
+    def construct_yaml_timestamp(self, node): ...
+    def construct_yaml_omap(self, node): ...
+    def construct_yaml_pairs(self, node): ...
+    def construct_yaml_set(self, node): ...
+    def construct_yaml_str(self, node): ...
+    def construct_yaml_seq(self, node): ...
+    def construct_yaml_map(self, node): ...
+    def construct_yaml_object(self, node, cls): ...
+    def construct_undefined(self, node): ...
+
+class Constructor(SafeConstructor):
+    def construct_python_str(self, node): ...
+    def construct_python_unicode(self, node): ...
+    def construct_python_long(self, node): ...
+    def construct_python_complex(self, node): ...
+    def construct_python_tuple(self, node): ...
+    def find_python_module(self, name, mark): ...
+    def find_python_name(self, name, mark): ...
+    def construct_python_name(self, suffix, node): ...
+    def construct_python_module(self, suffix, node): ...
+    class classobj: ...
+    def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ...
+    def set_python_instance_state(self, instance, state): ...
+    def construct_python_object(self, suffix, node): ...
+    def construct_python_object_apply(self, suffix, node, newobj=...): ...
+    def construct_python_object_new(self, suffix, node): ...
diff --git a/typeshed/third_party/2.7/yaml/dumper.pyi b/typeshed/third_party/2.7/yaml/dumper.pyi
new file mode 100644
index 0000000..85b75fb
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/dumper.pyi
@@ -0,0 +1,17 @@
+# Stubs for yaml.dumper (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from yaml.emitter import Emitter
+from yaml.serializer import Serializer
+from yaml.representer import BaseRepresenter, Representer, SafeRepresenter
+from yaml.resolver import BaseResolver, Resolver
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+    def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ...
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+    def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ...
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+    def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ...
diff --git a/typeshed/third_party/2.7/yaml/emitter.pyi b/typeshed/third_party/2.7/yaml/emitter.pyi
new file mode 100644
index 0000000..37cb581
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/emitter.pyi
@@ -0,0 +1,110 @@
+# Stubs for yaml.emitter (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import YAMLError
+
+class EmitterError(YAMLError): ...
+
+class ScalarAnalysis:
+    scalar = ... # type: Any
+    empty = ... # type: Any
+    multiline = ... # type: Any
+    allow_flow_plain = ... # type: Any
+    allow_block_plain = ... # type: Any
+    allow_single_quoted = ... # type: Any
+    allow_double_quoted = ... # type: Any
+    allow_block = ... # type: Any
+    def __init__(self, scalar, empty, multiline, allow_flow_plain, allow_block_plain, allow_single_quoted, allow_double_quoted, allow_block) -> None: ...
+
+class Emitter:
+    DEFAULT_TAG_PREFIXES = ... # type: Any
+    stream = ... # type: Any
+    encoding = ... # type: Any
+    states = ... # type: Any
+    state = ... # type: Any
+    events = ... # type: Any
+    event = ... # type: Any
+    indents = ... # type: Any
+    indent = ... # type: Any
+    flow_level = ... # type: Any
+    root_context = ... # type: Any
+    sequence_context = ... # type: Any
+    mapping_context = ... # type: Any
+    simple_key_context = ... # type: Any
+    line = ... # type: Any
+    column = ... # type: Any
+    whitespace = ... # type: Any
+    indention = ... # type: Any
+    open_ended = ... # type: Any
+    canonical = ... # type: Any
+    allow_unicode = ... # type: Any
+    best_indent = ... # type: Any
+    best_width = ... # type: Any
+    best_line_break = ... # type: Any
+    tag_prefixes = ... # type: Any
+    prepared_anchor = ... # type: Any
+    prepared_tag = ... # type: Any
+    analysis = ... # type: Any
+    style = ... # type: Any
+    def __init__(self, stream, canonical=..., indent=..., width=..., allow_unicode=..., line_break=...) -> None: ...
+    def dispose(self): ...
+    def emit(self, event): ...
+    def need_more_events(self): ...
+    def need_events(self, count): ...
+    def increase_indent(self, flow=..., indentless=...): ...
+    def expect_stream_start(self): ...
+    def expect_nothing(self): ...
+    def expect_first_document_start(self): ...
+    def expect_document_start(self, first=...): ...
+    def expect_document_end(self): ...
+    def expect_document_root(self): ...
+    def expect_node(self, root=..., sequence=..., mapping=..., simple_key=...): ...
+    def expect_alias(self): ...
+    def expect_scalar(self): ...
+    def expect_flow_sequence(self): ...
+    def expect_first_flow_sequence_item(self): ...
+    def expect_flow_sequence_item(self): ...
+    def expect_flow_mapping(self): ...
+    def expect_first_flow_mapping_key(self): ...
+    def expect_flow_mapping_key(self): ...
+    def expect_flow_mapping_simple_value(self): ...
+    def expect_flow_mapping_value(self): ...
+    def expect_block_sequence(self): ...
+    def expect_first_block_sequence_item(self): ...
+    def expect_block_sequence_item(self, first=...): ...
+    def expect_block_mapping(self): ...
+    def expect_first_block_mapping_key(self): ...
+    def expect_block_mapping_key(self, first=...): ...
+    def expect_block_mapping_simple_value(self): ...
+    def expect_block_mapping_value(self): ...
+    def check_empty_sequence(self): ...
+    def check_empty_mapping(self): ...
+    def check_empty_document(self): ...
+    def check_simple_key(self): ...
+    def process_anchor(self, indicator): ...
+    def process_tag(self): ...
+    def choose_scalar_style(self): ...
+    def process_scalar(self): ...
+    def prepare_version(self, version): ...
+    def prepare_tag_handle(self, handle): ...
+    def prepare_tag_prefix(self, prefix): ...
+    def prepare_tag(self, tag): ...
+    def prepare_anchor(self, anchor): ...
+    def analyze_scalar(self, scalar): ...
+    def flush_stream(self): ...
+    def write_stream_start(self): ...
+    def write_stream_end(self): ...
+    def write_indicator(self, indicator, need_whitespace, whitespace=..., indention=...): ...
+    def write_indent(self): ...
+    def write_line_break(self, data=...): ...
+    def write_version_directive(self, version_text): ...
+    def write_tag_directive(self, handle_text, prefix_text): ...
+    def write_single_quoted(self, text, split=...): ...
+    ESCAPE_REPLACEMENTS = ... # type: Any
+    def write_double_quoted(self, text, split=...): ...
+    def determine_block_hints(self, text): ...
+    def write_folded(self, text): ...
+    def write_literal(self, text): ...
+    def write_plain(self, text, split=...): ...
diff --git a/typeshed/third_party/2.7/yaml/error.pyi b/typeshed/third_party/2.7/yaml/error.pyi
new file mode 100644
index 0000000..0e1f625
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/error.pyi
@@ -0,0 +1,25 @@
+# Stubs for yaml.error (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Mark:
+    name = ... # type: Any
+    index = ... # type: Any
+    line = ... # type: Any
+    column = ... # type: Any
+    buffer = ... # type: Any
+    pointer = ... # type: Any
+    def __init__(self, name, index, line, column, buffer, pointer) -> None: ...
+    def get_snippet(self, indent=..., max_length=...): ...
+
+class YAMLError(Exception): ...
+
+class MarkedYAMLError(YAMLError):
+    context = ... # type: Any
+    context_mark = ... # type: Any
+    problem = ... # type: Any
+    problem_mark = ... # type: Any
+    note = ... # type: Any
+    def __init__(self, context=..., context_mark=..., problem=..., problem_mark=..., note=...) -> None: ...
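
A sketch of how the Mark and MarkedYAMLError types stubbed above surface in practice: parse errors carry a problem_mark whose line and column are zero-based.

import yaml
from yaml.error import YAMLError

try:
    yaml.safe_load("a: [1, 2")       # unclosed flow sequence
except YAMLError as exc:
    mark = getattr(exc, "problem_mark", None)
    if mark is not None:
        print("parse error at line %d, column %d" % (mark.line + 1, mark.column + 1))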
diff --git a/typeshed/third_party/2.7/yaml/events.pyi b/typeshed/third_party/2.7/yaml/events.pyi
new file mode 100644
index 0000000..d00d55c
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/events.pyi
@@ -0,0 +1,66 @@
+# Stubs for yaml.events (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Event:
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, start_mark=..., end_mark=...) -> None: ...
+
+class NodeEvent(Event):
+    anchor = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, anchor, start_mark=..., end_mark=...) -> None: ...
+
+class CollectionStartEvent(NodeEvent):
+    anchor = ... # type: Any
+    tag = ... # type: Any
+    implicit = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    flow_style = ... # type: Any
+    def __init__(self, anchor, tag, implicit, start_mark=..., end_mark=..., flow_style=...) -> None: ...
+
+class CollectionEndEvent(Event): ...
+
+class StreamStartEvent(Event):
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    encoding = ... # type: Any
+    def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ...
+
+class StreamEndEvent(Event): ...
+
+class DocumentStartEvent(Event):
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    explicit = ... # type: Any
+    version = ... # type: Any
+    tags = ... # type: Any
+    def __init__(self, start_mark=..., end_mark=..., explicit=..., version=..., tags=...) -> None: ...
+
+class DocumentEndEvent(Event):
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    explicit = ... # type: Any
+    def __init__(self, start_mark=..., end_mark=..., explicit=...) -> None: ...
+
+class AliasEvent(NodeEvent): ...
+
+class ScalarEvent(NodeEvent):
+    anchor = ... # type: Any
+    tag = ... # type: Any
+    implicit = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    style = ... # type: Any
+    def __init__(self, anchor, tag, implicit, value, start_mark=..., end_mark=..., style=...) -> None: ...
+
+class SequenceStartEvent(CollectionStartEvent): ...
+class SequenceEndEvent(CollectionEndEvent): ...
+class MappingStartEvent(CollectionStartEvent): ...
+class MappingEndEvent(CollectionEndEvent): ...
diff --git a/typeshed/third_party/2.7/yaml/loader.pyi b/typeshed/third_party/2.7/yaml/loader.pyi
new file mode 100644
index 0000000..6d3b4c1
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/loader.pyi
@@ -0,0 +1,19 @@
+# Stubs for yaml.loader (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from yaml.reader import Reader
+from yaml.scanner import Scanner
+from yaml.parser import Parser
+from yaml.composer import Composer
+from yaml.constructor import BaseConstructor, SafeConstructor, Constructor
+from yaml.resolver import BaseResolver, Resolver
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+    def __init__(self, stream) -> None: ...
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+    def __init__(self, stream) -> None: ...
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+    def __init__(self, stream) -> None: ...
diff --git a/typeshed/third_party/2.7/yaml/nodes.pyi b/typeshed/third_party/2.7/yaml/nodes.pyi
new file mode 100644
index 0000000..2deb1bd
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/nodes.pyi
@@ -0,0 +1,35 @@
+# Stubs for yaml.nodes (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Node:
+    tag = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, tag, value, start_mark, end_mark) -> None: ...
+
+class ScalarNode(Node):
+    id = ... # type: Any
+    tag = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    style = ... # type: Any
+    def __init__(self, tag, value, start_mark=..., end_mark=..., style=...) -> None: ...
+
+class CollectionNode(Node):
+    tag = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    flow_style = ... # type: Any
+    def __init__(self, tag, value, start_mark=..., end_mark=..., flow_style=...) -> None: ...
+
+class SequenceNode(CollectionNode):
+    id = ... # type: Any
+
+class MappingNode(CollectionNode):
+    id = ... # type: Any
diff --git a/typeshed/third_party/2.7/yaml/parser.pyi b/typeshed/third_party/2.7/yaml/parser.pyi
new file mode 100644
index 0000000..a14486b
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/parser.pyi
@@ -0,0 +1,48 @@
+# Stubs for yaml.parser (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import MarkedYAMLError
+
+class ParserError(MarkedYAMLError): ...
+
+class Parser:
+    DEFAULT_TAGS = ... # type: Any
+    current_event = ... # type: Any
+    yaml_version = ... # type: Any
+    tag_handles = ... # type: Any
+    states = ... # type: Any
+    marks = ... # type: Any
+    state = ... # type: Any
+    def __init__(self) -> None: ...
+    def dispose(self): ...
+    def check_event(self, *choices): ...
+    def peek_event(self): ...
+    def get_event(self): ...
+    def parse_stream_start(self): ...
+    def parse_implicit_document_start(self): ...
+    def parse_document_start(self): ...
+    def parse_document_end(self): ...
+    def parse_document_content(self): ...
+    def process_directives(self): ...
+    def parse_block_node(self): ...
+    def parse_flow_node(self): ...
+    def parse_block_node_or_indentless_sequence(self): ...
+    def parse_node(self, block=..., indentless_sequence=...): ...
+    def parse_block_sequence_first_entry(self): ...
+    def parse_block_sequence_entry(self): ...
+    def parse_indentless_sequence_entry(self): ...
+    def parse_block_mapping_first_key(self): ...
+    def parse_block_mapping_key(self): ...
+    def parse_block_mapping_value(self): ...
+    def parse_flow_sequence_first_entry(self): ...
+    def parse_flow_sequence_entry(self, first=...): ...
+    def parse_flow_sequence_entry_mapping_key(self): ...
+    def parse_flow_sequence_entry_mapping_value(self): ...
+    def parse_flow_sequence_entry_mapping_end(self): ...
+    def parse_flow_mapping_first_key(self): ...
+    def parse_flow_mapping_key(self, first=...): ...
+    def parse_flow_mapping_value(self): ...
+    def parse_flow_mapping_empty_value(self): ...
+    def process_empty_scalar(self, mark): ...
diff --git a/typeshed/third_party/2.7/yaml/reader.pyi b/typeshed/third_party/2.7/yaml/reader.pyi
new file mode 100644
index 0000000..c3e84d6
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/reader.pyi
@@ -0,0 +1,38 @@
+# Stubs for yaml.reader (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import YAMLError
+
+class ReaderError(YAMLError):
+    name = ... # type: Any
+    character = ... # type: Any
+    position = ... # type: Any
+    encoding = ... # type: Any
+    reason = ... # type: Any
+    def __init__(self, name, position, character, encoding, reason) -> None: ...
+
+class Reader:
+    name = ... # type: Any
+    stream = ... # type: Any
+    stream_pointer = ... # type: Any
+    eof = ... # type: Any
+    buffer = ... # type: Any
+    pointer = ... # type: Any
+    raw_buffer = ... # type: Any
+    raw_decode = ... # type: Any
+    encoding = ... # type: Any
+    index = ... # type: Any
+    line = ... # type: Any
+    column = ... # type: Any
+    def __init__(self, stream) -> None: ...
+    def peek(self, index=...): ...
+    def prefix(self, length=...): ...
+    def forward(self, length=...): ...
+    def get_mark(self): ...
+    def determine_encoding(self): ...
+    NON_PRINTABLE = ... # type: Any
+    def check_printable(self, data): ...
+    def update(self, length): ...
+    def update_raw(self, size=...): ...
diff --git a/typeshed/third_party/2.7/yaml/representer.pyi b/typeshed/third_party/2.7/yaml/representer.pyi
new file mode 100644
index 0000000..eaa0133
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/representer.pyi
@@ -0,0 +1,56 @@
+# Stubs for yaml.representer (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import YAMLError
+
+class RepresenterError(YAMLError): ...
+
+class BaseRepresenter:
+    yaml_representers = ... # type: Any
+    yaml_multi_representers = ... # type: Any
+    default_style = ... # type: Any
+    default_flow_style = ... # type: Any
+    represented_objects = ... # type: Any
+    object_keeper = ... # type: Any
+    alias_key = ... # type: Any
+    def __init__(self, default_style=..., default_flow_style=...) -> None: ...
+    def represent(self, data): ...
+    def get_classobj_bases(self, cls): ...
+    def represent_data(self, data): ...
+    def add_representer(cls, data_type, representer): ...
+    def add_multi_representer(cls, data_type, representer): ...
+    def represent_scalar(self, tag, value, style=...): ...
+    def represent_sequence(self, tag, sequence, flow_style=...): ...
+    def represent_mapping(self, tag, mapping, flow_style=...): ...
+    def ignore_aliases(self, data): ...
+
+class SafeRepresenter(BaseRepresenter):
+    def ignore_aliases(self, data): ...
+    def represent_none(self, data): ...
+    def represent_str(self, data): ...
+    def represent_unicode(self, data): ...
+    def represent_bool(self, data): ...
+    def represent_int(self, data): ...
+    def represent_long(self, data): ...
+    inf_value = ... # type: Any
+    def represent_float(self, data): ...
+    def represent_list(self, data): ...
+    def represent_dict(self, data): ...
+    def represent_set(self, data): ...
+    def represent_date(self, data): ...
+    def represent_datetime(self, data): ...
+    def represent_yaml_object(self, tag, data, cls, flow_style=...): ...
+    def represent_undefined(self, data): ...
+
+class Representer(SafeRepresenter):
+    def represent_str(self, data): ...
+    def represent_unicode(self, data): ...
+    def represent_long(self, data): ...
+    def represent_complex(self, data): ...
+    def represent_tuple(self, data): ...
+    def represent_name(self, data): ...
+    def represent_module(self, data): ...
+    def represent_instance(self, data): ...
+    def represent_object(self, data): ...
diff --git a/typeshed/third_party/2.7/yaml/resolver.pyi b/typeshed/third_party/2.7/yaml/resolver.pyi
new file mode 100644
index 0000000..70a33b2
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/resolver.pyi
@@ -0,0 +1,26 @@
+# Stubs for yaml.resolver (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import YAMLError
+
+class ResolverError(YAMLError): ...
+
+class BaseResolver:
+    DEFAULT_SCALAR_TAG = ... # type: Any
+    DEFAULT_SEQUENCE_TAG = ... # type: Any
+    DEFAULT_MAPPING_TAG = ... # type: Any
+    yaml_implicit_resolvers = ... # type: Any
+    yaml_path_resolvers = ... # type: Any
+    resolver_exact_paths = ... # type: Any
+    resolver_prefix_paths = ... # type: Any
+    def __init__(self) -> None: ...
+    def add_implicit_resolver(cls, tag, regexp, first): ...
+    def add_path_resolver(cls, tag, path, kind=...): ...
+    def descend_resolver(self, current_node, current_index): ...
+    def ascend_resolver(self): ...
+    def check_resolver_prefix(self, depth, path, kind, current_node, current_index): ...
+    def resolve(self, kind, value, implicit): ...
+
+class Resolver(BaseResolver): ...
diff --git a/typeshed/third_party/2.7/yaml/scanner.pyi b/typeshed/third_party/2.7/yaml/scanner.pyi
new file mode 100644
index 0000000..226ec83
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/scanner.pyi
@@ -0,0 +1,100 @@
+# Stubs for yaml.scanner (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import MarkedYAMLError
+
+class ScannerError(MarkedYAMLError): ...
+
+class SimpleKey:
+    token_number = ... # type: Any
+    required = ... # type: Any
+    index = ... # type: Any
+    line = ... # type: Any
+    column = ... # type: Any
+    mark = ... # type: Any
+    def __init__(self, token_number, required, index, line, column, mark) -> None: ...
+
+class Scanner:
+    done = ... # type: Any
+    flow_level = ... # type: Any
+    tokens = ... # type: Any
+    tokens_taken = ... # type: Any
+    indent = ... # type: Any
+    indents = ... # type: Any
+    allow_simple_key = ... # type: Any
+    possible_simple_keys = ... # type: Any
+    def __init__(self) -> None: ...
+    def check_token(self, *choices): ...
+    def peek_token(self): ...
+    def get_token(self): ...
+    def need_more_tokens(self): ...
+    def fetch_more_tokens(self): ...
+    def next_possible_simple_key(self): ...
+    def stale_possible_simple_keys(self): ...
+    def save_possible_simple_key(self): ...
+    def remove_possible_simple_key(self): ...
+    def unwind_indent(self, column): ...
+    def add_indent(self, column): ...
+    def fetch_stream_start(self): ...
+    def fetch_stream_end(self): ...
+    def fetch_directive(self): ...
+    def fetch_document_start(self): ...
+    def fetch_document_end(self): ...
+    def fetch_document_indicator(self, TokenClass): ...
+    def fetch_flow_sequence_start(self): ...
+    def fetch_flow_mapping_start(self): ...
+    def fetch_flow_collection_start(self, TokenClass): ...
+    def fetch_flow_sequence_end(self): ...
+    def fetch_flow_mapping_end(self): ...
+    def fetch_flow_collection_end(self, TokenClass): ...
+    def fetch_flow_entry(self): ...
+    def fetch_block_entry(self): ...
+    def fetch_key(self): ...
+    def fetch_value(self): ...
+    def fetch_alias(self): ...
+    def fetch_anchor(self): ...
+    def fetch_tag(self): ...
+    def fetch_literal(self): ...
+    def fetch_folded(self): ...
+    def fetch_block_scalar(self, style): ...
+    def fetch_single(self): ...
+    def fetch_double(self): ...
+    def fetch_flow_scalar(self, style): ...
+    def fetch_plain(self): ...
+    def check_directive(self): ...
+    def check_document_start(self): ...
+    def check_document_end(self): ...
+    def check_block_entry(self): ...
+    def check_key(self): ...
+    def check_value(self): ...
+    def check_plain(self): ...
+    def scan_to_next_token(self): ...
+    def scan_directive(self): ...
+    def scan_directive_name(self, start_mark): ...
+    def scan_yaml_directive_value(self, start_mark): ...
+    def scan_yaml_directive_number(self, start_mark): ...
+    def scan_tag_directive_value(self, start_mark): ...
+    def scan_tag_directive_handle(self, start_mark): ...
+    def scan_tag_directive_prefix(self, start_mark): ...
+    def scan_directive_ignored_line(self, start_mark): ...
+    def scan_anchor(self, TokenClass): ...
+    def scan_tag(self): ...
+    def scan_block_scalar(self, style): ...
+    def scan_block_scalar_indicators(self, start_mark): ...
+    def scan_block_scalar_ignored_line(self, start_mark): ...
+    def scan_block_scalar_indentation(self): ...
+    def scan_block_scalar_breaks(self, indent): ...
+    def scan_flow_scalar(self, style): ...
+    ESCAPE_REPLACEMENTS = ... # type: Any
+    ESCAPE_CODES = ... # type: Any
+    def scan_flow_scalar_non_spaces(self, double, start_mark): ...
+    def scan_flow_scalar_spaces(self, double, start_mark): ...
+    def scan_flow_scalar_breaks(self, double, start_mark): ...
+    def scan_plain(self): ...
+    def scan_plain_spaces(self, indent, start_mark): ...
+    def scan_tag_handle(self, name, start_mark): ...
+    def scan_tag_uri(self, name, start_mark): ...
+    def scan_uri_escapes(self, name, start_mark): ...
+    def scan_line_break(self): ...
diff --git a/typeshed/third_party/2.7/yaml/serializer.pyi b/typeshed/third_party/2.7/yaml/serializer.pyi
new file mode 100644
index 0000000..b40c64c
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/serializer.pyi
@@ -0,0 +1,27 @@
+# Stubs for yaml.serializer (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from yaml.error import YAMLError
+
+class SerializerError(YAMLError): ...
+
+class Serializer:
+    ANCHOR_TEMPLATE = ... # type: Any
+    use_encoding = ... # type: Any
+    use_explicit_start = ... # type: Any
+    use_explicit_end = ... # type: Any
+    use_version = ... # type: Any
+    use_tags = ... # type: Any
+    serialized_nodes = ... # type: Any
+    anchors = ... # type: Any
+    last_anchor_id = ... # type: Any
+    closed = ... # type: Any
+    def __init__(self, encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ...
+    def open(self): ...
+    def close(self): ...
+    def serialize(self, node): ...
+    def anchor_node(self, node): ...
+    def generate_anchor(self, node): ...
+    def serialize_node(self, node, parent, index): ...
diff --git a/typeshed/third_party/2.7/yaml/tokens.pyi b/typeshed/third_party/2.7/yaml/tokens.pyi
new file mode 100644
index 0000000..19f70a1
--- /dev/null
+++ b/typeshed/third_party/2.7/yaml/tokens.pyi
@@ -0,0 +1,97 @@
+# Stubs for yaml.tokens (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Token:
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, start_mark, end_mark) -> None: ...
+
+class DirectiveToken(Token):
+    id = ... # type: Any
+    name = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, name, value, start_mark, end_mark) -> None: ...
+
+class DocumentStartToken(Token):
+    id = ... # type: Any
+
+class DocumentEndToken(Token):
+    id = ... # type: Any
+
+class StreamStartToken(Token):
+    id = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    encoding = ... # type: Any
+    def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ...
+
+class StreamEndToken(Token):
+    id = ... # type: Any
+
+class BlockSequenceStartToken(Token):
+    id = ... # type: Any
+
+class BlockMappingStartToken(Token):
+    id = ... # type: Any
+
+class BlockEndToken(Token):
+    id = ... # type: Any
+
+class FlowSequenceStartToken(Token):
+    id = ... # type: Any
+
+class FlowMappingStartToken(Token):
+    id = ... # type: Any
+
+class FlowSequenceEndToken(Token):
+    id = ... # type: Any
+
+class FlowMappingEndToken(Token):
+    id = ... # type: Any
+
+class KeyToken(Token):
+    id = ... # type: Any
+
+class ValueToken(Token):
+    id = ... # type: Any
+
+class BlockEntryToken(Token):
+    id = ... # type: Any
+
+class FlowEntryToken(Token):
+    id = ... # type: Any
+
+class AliasToken(Token):
+    id = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, value, start_mark, end_mark) -> None: ...
+
+class AnchorToken(Token):
+    id = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, value, start_mark, end_mark) -> None: ...
+
+class TagToken(Token):
+    id = ... # type: Any
+    value = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    def __init__(self, value, start_mark, end_mark) -> None: ...
+
+class ScalarToken(Token):
+    id = ... # type: Any
+    value = ... # type: Any
+    plain = ... # type: Any
+    start_mark = ... # type: Any
+    end_mark = ... # type: Any
+    style = ... # type: Any
+    def __init__(self, value, plain, start_mark, end_mark, style=...) -> None: ...
diff --git a/typeshed/third_party/2and3/backports/__init__.pyi b/typeshed/third_party/2and3/backports/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi b/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi
new file mode 100644
index 0000000..c219980
--- /dev/null
+++ b/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi
@@ -0,0 +1,3 @@
+class CertificateError(ValueError): ...
+
+def match_hostname(cert, hostname): ...
diff --git a/typeshed/third_party/2and3/backports_abc.pyi b/typeshed/third_party/2and3/backports_abc.pyi
new file mode 100644
index 0000000..a82293f
--- /dev/null
+++ b/typeshed/third_party/2and3/backports_abc.pyi
@@ -0,0 +1,19 @@
+# Stubs for backports_abc (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def mk_gen(): ...
+def mk_awaitable(): ...
+def mk_coroutine(): ...
+
+Generator = ... # type: Any
+Awaitable = ... # type: Any
+Coroutine = ... # type: Any
+
+def isawaitable(obj): ...
+
+PATCHED = ... # type: Any
+
+def patch(patch_inspect=True): ...
diff --git a/typeshed/third_party/2and3/certifi.pyi b/typeshed/third_party/2and3/certifi.pyi
new file mode 100644
index 0000000..c809e6d
--- /dev/null
+++ b/typeshed/third_party/2and3/certifi.pyi
@@ -0,0 +1,2 @@
+def where() -> str: ...
+def old_where() -> str: ...
diff --git a/typeshed/third_party/2and3/singledispatch.pyi b/typeshed/third_party/2and3/singledispatch.pyi
new file mode 100644
index 0000000..dac3cea
--- /dev/null
+++ b/typeshed/third_party/2and3/singledispatch.pyi
@@ -0,0 +1,5 @@
+# Stubs for singledispatch (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def singledispatch(func): ...
diff --git a/typeshed/third_party/3/docutils/__init__.pyi b/typeshed/third_party/3/docutils/__init__.pyi
new file mode 100644
index 0000000..eb1ae45
--- /dev/null
+++ b/typeshed/third_party/3/docutils/__init__.pyi
@@ -0,0 +1 @@
+...
diff --git a/typeshed/third_party/3/docutils/examples.pyi b/typeshed/third_party/3/docutils/examples.pyi
new file mode 100644
index 0000000..0abfc7b
--- /dev/null
+++ b/typeshed/third_party/3/docutils/examples.pyi
@@ -0,0 +1,3 @@
+from typing import Any
+
+html_parts = ...  # type: Any
diff --git a/typeshed/third_party/3/docutils/nodes.pyi b/typeshed/third_party/3/docutils/nodes.pyi
new file mode 100644
index 0000000..f747fb1
--- /dev/null
+++ b/typeshed/third_party/3/docutils/nodes.pyi
@@ -0,0 +1,8 @@
+from typing import Any, List
+
+class reference:
+    def __init__(self,
+                 rawsource: str = ...,
+                 text: str = ...,
+                 *children: List[Any],
+                 **attributes) -> None: ...
diff --git a/typeshed/third_party/3/docutils/parsers/__init__.pyi b/typeshed/third_party/3/docutils/parsers/__init__.pyi
new file mode 100644
index 0000000..eb1ae45
--- /dev/null
+++ b/typeshed/third_party/3/docutils/parsers/__init__.pyi
@@ -0,0 +1 @@
+...
diff --git a/typeshed/third_party/3/docutils/parsers/rst/__init__.pyi b/typeshed/third_party/3/docutils/parsers/rst/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi b/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi
new file mode 100644
index 0000000..eb1ae45
--- /dev/null
+++ b/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi
@@ -0,0 +1 @@
+...
diff --git a/typeshed/third_party/3/docutils/parsers/rst/roles.pyi b/typeshed/third_party/3/docutils/parsers/rst/roles.pyi
new file mode 100644
index 0000000..7307e58
--- /dev/null
+++ b/typeshed/third_party/3/docutils/parsers/rst/roles.pyi
@@ -0,0 +1,10 @@
+import docutils.nodes
+import docutils.parsers.rst.states
+
+from typing import Callable, Any, List, Dict, Tuple
+
+def register_local_role(name: str,
+                        role_fn: Callable[[str, str, str, int, docutils.parsers.rst.states.Inliner, Dict, List],
+                                          Tuple[List[docutils.nodes.reference], List[docutils.nodes.reference]]]
+                        ) -> None:
+    ...
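
A minimal sketch of a role function whose shape matches the Callable type declared above; the role name 'my-role', the function name, and its behaviour are hypothetical, and the second element of the returned tuple is simply left empty:

    from typing import Dict, List, Tuple

    import docutils.nodes
    import docutils.parsers.rst.roles
    import docutils.parsers.rst.states

    def my_role(name: str, rawtext: str, text: str, lineno: int,
                inliner: docutils.parsers.rst.states.Inliner,
                options: Dict, content: List
                ) -> Tuple[List[docutils.nodes.reference],
                           List[docutils.nodes.reference]]:
        # Wrap the role text in a reference node; report no problems.
        node = docutils.nodes.reference(rawtext, text)
        return [node], []

    docutils.parsers.rst.roles.register_local_role('my-role', my_role)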
diff --git a/typeshed/third_party/3/docutils/parsers/rst/states.pyi b/typeshed/third_party/3/docutils/parsers/rst/states.pyi
new file mode 100644
index 0000000..e39d2bc
--- /dev/null
+++ b/typeshed/third_party/3/docutils/parsers/rst/states.pyi
@@ -0,0 +1,5 @@
+import typing
+
+class Inliner:
+    def __init__(self) -> None:
+        ...
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
new file mode 100644
index 0000000..dcb3b9c
--- /dev/null
+++ b/typeshed/third_party/3/enum.pyi
@@ -0,0 +1,19 @@
+from typing import List, Any, TypeVar
+
+class Enum:
+    def __new__(cls, value: Any) -> None: ...
+    def __repr__(self) -> str: ...
+    def __str__(self) -> str: ...
+    def __dir__(self) -> List[str]: ...
+    def __format__(self, format_spec: str) -> str: ...
+    def __hash__(self) -> Any: ...
+    def __reduce_ex__(self, proto: Any) -> Any: ...
+
+    name = ...  # type: str
+    value = None  # type: Any
+
+class IntEnum(int, Enum): ...
+
+_T = TypeVar('_T')
+
+def unique(enumeration: _T) -> _T: ...
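
A short sketch exercising the surface declared in this stub; the Color class is hypothetical:

    from enum import IntEnum, unique

    @unique                      # typed above to return its argument unchanged
    class Color(IntEnum):        # IntEnum mixes int into Enum
        RED = 1
        GREEN = 2
        BLUE = 3

    assert Color.RED.value == 1          # .value is typed as Any
    assert Color.GREEN.name == 'GREEN'   # .name is typed as str
    assert Color.BLUE == 3               # IntEnum members compare as ints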
diff --git a/typeshed/third_party/3/lxml/__init__.pyi b/typeshed/third_party/3/lxml/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/3/lxml/etree.pyi b/typeshed/third_party/3/lxml/etree.pyi
new file mode 100644
index 0000000..d1004b0
--- /dev/null
+++ b/typeshed/third_party/3/lxml/etree.pyi
@@ -0,0 +1,102 @@
+# Hand-written stub for lxml.etree as used by mypy.report.
+# This is *far* from complete, and the stubgen-generated ones crash mypy.
+# Any use of `Any` below means I couldn't figure out the type.
+
+import typing
+from typing import Any, Dict, List, Tuple, Union
+from typing import SupportsBytes
+
+
+# We do *not* want `typing.AnyStr` because it is a `TypeVar`, which is an
+# unnecessary constraint. It seems reasonable to constrain each
+# List/Dict argument to use one type consistently, though, and it is
+# necessary in order to keep these brief.
+AnyStr = Union[str, bytes]
+ListAnyStr = Union[List[str], List[bytes]]
+DictAnyStr = Union[Dict[str, str], Dict[bytes, bytes]]
+Dict_Tuple2AnyStr_Any = Union[Dict[Tuple[str, str], Any], Dict[Tuple[bytes, bytes], Any]]
+
+
+class _Element:
+    def addprevious(self, element: '_Element') -> None:
+        pass
+
+class _ElementTree:
+    def write(self,
+              file: Union[AnyStr, typing.IO],
+              encoding: AnyStr = ...,
+              method: AnyStr = ...,
+              pretty_print: bool = ...,
+              xml_declaration: Any = ...,
+              with_tail: Any = ...,
+              standalone: bool = ...,
+              compression: int = ...,
+              exclusive: bool = ...,
+              with_comments: bool = ...,
+              inclusive_ns_prefixes: ListAnyStr = ...) -> None:
+        pass
+
+class _XSLTResultTree(SupportsBytes):
+    pass
+
+class _XSLTQuotedStringParam:
+    pass
+
+class XMLParser:
+    pass
+
+class XMLSchema:
+    def __init__(self,
+                 etree: Union[_Element, _ElementTree] = ...,
+                 file: Union[AnyStr, typing.IO] = ...) -> None:
+        pass
+
+    def assertValid(self,
+                    etree: Union[_Element, _ElementTree]) -> None:
+        pass
+
+class XSLTAccessControl:
+    pass
+
+class XSLT:
+    def __init__(self,
+                 xslt_input: Union[_Element, _ElementTree],
+                 extensions: Dict_Tuple2AnyStr_Any = ...,
+                 regexp: bool = ...,
+                 access_control: XSLTAccessControl = ...) -> None:
+        pass
+
+    def __call__(self,
+                 _input: Union[_Element, _ElementTree],
+                 profile_run: bool = ...,
+                 **kwargs: Union[AnyStr, _XSLTQuotedStringParam]) -> _XSLTResultTree:
+        pass
+
+    @staticmethod
+    def strparam(s: AnyStr) -> _XSLTQuotedStringParam:
+        pass
+
+def Element(_tag: AnyStr,
+            attrib: DictAnyStr = ...,
+            nsmap: DictAnyStr = ...,
+            **extra: AnyStr) -> _Element:
+    pass
+
+def SubElement(_parent: _Element, _tag: AnyStr,
+               attrib: DictAnyStr = ...,
+               nsmap: DictAnyStr = ...,
+               **extra: AnyStr) -> _Element:
+    pass
+
+def ElementTree(element: _Element = ...,
+                file: Union[AnyStr, typing.IO] = ...,
+                parser: XMLParser = ...) -> _ElementTree:
+    pass
+
+def ProcessingInstruction(target: AnyStr, text: AnyStr = ...) -> _Element:
+    pass
+
+def parse(source: Union[AnyStr, typing.IO],
+          parser: XMLParser = ...,
+          base_url: AnyStr = ...) -> _ElementTree:
+    pass
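
As a rough illustration of the subset this hand-written stub covers, here is a sketch that should type-check against it (the output path and element names are made up):

    from lxml import etree

    # Element/SubElement accept str-or-bytes tags via the AnyStr alias above.
    root = etree.Element('report', attrib={'version': '1'})
    etree.SubElement(root, 'module', attrib={'name': 'example'})
    root.addprevious(etree.ProcessingInstruction(
        'xml-stylesheet', 'type="text/xsl" href="style.xsl"'))

    tree = etree.ElementTree(root)
    tree.write('report.xml',
               encoding='utf-8',
               pretty_print=True,
               xml_declaration=True)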
diff --git a/typeshed/third_party/3/requests/__init__.pyi b/typeshed/third_party/3/requests/__init__.pyi
new file mode 100644
index 0000000..173481a
--- /dev/null
+++ b/typeshed/third_party/3/requests/__init__.pyi
@@ -0,0 +1,38 @@
+# Stubs for requests (based on version 2.6.0, Python 3)
+
+from typing import Any
+from . import models
+from . import api
+from . import sessions
+from . import status_codes
+from . import exceptions
+import logging
+
+__title__ = ...  # type: Any
+__build__ = ...  # type: Any
+__license__ = ...  # type: Any
+__copyright__ = ...  # type: Any
+
+Request = models.Request
+Response = models.Response
+PreparedRequest = models.PreparedRequest
+request = api.request
+get = api.get
+head = api.head
+post = api.post
+patch = api.patch
+put = api.put
+delete = api.delete
+options = api.options
+session = sessions.session
+Session = sessions.Session
+codes = status_codes.codes
+RequestException = exceptions.RequestException
+Timeout = exceptions.Timeout
+URLRequired = exceptions.URLRequired
+TooManyRedirects = exceptions.TooManyRedirects
+HTTPError = exceptions.HTTPError
+ConnectionError = exceptions.ConnectionError
+
+class NullHandler(logging.Handler):
+    def emit(self, record): ...
diff --git a/typeshed/third_party/3/requests/adapters.pyi b/typeshed/third_party/3/requests/adapters.pyi
new file mode 100644
index 0000000..109dc9a
--- /dev/null
+++ b/typeshed/third_party/3/requests/adapters.pyi
@@ -0,0 +1,69 @@
+# Stubs for requests.adapters (Python 3)
+
+from typing import Any
+from . import models
+from .packages.urllib3 import poolmanager
+from .packages.urllib3 import response
+from .packages.urllib3.util import retry
+from . import compat
+from . import utils
+from . import structures
+from .packages.urllib3 import exceptions as urllib3_exceptions
+from . import cookies
+from . import exceptions
+from . import auth
+
+Response = models.Response
+PoolManager = poolmanager.PoolManager
+proxy_from_url = poolmanager.proxy_from_url
+HTTPResponse = response.HTTPResponse
+Retry = retry.Retry
+DEFAULT_CA_BUNDLE_PATH = utils.DEFAULT_CA_BUNDLE_PATH
+get_encoding_from_headers = utils.get_encoding_from_headers
+prepend_scheme_if_needed = utils.prepend_scheme_if_needed
+get_auth_from_url = utils.get_auth_from_url
+urldefragauth = utils.urldefragauth
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+ConnectTimeoutError = urllib3_exceptions.ConnectTimeoutError
+MaxRetryError = urllib3_exceptions.MaxRetryError
+ProtocolError = urllib3_exceptions.ProtocolError
+ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
+ResponseError = urllib3_exceptions.ResponseError
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+ConnectionError = exceptions.ConnectionError
+ConnectTimeout = exceptions.ConnectTimeout
+ReadTimeout = exceptions.ReadTimeout
+SSLError = exceptions.SSLError
+ProxyError = exceptions.ProxyError
+RetryError = exceptions.RetryError
+
+DEFAULT_POOLBLOCK = ...  # type: Any
+DEFAULT_POOLSIZE = ...  # type: Any
+DEFAULT_RETRIES = ...  # type: Any
+
+class BaseAdapter:
+    def __init__(self) -> None: ...
+    # TODO: "request" parameter not actually supported, added to please mypy.
+    def send(self, request=...): ...
+    def close(self): ...
+
+class HTTPAdapter(BaseAdapter):
+    __attrs__ = ...  # type: Any
+    max_retries = ...  # type: Any
+    config = ...  # type: Any
+    proxy_manager = ...  # type: Any
+    def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=...,
+                 pool_block=...): ...
+    poolmanager = ...  # type: Any
+    def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ...
+    def proxy_manager_for(self, proxy, **proxy_kwargs): ...
+    def cert_verify(self, conn, url, verify, cert): ...
+    def build_response(self, req, resp): ...
+    def get_connection(self, url, proxies=...): ...
+    def close(self): ...
+    def request_url(self, request, proxies): ...
+    def add_headers(self, request, **kwargs): ...
+    def proxy_headers(self, proxy): ...
+    # TODO: "request" is not actually optional, modified to please mypy.
+    def send(self, request=..., stream=..., timeout=..., verify=..., cert=...,
+             proxies=...): ...
diff --git a/typeshed/third_party/3/requests/api.pyi b/typeshed/third_party/3/requests/api.pyi
new file mode 100644
index 0000000..7b04128
--- /dev/null
+++ b/typeshed/third_party/3/requests/api.pyi
@@ -0,0 +1,14 @@
+# Stubs for requests.api (Python 3)
+
+import typing
+
+from .models import Response
+
+def request(method: str, url: str, **kwargs) -> Response: ...
+def get(url: str, **kwargs) -> Response: ...
+def options(url: str, **kwargs) -> Response: ...
+def head(url: str, **kwargs) -> Response: ...
+def post(url: str, data=..., json=..., **kwargs) -> Response: ...
+def put(url: str, data=..., **kwargs) -> Response: ...
+def patch(url: str, data=..., **kwargs) -> Response: ...
+def delete(url: str, **kwargs) -> Response: ...
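
A quick sketch using the module-level helpers declared here; the URLs are placeholders:

    import requests

    # Each helper is annotated to return a models.Response.
    resp = requests.get('https://example.org/search', params={'q': 'mypy'})
    resp.raise_for_status()
    print(resp.status_code, resp.headers.get('Content-Type'))

    created = requests.post('https://example.org/items', json={'name': 'example'})
    print(created.json())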
diff --git a/typeshed/third_party/3/requests/auth.pyi b/typeshed/third_party/3/requests/auth.pyi
new file mode 100644
index 0000000..8eea2b0
--- /dev/null
+++ b/typeshed/third_party/3/requests/auth.pyi
@@ -0,0 +1,41 @@
+# Stubs for requests.auth (Python 3)
+
+from typing import Any
+from . import compat
+from . import cookies
+from . import utils
+from . import status_codes
+
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+parse_dict_header = utils.parse_dict_header
+to_native_string = utils.to_native_string
+codes = status_codes.codes
+
+CONTENT_TYPE_FORM_URLENCODED = ...  # type: Any
+CONTENT_TYPE_MULTI_PART = ...  # type: Any
+
+class AuthBase:
+    def __call__(self, r): ...
+
+class HTTPBasicAuth(AuthBase):
+    username = ...  # type: Any
+    password = ...  # type: Any
+    def __init__(self, username, password) -> None: ...
+    def __call__(self, r): ...
+
+class HTTPProxyAuth(HTTPBasicAuth):
+    def __call__(self, r): ...
+
+class HTTPDigestAuth(AuthBase):
+    username = ...  # type: Any
+    password = ...  # type: Any
+    last_nonce = ...  # type: Any
+    nonce_count = ...  # type: Any
+    chal = ...  # type: Any
+    pos = ...  # type: Any
+    num_401_calls = ...  # type: Any
+    def __init__(self, username, password) -> None: ...
+    def build_digest_header(self, method, url): ...
+    def handle_redirect(self, r, **kwargs): ...
+    def handle_401(self, r, **kwargs): ...
+    def __call__(self, r): ...
diff --git a/typeshed/third_party/3/requests/compat.pyi b/typeshed/third_party/3/requests/compat.pyi
new file mode 100644
index 0000000..63b92f6
--- /dev/null
+++ b/typeshed/third_party/3/requests/compat.pyi
@@ -0,0 +1,6 @@
+# Stubs for requests.compat (Python 3.4)
+
+from typing import Any
+import collections
+
+OrderedDict = collections.OrderedDict
diff --git a/typeshed/third_party/3/requests/cookies.pyi b/typeshed/third_party/3/requests/cookies.pyi
new file mode 100644
index 0000000..7b147d0
--- /dev/null
+++ b/typeshed/third_party/3/requests/cookies.pyi
@@ -0,0 +1,65 @@
+# Stubs for requests.cookies (Python 3)
+
+from typing import Any, MutableMapping
+#import cookielib
+from http import cookiejar as cookielib
+import collections
+from . import compat
+
+#cookielib = compat.cookielib
+
+class MockRequest:
+    type = ...  # type: Any
+    def __init__(self, request) -> None: ...
+    def get_type(self): ...
+    def get_host(self): ...
+    def get_origin_req_host(self): ...
+    def get_full_url(self): ...
+    def is_unverifiable(self): ...
+    def has_header(self, name): ...
+    def get_header(self, name, default=...): ...
+    def add_header(self, key, val): ...
+    def add_unredirected_header(self, name, value): ...
+    def get_new_headers(self): ...
+    @property
+    def unverifiable(self): ...
+    @property
+    def origin_req_host(self): ...
+    @property
+    def host(self): ...
+
+class MockResponse:
+    def __init__(self, headers) -> None: ...
+    def info(self): ...
+    def getheaders(self, name): ...
+
+def extract_cookies_to_jar(jar, request, response): ...
+def get_cookie_header(jar, request): ...
+def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ...
+
+class CookieConflictError(RuntimeError): ...
+
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+    def get(self, name, default=..., domain=..., path=...): ...
+    def set(self, name, value, **kwargs): ...
+    def iterkeys(self): ...
+    def keys(self): ...
+    def itervalues(self): ...
+    def values(self): ...
+    def iteritems(self): ...
+    def items(self): ...
+    def list_domains(self): ...
+    def list_paths(self): ...
+    def multiple_domains(self): ...
+    def get_dict(self, domain=..., path=...): ...
+    def __getitem__(self, name): ...
+    def __setitem__(self, name, value): ...
+    def __delitem__(self, name): ...
+    def set_cookie(self, cookie, *args, **kwargs): ...
+    def update(self, other): ...
+    def copy(self): ...
+
+def create_cookie(name, value, **kwargs): ...
+def morsel_to_cookie(morsel): ...
+def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ...
+def merge_cookies(cookiejar, cookies): ...
diff --git a/typeshed/third_party/3/requests/exceptions.pyi b/typeshed/third_party/3/requests/exceptions.pyi
new file mode 100644
index 0000000..ff0c328
--- /dev/null
+++ b/typeshed/third_party/3/requests/exceptions.pyi
@@ -0,0 +1,26 @@
+# Stubs for requests.exceptions (Python 3)
+
+from typing import Any
+from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
+
+class RequestException(IOError):
+    response = ...  # type: Any
+    request = ...  # type: Any
+    def __init__(self, *args, **kwargs) -> None: ...
+
+class HTTPError(RequestException): ...
+class ConnectionError(RequestException): ...
+class ProxyError(ConnectionError): ...
+class SSLError(ConnectionError): ...
+class Timeout(RequestException): ...
+class ConnectTimeout(ConnectionError, Timeout): ...
+class ReadTimeout(Timeout): ...
+class URLRequired(RequestException): ...
+class TooManyRedirects(RequestException): ...
+class MissingSchema(RequestException, ValueError): ...
+class InvalidSchema(RequestException, ValueError): ...
+class InvalidURL(RequestException, ValueError): ...
+class ChunkedEncodingError(RequestException): ...
+class ContentDecodingError(RequestException, BaseHTTPError): ...
+class StreamConsumedError(RequestException, TypeError): ...
+class RetryError(RequestException): ...
diff --git a/typeshed/third_party/3/requests/hooks.pyi b/typeshed/third_party/3/requests/hooks.pyi
new file mode 100644
index 0000000..3367d9a
--- /dev/null
+++ b/typeshed/third_party/3/requests/hooks.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.hooks (Python 3)
+
+from typing import Any
+
+HOOKS = ...  # type: Any
+
+def default_hooks(): ...
+def dispatch_hook(key, hooks, hook_data, **kwargs): ...
diff --git a/typeshed/third_party/3/requests/models.pyi b/typeshed/third_party/3/requests/models.pyi
new file mode 100644
index 0000000..025ebc8
--- /dev/null
+++ b/typeshed/third_party/3/requests/models.pyi
@@ -0,0 +1,134 @@
+# Stubs for requests.models (Python 3)
+
+from typing import Any, List, MutableMapping, Iterator, Dict
+import datetime
+
+from . import hooks
+from . import structures
+from . import auth
+from . import cookies
+from .cookies import RequestsCookieJar
+from .packages.urllib3 import fields
+from .packages.urllib3 import filepost
+from .packages.urllib3 import util
+from .packages.urllib3 import exceptions as urllib3_exceptions
+from . import exceptions
+from . import utils
+from . import compat
+from . import status_codes
+
+default_hooks = hooks.default_hooks
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+HTTPBasicAuth = auth.HTTPBasicAuth
+cookiejar_from_dict = cookies.cookiejar_from_dict
+get_cookie_header = cookies.get_cookie_header
+RequestField = fields.RequestField
+encode_multipart_formdata = filepost.encode_multipart_formdata
+parse_url = util.parse_url
+DecodeError = urllib3_exceptions.DecodeError
+ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
+ProtocolError = urllib3_exceptions.ProtocolError
+LocationParseError = urllib3_exceptions.LocationParseError
+HTTPError = exceptions.HTTPError
+MissingSchema = exceptions.MissingSchema
+InvalidURL = exceptions.InvalidURL
+ChunkedEncodingError = exceptions.ChunkedEncodingError
+ContentDecodingError = exceptions.ContentDecodingError
+ConnectionError = exceptions.ConnectionError
+StreamConsumedError = exceptions.StreamConsumedError
+guess_filename = utils.guess_filename
+get_auth_from_url = utils.get_auth_from_url
+requote_uri = utils.requote_uri
+stream_decode_response_unicode = utils.stream_decode_response_unicode
+to_key_val_list = utils.to_key_val_list
+parse_header_links = utils.parse_header_links
+iter_slices = utils.iter_slices
+guess_json_utf = utils.guess_json_utf
+super_len = utils.super_len
+to_native_string = utils.to_native_string
+codes = status_codes.codes
+
+REDIRECT_STATI = ...  # type: Any
+DEFAULT_REDIRECT_LIMIT = ...  # type: Any
+CONTENT_CHUNK_SIZE = ...  # type: Any
+ITER_CHUNK_SIZE = ...  # type: Any
+json_dumps = ...  # type: Any
+
+class RequestEncodingMixin:
+    @property
+    def path_url(self): ...
+
+class RequestHooksMixin:
+    def register_hook(self, event, hook): ...
+    def deregister_hook(self, event, hook): ...
+
+class Request(RequestHooksMixin):
+    hooks = ...  # type: Any
+    method = ...  # type: Any
+    url = ...  # type: Any
+    headers = ...  # type: Any
+    files = ...  # type: Any
+    data = ...  # type: Any
+    json = ...  # type: Any
+    params = ...  # type: Any
+    auth = ...  # type: Any
+    cookies = ...  # type: Any
+    def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=...,
+                 auth=..., cookies=..., hooks=..., json=...): ...
+    def prepare(self): ...
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+    method = ...  # type: Any
+    url = ...  # type: Any
+    headers = ...  # type: Any
+    body = ...  # type: Any
+    hooks = ...  # type: Any
+    def __init__(self) -> None: ...
+    def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=...,
+                auth=..., cookies=..., hooks=..., json=...): ...
+    def copy(self): ...
+    def prepare_method(self, method): ...
+    def prepare_url(self, url, params): ...
+    def prepare_headers(self, headers): ...
+    def prepare_body(self, data, files, json=...): ...
+    def prepare_content_length(self, body): ...
+    def prepare_auth(self, auth, url=...): ...
+    def prepare_cookies(self, cookies): ...
+    def prepare_hooks(self, hooks): ...
+
+class Response:
+    __attrs__ = ...  # type: Any
+    status_code = ...  # type: int
+    headers = ...  # type: MutableMapping[str, str]
+    raw = ...  # type: Any
+    url = ...  # type: str
+    encoding = ...  # type: str
+    history = ...  # type: List[Response]
+    reason = ...  # type: str
+    cookies = ...  # type: RequestsCookieJar
+    elapsed = ...  # type: datetime.timedelta
+    request = ...  # type: PreparedRequest
+    def __init__(self) -> None: ...
+    def __bool__(self) -> bool: ...
+    def __nonzero__(self) -> bool: ...
+    def __iter__(self) -> Iterator[bytes]: ...
+    @property
+    def ok(self) -> bool: ...
+    @property
+    def is_redirect(self) -> bool: ...
+    @property
+    def is_permanent_redirect(self) -> bool: ...
+    @property
+    def apparent_encoding(self) -> str: ...
+    def iter_content(self, chunk_size: int = ...,
+                     decode_unicode: bool = ...) -> Iterator[Any]: ...
+    def iter_lines(self, chunk_size=..., decode_unicode=..., delimiter=...): ...
+    @property
+    def content(self) -> bytes: ...
+    @property
+    def text(self) -> str: ...
+    def json(self, **kwargs) -> Any: ...
+    @property
+    def links(self) -> Dict[Any, Any]: ...
+    def raise_for_status(self) -> None: ...
+    def close(self) -> None: ...
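
The attribute annotations above are what make static checking of response handling useful; a brief sketch (the URL is a placeholder):

    import datetime
    import requests

    r = requests.get('https://example.org/')

    code: int = r.status_code                        # status_code: int
    ctype: str = r.headers.get('Content-Type', '')   # headers: MutableMapping[str, str]
    took: datetime.timedelta = r.elapsed
    body: bytes = r.content
    page: str = r.text

    for chunk in r.iter_content(chunk_size=1024):    # Iterator[Any]
        pass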
diff --git a/typeshed/third_party/3/requests/packages/__init__.pyi b/typeshed/third_party/3/requests/packages/__init__.pyi
new file mode 100644
index 0000000..2b1bff8
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/__init__.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.packages (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class VendorAlias:
+    def __init__(self, package_names) -> None: ...
+    def find_module(self, fullname, path=...): ...
+    def load_module(self, name): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/__init__.pyi
new file mode 100644
index 0000000..61a1c5f
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/__init__.pyi
@@ -0,0 +1,35 @@
+# Stubs for requests.packages.urllib3 (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import connectionpool
+from . import filepost
+from . import poolmanager
+from . import response
+from .util import request as _request
+from .util import url
+from .util import timeout
+from .util import retry
+import logging
+
+__license__ = ...  # type: Any
+
+HTTPConnectionPool = connectionpool.HTTPConnectionPool
+HTTPSConnectionPool = connectionpool.HTTPSConnectionPool
+connection_from_url = connectionpool.connection_from_url
+encode_multipart_formdata = filepost.encode_multipart_formdata
+PoolManager = poolmanager.PoolManager
+ProxyManager = poolmanager.ProxyManager
+proxy_from_url = poolmanager.proxy_from_url
+HTTPResponse = response.HTTPResponse
+make_headers = _request.make_headers
+get_host = url.get_host
+Timeout = timeout.Timeout
+Retry = retry.Retry
+
+class NullHandler(logging.Handler):
+    def emit(self, record): ...
+
+def add_stderr_logger(level=...): ...
+def disable_warnings(category=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi b/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi
new file mode 100644
index 0000000..58aa944
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi
@@ -0,0 +1,51 @@
+# Stubs for requests.packages.urllib3._collections (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import MutableMapping
+
+class RLock:
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_value, traceback): ...
+
+class RecentlyUsedContainer(MutableMapping):
+    ContainerCls = ...  # type: Any
+    dispose_func = ...  # type: Any
+    lock = ...  # type: Any
+    def __init__(self, maxsize=..., dispose_func=...) -> None: ...
+    def __getitem__(self, key): ...
+    def __setitem__(self, key, value): ...
+    def __delitem__(self, key): ...
+    def __len__(self): ...
+    def __iter__(self): ...
+    def clear(self): ...
+    def keys(self): ...
+
+class HTTPHeaderDict(dict):
+    def __init__(self, headers=..., **kwargs) -> None: ...
+    def __setitem__(self, key, val): ...
+    def __getitem__(self, key): ...
+    def __delitem__(self, key): ...
+    def __contains__(self, key): ...
+    def __eq__(self, other): ...
+    def __ne__(self, other): ...
+    values = ...  # type: Any
+    get = ...  # type: Any
+    update = ...  # type: Any
+    iterkeys = ...  # type: Any
+    itervalues = ...  # type: Any
+    def pop(self, key, default=...): ...
+    def discard(self, key): ...
+    def add(self, key, val): ...
+    def extend(*args, **kwargs): ...
+    def getlist(self, key): ...
+    getheaders = ...  # type: Any
+    getallmatchingheaders = ...  # type: Any
+    iget = ...  # type: Any
+    def copy(self): ...
+    def iteritems(self): ...
+    def itermerged(self): ...
+    def items(self): ...
+    @classmethod
+    def from_httplib(cls, message, duplicates=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/connection.pyi b/typeshed/third_party/3/requests/packages/urllib3/connection.pyi
new file mode 100644
index 0000000..577bc85
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/connection.pyi
@@ -0,0 +1,64 @@
+# Stubs for requests.packages.urllib3.connection (Python 3.4)
+
+from typing import Any
+from . import packages
+from http.client import HTTPConnection as _HTTPConnection
+# from httplib import HTTPConnection as _HTTPConnection # python 2
+from . import exceptions
+from .packages import ssl_match_hostname
+from .util import ssl_
+from . import util
+import http.client
+
+class DummyConnection: ...
+
+import ssl
+BaseSSLError = ssl.SSLError
+ConnectionError = __builtins__.ConnectionError
+HTTPException = http.client.HTTPException
+
+ConnectTimeoutError = exceptions.ConnectTimeoutError
+SystemTimeWarning = exceptions.SystemTimeWarning
+SecurityWarning = exceptions.SecurityWarning
+match_hostname = ssl_match_hostname.match_hostname
+resolve_cert_reqs = ssl_.resolve_cert_reqs
+resolve_ssl_version = ssl_.resolve_ssl_version
+ssl_wrap_socket = ssl_.ssl_wrap_socket
+assert_fingerprint = ssl_.assert_fingerprint
+connection = util.connection
+
+port_by_scheme = ...  # type: Any
+RECENT_DATE = ...  # type: Any
+
+class HTTPConnection(_HTTPConnection):
+    default_port = ...  # type: Any
+    default_socket_options = ...  # type: Any
+    is_verified = ...  # type: Any
+    source_address = ...  # type: Any
+    socket_options = ...  # type: Any
+    def __init__(self, *args, **kw) -> None: ...
+    def connect(self): ...
+
+class HTTPSConnection(HTTPConnection):
+    default_port = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ...
+    sock = ...  # type: Any
+    def connect(self): ...
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+    cert_reqs = ...  # type: Any
+    ca_certs = ...  # type: Any
+    ssl_version = ...  # type: Any
+    assert_fingerprint = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    assert_hostname = ...  # type: Any
+    def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ...
+    sock = ...  # type: Any
+    auto_open = ...  # type: Any
+    is_verified = ...  # type: Any
+    def connect(self): ...
+
+UnverifiedHTTPSConnection = ...  # type: Any
diff --git a/typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi b/typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi
new file mode 100644
index 0000000..778bb7c
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi
@@ -0,0 +1,89 @@
+# Stubs for requests.packages.urllib3.connectionpool (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import exceptions
+from .packages import ssl_match_hostname
+from . import packages
+from .connection import (
+    HTTPException as HTTPException,
+    BaseSSLError as BaseSSLError,
+    ConnectionError as ConnectionError,
+)
+from . import request
+from . import response
+from . import connection
+from .util import connection as _connection
+from .util import retry
+from .util import timeout
+from .util import url
+
+ClosedPoolError = exceptions.ClosedPoolError
+ProtocolError = exceptions.ProtocolError
+EmptyPoolError = exceptions.EmptyPoolError
+HostChangedError = exceptions.HostChangedError
+LocationValueError = exceptions.LocationValueError
+MaxRetryError = exceptions.MaxRetryError
+ProxyError = exceptions.ProxyError
+ReadTimeoutError = exceptions.ReadTimeoutError
+SSLError = exceptions.SSLError
+TimeoutError = exceptions.TimeoutError
+InsecureRequestWarning = exceptions.InsecureRequestWarning
+CertificateError = ssl_match_hostname.CertificateError
+port_by_scheme = connection.port_by_scheme
+DummyConnection = connection.DummyConnection
+HTTPConnection = connection.HTTPConnection
+HTTPSConnection = connection.HTTPSConnection
+VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection
+RequestMethods = request.RequestMethods
+HTTPResponse = response.HTTPResponse
+is_connection_dropped = _connection.is_connection_dropped
+Retry = retry.Retry
+Timeout = timeout.Timeout
+get_host = url.get_host
+
+xrange = ...  # type: Any
+log = ...  # type: Any
+
+class ConnectionPool:
+    scheme = ...  # type: Any
+    QueueCls = ...  # type: Any
+    host = ...  # type: Any
+    port = ...  # type: Any
+    def __init__(self, host, port=...) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_val, exc_tb): ...
+    def close(self): ...
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+    scheme = ...  # type: Any
+    ConnectionCls = ...  # type: Any
+    strict = ...  # type: Any
+    timeout = ...  # type: Any
+    retries = ...  # type: Any
+    pool = ...  # type: Any
+    block = ...  # type: Any
+    proxy = ...  # type: Any
+    proxy_headers = ...  # type: Any
+    num_connections = ...  # type: Any
+    num_requests = ...  # type: Any
+    conn_kw = ...  # type: Any
+    def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., **conn_kw) -> None: ...
+    def close(self): ...
+    def is_same_host(self, url): ...
+    def urlopen(self, method, url, body=..., headers=..., retries=..., redirect=..., assert_same_host=..., timeout=..., pool_timeout=..., release_conn=..., **response_kw): ...
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+    scheme = ...  # type: Any
+    ConnectionCls = ...  # type: Any
+    key_file = ...  # type: Any
+    cert_file = ...  # type: Any
+    cert_reqs = ...  # type: Any
+    ca_certs = ...  # type: Any
+    ssl_version = ...  # type: Any
+    assert_hostname = ...  # type: Any
+    assert_fingerprint = ...  # type: Any
+    def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., ssl_version=..., assert_hostname=..., assert_fingerprint=..., **conn_kw) -> None: ...
+
+def connection_from_url(url, **kw): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi
new file mode 100644
index 0000000..17d26bb
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for requests.packages.urllib3.contrib (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi b/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi
new file mode 100644
index 0000000..3e7d0f6
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi
@@ -0,0 +1,54 @@
+# Stubs for requests.packages.urllib3.exceptions (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class HTTPError(Exception): ...
+class HTTPWarning(Warning): ...
+
+class PoolError(HTTPError):
+    pool = ...  # type: Any
+    def __init__(self, pool, message) -> None: ...
+    def __reduce__(self): ...
+
+class RequestError(PoolError):
+    url = ...  # type: Any
+    def __init__(self, pool, url, message) -> None: ...
+    def __reduce__(self): ...
+
+class SSLError(HTTPError): ...
+class ProxyError(HTTPError): ...
+class DecodeError(HTTPError): ...
+class ProtocolError(HTTPError): ...
+
+ConnectionError = ...  # type: Any
+
+class MaxRetryError(RequestError):
+    reason = ...  # type: Any
+    def __init__(self, pool, url, reason=...) -> None: ...
+
+class HostChangedError(RequestError):
+    retries = ...  # type: Any
+    def __init__(self, pool, url, retries=...) -> None: ...
+
+class TimeoutStateError(HTTPError): ...
+class TimeoutError(HTTPError): ...
+class ReadTimeoutError(TimeoutError, RequestError): ...
+class ConnectTimeoutError(TimeoutError): ...
+class EmptyPoolError(PoolError): ...
+class ClosedPoolError(PoolError): ...
+class LocationValueError(ValueError, HTTPError): ...
+
+class LocationParseError(LocationValueError):
+    location = ...  # type: Any
+    def __init__(self, location) -> None: ...
+
+class ResponseError(HTTPError):
+    GENERIC_ERROR = ...  # type: Any
+    SPECIFIC_ERROR = ...  # type: Any
+
+class SecurityWarning(HTTPWarning): ...
+class InsecureRequestWarning(SecurityWarning): ...
+class SystemTimeWarning(SecurityWarning): ...
+class InsecurePlatformWarning(SecurityWarning): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/fields.pyi b/typeshed/third_party/3/requests/packages/urllib3/fields.pyi
new file mode 100644
index 0000000..cdc7734
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/fields.pyi
@@ -0,0 +1,16 @@
+# Stubs for requests.packages.urllib3.fields (Python 3.4)
+
+from typing import Any
+from . import packages
+
+def guess_content_type(filename, default=...): ...
+def format_header_param(name, value): ...
+
+class RequestField:
+    data = ...  # type: Any
+    headers = ...  # type: Any
+    def __init__(self, name, data, filename=..., headers=...) -> None: ...
+    @classmethod
+    def from_tuples(cls, fieldname, value): ...
+    def render_headers(self): ...
+    def make_multipart(self, content_disposition=..., content_type=..., content_location=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi b/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi
new file mode 100644
index 0000000..c6fefa6
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi
@@ -0,0 +1,19 @@
+# Stubs for requests.packages.urllib3.filepost (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import packages
+#from .packages import six
+from . import fields
+
+#six = packages.six
+#b = six.b
+RequestField = fields.RequestField
+
+writer = ...  # type: Any
+
+def choose_boundary(): ...
+def iter_field_objects(fields): ...
+def iter_fields(fields): ...
+def encode_multipart_formdata(fields, boundary=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi
new file mode 100644
index 0000000..2314636
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for requests.packages.urllib3.packages (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
new file mode 100644
index 0000000..9efeac0
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.packages.urllib3.packages.ssl_match_hostname (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+import ssl
+
+CertificateError = ssl.CertificateError
+match_hostname = ssl.match_hostname
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
new file mode 100644
index 0000000..5abbc9d
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
@@ -0,0 +1,7 @@
+# Stubs for requests.packages.urllib3.packages.ssl_match_hostname._implementation (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class CertificateError(ValueError): ...
+
+def match_hostname(cert, hostname): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi b/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi
new file mode 100644
index 0000000..a65f664
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi
@@ -0,0 +1,31 @@
+# Stubs for requests.packages.urllib3.poolmanager (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .request import RequestMethods
+
+class PoolManager(RequestMethods):
+    proxy = ...  # type: Any
+    connection_pool_kw = ...  # type: Any
+    pools = ...  # type: Any
+    def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ...
+    def __enter__(self): ...
+    def __exit__(self, exc_type, exc_val, exc_tb): ...
+    def clear(self): ...
+    def connection_from_host(self, host, port=..., scheme=...): ...
+    def connection_from_url(self, url): ...
+    # TODO: This was the original signature -- copied another one from base class to fix complaint.
+    # def urlopen(self, method, url, redirect=True, **kw): ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+
+class ProxyManager(PoolManager):
+    proxy = ...  # type: Any
+    proxy_headers = ...  # type: Any
+    def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ...
+    def connection_from_host(self, host, port=..., scheme=...): ...
+    # TODO: This was the original signature -- copied another one from base class to fix complaint.
+    # def urlopen(self, method, url, redirect=True, **kw): ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+
+def proxy_from_url(url, **kw): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/request.pyi b/typeshed/third_party/3/requests/packages/urllib3/request.pyi
new file mode 100644
index 0000000..788c759
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/request.pyi
@@ -0,0 +1,13 @@
+# Stubs for requests.packages.urllib3.request (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class RequestMethods:
+    headers = ...  # type: Any
+    def __init__(self, headers=...) -> None: ...
+    def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
+    def request(self, method, url, fields=..., headers=..., **urlopen_kw): ...
+    def request_encode_url(self, method, url, fields=..., **urlopen_kw): ...
+    def request_encode_body(self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/response.pyi b/typeshed/third_party/3/requests/packages/urllib3/response.pyi
new file mode 100644
index 0000000..62cdaf4
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/response.pyi
@@ -0,0 +1,58 @@
+# Stubs for requests.packages.urllib3.response (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import io
+from . import _collections
+from . import exceptions
+#from .packages import six
+from .connection import HTTPException as HTTPException, BaseSSLError as BaseSSLError
+from .util import response
+
+HTTPHeaderDict = _collections.HTTPHeaderDict
+ProtocolError = exceptions.ProtocolError
+DecodeError = exceptions.DecodeError
+ReadTimeoutError = exceptions.ReadTimeoutError
+binary_type = bytes # six.binary_type
+PY3 = True # six.PY3
+is_fp_closed = response.is_fp_closed
+
+class DeflateDecoder:
+    def __init__(self) -> None: ...
+    def __getattr__(self, name): ...
+    def decompress(self, data): ...
+
+class GzipDecoder:
+    def __init__(self) -> None: ...
+    def __getattr__(self, name): ...
+    def decompress(self, data): ...
+
+class HTTPResponse(io.IOBase):
+    CONTENT_DECODERS = ...  # type: Any
+    REDIRECT_STATUSES = ...  # type: Any
+    headers = ...  # type: Any
+    status = ...  # type: Any
+    version = ...  # type: Any
+    reason = ...  # type: Any
+    strict = ...  # type: Any
+    decode_content = ...  # type: Any
+    def __init__(self, body=..., headers=..., status=..., version=..., reason=..., strict=..., preload_content=..., decode_content=..., original_response=..., pool=..., connection=...) -> None: ...
+    def get_redirect_location(self): ...
+    def release_conn(self): ...
+    @property
+    def data(self): ...
+    def tell(self): ...
+    def read(self, amt=..., decode_content=..., cache_content=...): ...
+    def stream(self, amt=..., decode_content=...): ...
+    @classmethod
+    def from_httplib(ResponseCls, r, **response_kw): ...
+    def getheaders(self): ...
+    def getheader(self, name, default=...): ...
+    def close(self): ...
+    @property
+    def closed(self): ...
+    def fileno(self): ...
+    def flush(self): ...
+    def readable(self): ...
+    def readinto(self, b): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi
new file mode 100644
index 0000000..e4a0e13
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi
@@ -0,0 +1,29 @@
+# Stubs for requests.packages.urllib3.util (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from . import connection
+from . import request
+from . import response
+from . import ssl_
+from . import timeout
+from . import retry
+from . import url
+import ssl
+
+is_connection_dropped = connection.is_connection_dropped
+make_headers = request.make_headers
+is_fp_closed = response.is_fp_closed
+SSLContext = ssl.SSLContext
+HAS_SNI = ssl_.HAS_SNI
+assert_fingerprint = ssl_.assert_fingerprint
+resolve_cert_reqs = ssl_.resolve_cert_reqs
+resolve_ssl_version = ssl_.resolve_ssl_version
+ssl_wrap_socket = ssl_.ssl_wrap_socket
+current_time = timeout.current_time
+Timeout = timeout.Timeout
+Retry = retry.Retry
+get_host = url.get_host
+parse_url = url.parse_url
+split_first = url.split_first
+Url = url.Url
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi
new file mode 100644
index 0000000..cd67309
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi
@@ -0,0 +1,11 @@
+# Stubs for requests.packages.urllib3.util.connection (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+poll = ...  # type: Any
+select = ...  # type: Any
+
+def is_connection_dropped(conn): ...
+def create_connection(address, timeout=..., source_address=..., socket_options=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi
new file mode 100644
index 0000000..20a6ea2
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi
@@ -0,0 +1,12 @@
+# Stubs for requests.packages.urllib3.util.request (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+#from ..packages import six
+
+#b = six.b
+
+ACCEPT_ENCODING = ...  # type: Any
+
+def make_headers(keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi
new file mode 100644
index 0000000..761a006
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi
@@ -0,0 +1,5 @@
+# Stubs for requests.packages.urllib3.util.response (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def is_fp_closed(obj): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi
new file mode 100644
index 0000000..e958d90
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi
@@ -0,0 +1,36 @@
+# Stubs for requests.packages.urllib3.util.retry (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+from .. import packages
+
+ConnectTimeoutError = exceptions.ConnectTimeoutError
+MaxRetryError = exceptions.MaxRetryError
+ProtocolError = exceptions.ProtocolError
+ReadTimeoutError = exceptions.ReadTimeoutError
+ResponseError = exceptions.ResponseError
+
+log = ...  # type: Any
+
+class Retry:
+    DEFAULT_METHOD_WHITELIST = ...  # type: Any
+    BACKOFF_MAX = ...  # type: Any
+    total = ...  # type: Any
+    connect = ...  # type: Any
+    read = ...  # type: Any
+    redirect = ...  # type: Any
+    status_forcelist = ...  # type: Any
+    method_whitelist = ...  # type: Any
+    backoff_factor = ...  # type: Any
+    raise_on_redirect = ...  # type: Any
+    def __init__(self, total=..., connect=..., read=..., redirect=..., method_whitelist=..., status_forcelist=..., backoff_factor=..., raise_on_redirect=..., _observed_errors=...) -> None: ...
+    def new(self, **kw): ...
+    @classmethod
+    def from_int(cls, retries, redirect=..., default=...): ...
+    def get_backoff_time(self): ...
+    def sleep(self): ...
+    def is_forced_retry(self, method, status_code): ...
+    def is_exhausted(self): ...
+    def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi
new file mode 100644
index 0000000..c7db0ac
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi
@@ -0,0 +1,24 @@
+# Stubs for requests.packages.urllib3.util.ssl_ (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+import ssl
+
+SSLError = exceptions.SSLError
+InsecurePlatformWarning = exceptions.InsecurePlatformWarning
+SSLContext = ssl.SSLContext
+
+HAS_SNI = ...  # type: Any
+create_default_context = ...  # type: Any
+OP_NO_SSLv2 = ...  # type: Any
+OP_NO_SSLv3 = ...  # type: Any
+OP_NO_COMPRESSION = ...  # type: Any
+
+def assert_fingerprint(cert, fingerprint): ...
+def resolve_cert_reqs(candidate): ...
+def resolve_ssl_version(candidate): ...
+def create_urllib3_context(ssl_version=..., cert_reqs=..., options=..., ciphers=...): ...
+def ssl_wrap_socket(sock, keyfile=..., certfile=..., cert_reqs=..., ca_certs=...,
+                    server_hostname=..., ssl_version=..., ciphers=..., ssl_context=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi
new file mode 100644
index 0000000..0a7653c
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi
@@ -0,0 +1,24 @@
+# Stubs for requests.packages.urllib3.util.timeout (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+
+TimeoutStateError = exceptions.TimeoutStateError
+
+def current_time(): ...
+
+class Timeout:
+    DEFAULT_TIMEOUT = ...  # type: Any
+    total = ...  # type: Any
+    def __init__(self, total=..., connect=..., read=...) -> None: ...
+    @classmethod
+    def from_float(cls, timeout): ...
+    def clone(self): ...
+    def start_connect(self): ...
+    def get_connect_duration(self): ...
+    @property
+    def connect_timeout(self): ...
+    @property
+    def read_timeout(self): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi
new file mode 100644
index 0000000..9877b4a
--- /dev/null
+++ b/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi
@@ -0,0 +1,26 @@
+# Stubs for requests.packages.urllib3.util.url (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .. import exceptions
+
+LocationParseError = exceptions.LocationParseError
+
+url_attrs = ...  # type: Any
+
+class Url:
+    slots = ...  # type: Any
+    def __new__(cls, scheme=..., auth=..., host=..., port=..., path=..., query=..., fragment=...): ...
+    @property
+    def hostname(self): ...
+    @property
+    def request_uri(self): ...
+    @property
+    def netloc(self): ...
+    @property
+    def url(self): ...
+
+def split_first(s, delims): ...
+def parse_url(url): ...
+def get_host(url): ...
diff --git a/typeshed/third_party/3/requests/sessions.pyi b/typeshed/third_party/3/requests/sessions.pyi
new file mode 100644
index 0000000..9c17ed5
--- /dev/null
+++ b/typeshed/third_party/3/requests/sessions.pyi
@@ -0,0 +1,92 @@
+# Stubs for requests.sessions (Python 3)
+
+from typing import Any, Union, MutableMapping
+from . import auth
+from . import compat
+from . import cookies
+from . import models
+from .models import Response
+from . import hooks
+from . import utils
+from . import exceptions
+from .packages.urllib3 import _collections
+from . import structures
+from . import adapters
+from . import status_codes
+
+OrderedDict = compat.OrderedDict
+cookiejar_from_dict = cookies.cookiejar_from_dict
+extract_cookies_to_jar = cookies.extract_cookies_to_jar
+RequestsCookieJar = cookies.RequestsCookieJar
+merge_cookies = cookies.merge_cookies
+Request = models.Request
+PreparedRequest = models.PreparedRequest
+DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT
+default_hooks = hooks.default_hooks
+dispatch_hook = hooks.dispatch_hook
+to_key_val_list = utils.to_key_val_list
+default_headers = utils.default_headers
+to_native_string = utils.to_native_string
+TooManyRedirects = exceptions.TooManyRedirects
+InvalidSchema = exceptions.InvalidSchema
+ChunkedEncodingError = exceptions.ChunkedEncodingError
+ContentDecodingError = exceptions.ContentDecodingError
+RecentlyUsedContainer = _collections.RecentlyUsedContainer
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+HTTPAdapter = adapters.HTTPAdapter
+requote_uri = utils.requote_uri
+get_environ_proxies = utils.get_environ_proxies
+get_netrc_auth = utils.get_netrc_auth
+should_bypass_proxies = utils.should_bypass_proxies
+get_auth_from_url = utils.get_auth_from_url
+codes = status_codes.codes
+REDIRECT_STATI = models.REDIRECT_STATI
+
+REDIRECT_CACHE_SIZE = ... # type: Any
+
+def merge_setting(request_setting, session_setting, dict_class=...): ...
+def merge_hooks(request_hooks, session_hooks, dict_class=...): ...
+
+class SessionRedirectMixin:
+    def resolve_redirects(self, resp, req, stream=..., timeout=..., verify=..., cert=...,
+                          proxies=...): ...
+    def rebuild_auth(self, prepared_request, response): ...
+    def rebuild_proxies(self, prepared_request, proxies): ...
+
+class Session(SessionRedirectMixin):
+    __attrs__ = ... # type: Any
+    headers = ... # type: MutableMapping[str, str]
+    auth = ... # type: Any
+    proxies = ... # type: Any
+    hooks = ... # type: Any
+    params = ... # type: Any
+    stream = ... # type: Any
+    verify = ... # type: Any
+    cert = ... # type: Any
+    max_redirects = ... # type: Any
+    trust_env = ... # type: Any
+    cookies = ... # type: Any
+    adapters = ... # type: Any
+    redirect_cache = ... # type: Any
+    def __init__(self) -> None: ...
+    def __enter__(self) -> 'Session': ...
+    def __exit__(self, *args) -> None: ...
+    def prepare_request(self, request): ...
+    def request(self, method: str, url: Union[str, bytes], params=..., data=..., headers=...,
+                cookies=..., files=..., auth=..., timeout=..., allow_redirects=...,
+                proxies=..., hooks=..., stream=..., verify=..., cert=...,
+                json=...) -> Response: ...
+    def get(self, url: Union[str, bytes], **kwargs) -> Response: ...
+    def options(self, url: Union[str, bytes], **kwargs) -> Response: ...
+    def head(self, url: Union[str, bytes], **kwargs) -> Response: ...
+    def post(self, url: Union[str, bytes], data=..., json=..., **kwargs) -> Response: ...
+    def put(self, url: Union[str, bytes], data=..., **kwargs) -> Response: ...
+    def patch(self, url: Union[str, bytes], data=..., **kwargs) -> Response: ...
+    def delete(self, url: Union[str, bytes], **kwargs) -> Response: ...
+    def send(self, request, **kwargs): ...
+    def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
+    def get_adapter(self, url): ...
+    def close(self) -> None: ...
+    def mount(self, prefix, adapter): ...
+
+def session() -> Session: ...
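
A minimal sketch (illustrative only, not part of the upstream diff) of what the annotated parts of this stub buy you: mypy knows Session is a context manager, that headers maps str to str, and that the HTTP verbs return Response.

    from requests.models import Response
    from requests.sessions import Session

    def fetch(url: str) -> Response:
        with Session() as s:               # __enter__() -> 'Session'
            s.headers["Accept"] = "*/*"    # headers: MutableMapping[str, str]
            return s.get(url)              # get(...) -> Response
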
diff --git a/typeshed/third_party/3/requests/status_codes.pyi b/typeshed/third_party/3/requests/status_codes.pyi
new file mode 100644
index 0000000..e3035eb
--- /dev/null
+++ b/typeshed/third_party/3/requests/status_codes.pyi
@@ -0,0 +1,8 @@
+# Stubs for requests.status_codes (Python 3)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .structures import LookupDict
+
+codes = ...  # type: Any
diff --git a/typeshed/third_party/3/requests/structures.pyi b/typeshed/third_party/3/requests/structures.pyi
new file mode 100644
index 0000000..837cf25
--- /dev/null
+++ b/typeshed/third_party/3/requests/structures.pyi
@@ -0,0 +1,21 @@
+# Stubs for requests.structures (Python 3)
+
+from typing import Any
+import collections
+
+class CaseInsensitiveDict(collections.MutableMapping):
+    def __init__(self, data=..., **kwargs) -> None: ...
+    def __setitem__(self, key, value): ...
+    def __getitem__(self, key): ...
+    def __delitem__(self, key): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+    def lower_items(self): ...
+    def __eq__(self, other): ...
+    def copy(self): ...
+
+class LookupDict(dict):
+    name = ...  # type: Any
+    def __init__(self, name=...) -> None: ...
+    def __getitem__(self, key): ...
+    def get(self, key, default=...): ...
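
For reference (illustrative only, not part of the upstream diff), this is the behaviour the CaseInsensitiveDict stub describes: key lookups ignore case, and lower_items() yields lowercased keys.

    from requests.structures import CaseInsensitiveDict

    headers = CaseInsensitiveDict({"Content-Type": "application/json"})
    assert headers["content-type"] == "application/json"
    assert dict(headers.lower_items()) == {"content-type": "application/json"}
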
diff --git a/typeshed/third_party/3/requests/utils.pyi b/typeshed/third_party/3/requests/utils.pyi
new file mode 100644
index 0000000..945277a
--- /dev/null
+++ b/typeshed/third_party/3/requests/utils.pyi
@@ -0,0 +1,52 @@
+# Stubs for requests.utils (Python 3)
+
+from typing import Any
+from . import compat
+from . import cookies
+from . import structures
+from . import exceptions
+
+OrderedDict = compat.OrderedDict
+RequestsCookieJar = cookies.RequestsCookieJar
+cookiejar_from_dict = cookies.cookiejar_from_dict
+CaseInsensitiveDict = structures.CaseInsensitiveDict
+InvalidURL = exceptions.InvalidURL
+
+NETRC_FILES = ...  # type: Any
+DEFAULT_CA_BUNDLE_PATH = ...  # type: Any
+
+def dict_to_sequence(d): ...
+def super_len(o): ...
+def get_netrc_auth(url): ...
+def guess_filename(obj): ...
+def from_key_val_list(value): ...
+def to_key_val_list(value): ...
+def parse_list_header(value): ...
+def parse_dict_header(value): ...
+def unquote_header_value(value, is_filename=...): ...
+def dict_from_cookiejar(cj): ...
+def add_dict_to_cookiejar(cj, cookie_dict): ...
+def get_encodings_from_content(content): ...
+def get_encoding_from_headers(headers): ...
+def stream_decode_response_unicode(iterator, r): ...
+def iter_slices(string, slice_length): ...
+def get_unicode_from_response(r): ...
+
+UNRESERVED_SET = ...  # type: Any
+
+def unquote_unreserved(uri): ...
+def requote_uri(uri): ...
+def address_in_network(ip, net): ...
+def dotted_netmask(mask): ...
+def is_ipv4_address(string_ip): ...
+def is_valid_cidr(string_network): ...
+def should_bypass_proxies(url): ...
+def get_environ_proxies(url): ...
+def default_user_agent(name=...): ...
+def default_headers(): ...
+def parse_header_links(value): ...
+def guess_json_utf(data): ...
+def prepend_scheme_if_needed(url, new_scheme): ...
+def get_auth_from_url(url): ...
+def to_native_string(string, encoding=...): ...
+def urldefragauth(url): ...
diff --git a/typeshed/third_party/3/six/__init__.pyi b/typeshed/third_party/3/six/__init__.pyi
new file mode 100644
index 0000000..0607cc1
--- /dev/null
+++ b/typeshed/third_party/3/six/__init__.pyi
@@ -0,0 +1,103 @@
+# Stubs for six (Python 3.5)
+
+from __future__ import print_function
+
+from typing import (
+    Any,
+    AnyStr,
+    Callable,
+    Dict,
+    ItemsView,
+    Iterable,
+    KeysView,
+    Mapping,
+    Optional,
+    Pattern,
+    Tuple,
+    TypeVar,
+    Union,
+    ValuesView,
+    overload,
+)
+import typing
+
+import unittest
+import types
+
+_T = TypeVar('_T')
+_K = TypeVar('_K')
+_V = TypeVar('_V')
+
+# TODO make constant, then move this stub to 2and3
+# https://github.com/python/typeshed/issues/17
+PY2 = False
+PY3 = True
+PY34 = ...  # type: bool
+
+string_types = str,
+integer_types = int,
+class_types = type,
+text_type = str
+binary_type = bytes
+
+MAXSIZE = ... # type: int
+
+#def add_move
+#def remove_move
+
+from builtins import next as advance_iterator
+next = advance_iterator
+
+def callable(obj: object) -> bool: ...
+
+def get_unbound_function(unbound: types.FunctionType) -> types.FunctionType: ...
+def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ...
+def create_unbound_method(func: types.FunctionType, cls: type) -> types.FunctionType: ...
+
+Iterator = object
+
+def get_method_function(meth: types.MethodType) -> types.FunctionType: ...
+def get_method_self(meth: types.MethodType) -> Optional[object]: ...
+def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ...
+def get_function_code(fun: types.FunctionType) -> types.CodeType: ...
+def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ...
+def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ...
+
+def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ...
+def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ...
+def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ...
+#def iterlists
+
+def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ...
+def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ...
+def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ...
+
+def b(s: str) -> binary_type: ...
+def u(s: str) -> text_type: ...
+
+unichr = chr
+def int2byte(i: int) -> bytes: ...
+def byte2int(bs: binary_type) -> int: ...
+def indexbytes(buf: binary_type, i: int) -> int: ...
+def iterbytes(buf: binary_type) -> typing.Iterator[int]: ...
+from io import StringIO as StringIO, BytesIO as BytesIO
+
+def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str = None) -> None: ...
+@overload
+def assertRaisesRegex(self: unittest.TestCase, msg: str = None) -> Any: ...
+@overload
+def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ...
+def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = None) -> None: ...
+
+exec_ = exec
+
+def reraise(tp: type, value: Optional[BaseException], tb: types.TracebackType = None) -> None: ...
+def raise_from(value: BaseException, from_value: BaseException) -> None: ...
+
+print_ = print
+
+from functools import wraps as wraps
+
+def with_metaclass(meta: type, *bases: type) -> type: ...
+def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ...
+def python_2_unicode_compatible(klass: _T) -> _T: ...
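
A short sketch (illustrative only, not part of the upstream diff): because iterkeys/itervalues/iteritems are declared generically above, mypy can follow element types through six's dictionary helpers.

    from typing import Dict
    import six

    counts = {"mypy": 1, "typeshed": 2}  # type: Dict[str, int]
    for name, n in six.iteritems(counts):    # Iterator[Tuple[str, int]]
        print(name, n + 1)
    data = six.b("raw")        # bytes on Python 3
    text = six.u("unicode")    # str on Python 3
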
diff --git a/typeshed/third_party/3/six/moves/__init__.pyi b/typeshed/third_party/3/six/moves/__init__.pyi
new file mode 100644
index 0000000..49cf9bc
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/__init__.pyi
@@ -0,0 +1,32 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves (Python 3.2)
+
+from io import StringIO as cStringIO
+from builtins import filter as filter
+from itertools import filterfalse as filterfalse
+from builtins import input as input
+from sys import intern as intern
+from builtins import map as map
+from os import getcwd as getcwd
+from os import getcwdb as getcwdb
+from builtins import range as range
+from imp import reload as reload_module
+from functools import reduce as reduce
+from shlex import quote as shlex_quote
+from io import StringIO as StringIO
+from collections import UserDict as UserDict
+from collections import UserList as UserList
+from collections import UserString as UserString
+from builtins import range as xrange
+from builtins import zip as zip
+from itertools import zip_longest as zip_longest
+import six.moves.cPickle as cPickle
+import html.parser as html_parser
+import html.entities as html_entities
+
+import six.moves.urllib_parse as urllib_parse
+import six.moves.urllib_error as urllib_error
+import six.moves.urllib as urllib
+import six.moves.urllib_robotparser as urllib_robotparser
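
As the stub above suggests (illustrative only, not part of the upstream diff), on Python 3 the six.moves names are plain re-exports of standard-library objects:

    from six.moves import cStringIO, range as compat_range
    from six.moves.urllib.parse import urlparse

    assert compat_range is range                         # re-exported builtin
    print(urlparse("https://example.com/docs").path)     # '/docs'
    buf = cStringIO()                                     # io.StringIO on Python 3
    buf.write("hello")
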
diff --git a/typeshed/third_party/3/six/moves/cPickle.pyi b/typeshed/third_party/3/six/moves/cPickle.pyi
new file mode 100644
index 0000000..aa9f2bc
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/cPickle.pyi
@@ -0,0 +1,6 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.cPickle (Python 3.2)
+
+from pickle import *
diff --git a/typeshed/third_party/3/six/moves/urllib/__init__.pyi b/typeshed/third_party/3/six/moves/urllib/__init__.pyi
new file mode 100644
index 0000000..298b049
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/__init__.pyi
@@ -0,0 +1,10 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib (Python 3.2)
+
+import six.moves.urllib.error as error
+import six.moves.urllib.parse as parse
+import six.moves.urllib.request as request
+import six.moves.urllib.response as response
+import six.moves.urllib.robotparser as robotparser
diff --git a/typeshed/third_party/3/six/moves/urllib/error.pyi b/typeshed/third_party/3/six/moves/urllib/error.pyi
new file mode 100644
index 0000000..a45b0f7
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/error.pyi
@@ -0,0 +1,8 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.error (Python 3.2)
+
+from urllib.error import URLError as URLError
+from urllib.error import HTTPError as HTTPError
+from urllib.error import ContentTooShortError as ContentTooShortError
diff --git a/typeshed/third_party/3/six/moves/urllib/parse.pyi b/typeshed/third_party/3/six/moves/urllib/parse.pyi
new file mode 100644
index 0000000..c640af2
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/parse.pyi
@@ -0,0 +1,22 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.parse (Python 3.2)
+
+from six.moves.urllib_parse import (
+    ParseResult as ParseResult,
+    SplitResult as SplitResult,
+    parse_qs as parse_qs,
+    parse_qsl as parse_qsl,
+    urldefrag as urldefrag,
+    urljoin as urljoin,
+    urlparse as urlparse,
+    urlsplit as urlsplit,
+    urlunparse as urlunparse,
+    urlunsplit as urlunsplit,
+    quote as quote,
+    quote_plus as quote_plus,
+    unquote as unquote,
+    unquote_plus as unquote_plus,
+    urlencode as urlencode,
+)
diff --git a/typeshed/third_party/3/six/moves/urllib/request.pyi b/typeshed/third_party/3/six/moves/urllib/request.pyi
new file mode 100644
index 0000000..1a75378
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/request.pyi
@@ -0,0 +1,40 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.request (Python 3.2)
+
+from urllib.request import BaseHandler as BaseHandler
+from urllib.request import HTTPRedirectHandler as HTTPRedirectHandler
+from urllib.request import OpenerDirector as OpenerDirector
+
+from urllib.request import install_opener as install_opener
+from urllib.request import build_opener as build_opener
+
+# from urllib.request import urlopen as urlopen
+# from urllib.request import pathname2url as pathname2url
+# from urllib.request import url2pathname as url2pathname
+# from urllib.request import getproxies as getproxies
+# from urllib.request import Request as Request
+# from urllib.request import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler
+# from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor
+# from urllib.request import ProxyHandler as ProxyHandler
+# from urllib.request import HTTPPasswordMgr as HTTPPasswordMgr
+# from urllib.request import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm
+# from urllib.request import AbstractBasicAuthHandler as AbstractBasicAuthHandler
+# from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler
+# from urllib.request import ProxyBasicAuthHandler as ProxyBasicAuthHandler
+# from urllib.request import AbstractDigestAuthHandler as AbstractDigestAuthHandler
+# from urllib.request import HTTPDigestAuthHandler as HTTPDigestAuthHandler
+# from urllib.request import ProxyDigestAuthHandler as ProxyDigestAuthHandler
+# from urllib.request import HTTPHandler as HTTPHandler
+# from urllib.request import HTTPSHandler as HTTPSHandler
+# from urllib.request import FileHandler as FileHandler
+# from urllib.request import FTPHandler as FTPHandler
+# from urllib.request import CacheFTPHandler as CacheFTPHandler
+# from urllib.request import UnknownHandler as UnknownHandler
+# from urllib.request import HTTPErrorProcessor as HTTPErrorProcessor
+# from urllib.request import urlretrieve as urlretrieve
+# from urllib.request import urlcleanup as urlcleanup
+# from urllib.request import URLopener as URLopener
+# from urllib.request import FancyURLopener as FancyURLopener
+from urllib.request import proxy_bypass as proxy_bypass
diff --git a/typeshed/third_party/3/six/moves/urllib/response.pyi b/typeshed/third_party/3/six/moves/urllib/response.pyi
new file mode 100644
index 0000000..93ec5ce
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/response.pyi
@@ -0,0 +1,9 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.response (Python 3.2)
+
+from urllib.response import addbase as addbase
+from urllib.response import addclosehook as addclosehook
+from urllib.response import addinfo as addinfo
+from urllib.response import addinfourl as addinfourl
diff --git a/typeshed/third_party/3/six/moves/urllib/robotparser.pyi b/typeshed/third_party/3/six/moves/urllib/robotparser.pyi
new file mode 100644
index 0000000..6690355
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib/robotparser.pyi
@@ -0,0 +1,6 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib.robotparser (Python 3.2)
+
+from urllib.robotparser import RobotFileParser as RobotFileParser
diff --git a/typeshed/third_party/3/six/moves/urllib_error.pyi b/typeshed/third_party/3/six/moves/urllib_error.pyi
new file mode 100644
index 0000000..1368664
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib_error.pyi
@@ -0,0 +1,10 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_error (Python 3.2)
+
+from six.moves.urllib.error import (
+    URLError as URLError,
+    HTTPError as HTTPError,
+    ContentTooShortError as ContentTooShortError,
+)
diff --git a/typeshed/third_party/3/six/moves/urllib_parse.pyi b/typeshed/third_party/3/six/moves/urllib_parse.pyi
new file mode 100644
index 0000000..96f6207
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib_parse.pyi
@@ -0,0 +1,20 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_parse (Python 3.2)
+
+from urllib.parse import ParseResult as ParseResult
+from urllib.parse import SplitResult as SplitResult
+from urllib.parse import parse_qs as parse_qs
+from urllib.parse import parse_qsl as parse_qsl
+from urllib.parse import urldefrag as urldefrag
+from urllib.parse import urljoin as urljoin
+from urllib.parse import urlparse as urlparse
+from urllib.parse import urlsplit as urlsplit
+from urllib.parse import urlunparse as urlunparse
+from urllib.parse import urlunsplit as urlunsplit
+from urllib.parse import quote as quote
+from urllib.parse import quote_plus as quote_plus
+from urllib.parse import unquote as unquote
+from urllib.parse import unquote_plus as unquote_plus
+from urllib.parse import urlencode as urlencode
diff --git a/typeshed/third_party/3/six/moves/urllib_request.pyi b/typeshed/third_party/3/six/moves/urllib_request.pyi
new file mode 100644
index 0000000..168f635
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib_request.pyi
@@ -0,0 +1,41 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_request (Python 3.2)
+
+from six.moves.urllib.request import (
+    install_opener as install_opener,
+    build_opener as build_opener,
+    BaseHandler as BaseHandler,
+    OpenerDirector as OpenerDirector,
+    HTTPRedirectHandler as HTTPRedirectHandler,
+
+    # urlopen as urlopen,
+    # pathname2url as pathname2url,
+    # url2pathname as url2pathname,
+    # getproxies as getproxies,
+    # Request as Request,
+    # HTTPDefaultErrorHandler as HTTPDefaultErrorHandler,
+    # HTTPCookieProcessor as HTTPCookieProcessor,
+    # ProxyHandler as ProxyHandler,
+    # HTTPPasswordMgr as HTTPPasswordMgr,
+    # HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm,
+    # AbstractBasicAuthHandler as AbstractBasicAuthHandler,
+    # HTTPBasicAuthHandler as HTTPBasicAuthHandler,
+    # ProxyBasicAuthHandler as ProxyBasicAuthHandler,
+    # AbstractDigestAuthHandler as AbstractDigestAuthHandler,
+    # HTTPDigestAuthHandler as HTTPDigestAuthHandler,
+    # ProxyDigestAuthHandler as ProxyDigestAuthHandler,
+    # HTTPHandler as HTTPHandler,
+    # HTTPSHandler as HTTPSHandler,
+    # FileHandler as FileHandler,
+    # FTPHandler as FTPHandler,
+    # CacheFTPHandler as CacheFTPHandler,
+    # UnknownHandler as UnknownHandler,
+    # HTTPErrorProcessor as HTTPErrorProcessor,
+    # urlretrieve as urlretrieve,
+    # urlcleanup as urlcleanup,
+    # URLopener as URLopener,
+    # FancyURLopener as FancyURLopener,
+    # proxy_bypass as proxy_bypass,
+)
diff --git a/typeshed/third_party/3/six/moves/urllib_response.pyi b/typeshed/third_party/3/six/moves/urllib_response.pyi
new file mode 100644
index 0000000..0a834a7
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib_response.pyi
@@ -0,0 +1,11 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_response (Python 3.2)
+
+from six.moves.urllib.response import (
+    addbase as addbase,
+    addclosehook as addclosehook,
+    addinfo as addinfo,
+    addinfourl as addinfourl,
+)
diff --git a/typeshed/third_party/3/six/moves/urllib_robotparser.pyi b/typeshed/third_party/3/six/moves/urllib_robotparser.pyi
new file mode 100644
index 0000000..8b4ca73
--- /dev/null
+++ b/typeshed/third_party/3/six/moves/urllib_robotparser.pyi
@@ -0,0 +1,8 @@
+# Generated by stubtool 0.1, DO NOT EDIT
+# See https://github.com/o11c/stubtool
+#
+# Stubs for six.moves.urllib_robotparser (Python 3.2)
+
+from six.moves.urllib.robotparser import (
+    RobotFileParser as RobotFileParser,
+)
diff --git a/xml/mypy-html.css b/xml/mypy-html.css
new file mode 100644
index 0000000..1a3302d
--- /dev/null
+++ b/xml/mypy-html.css
@@ -0,0 +1,104 @@
+/* CSS for type check coverage reports */
+
+/*
+    Used by both summary and file.
+*/
+body {
+    font-family: "Helvetica Neue", sans-serif;
+}
+
+/*
+    Used only by summary.
+*/
+
+h1 {
+    text-align: center;
+    font-size: 135%;
+    margin: 20px;
+}
+
+table.summary {
+    border-collapse: collapse;
+    margin-left: 7%;
+    margin-right: 7%;
+    width: 85%;
+}
+
+table caption {
+    margin: 1em;
+}
+
+table.summary, tr.summary, th.summary, td.summary {
+    border: 1px solid #aaa;
+}
+
+th.summary, td.summary {
+    padding: 0.4em;
+}
+
+td.summary a {
+    text-decoration: none;
+}
+
+.summary-quality-0 {
+    background-color: #dfd;
+}
+
+.summary-quality-1 {
+    background-color: #ffa;
+}
+
+.summary-quality-2 {
+    background-color: #faa;
+}
+
+td.summary-filename, th.summary-filename {
+    text-align: left;
+}
+
+td.summary-filename {
+    width: 50%;
+}
+
+.summary-precision {
+    text-align: center;
+}
+
+.summary-lines {
+    text-align: center;
+}
+
+/*
+    Used only by file.
+*/
+
+td.table-lines {
+    text-align: right;
+    padding-right: 0.5em;
+}
+
+td.table-code { }
+
+span.lineno {
+    text-align: right;
+}
+
+a:link.lineno, a:visited.lineno {
+    color: #999; text-decoration: none;
+}
+
+a:hover.lineno, a:active.lineno {
+    color: #000; text-decoration: underline;
+}
+
+.line-empty, .line-precise {
+    background-color: #dfd;
+}
+
+.line-imprecise {
+    background-color: #ffa;
+}
+
+.line-any {
+    background-color: #faa;
+}
diff --git a/xml/mypy-html.xslt b/xml/mypy-html.xslt
new file mode 100644
index 0000000..2e7ed51
--- /dev/null
+++ b/xml/mypy-html.xslt
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- vim: set sts=2 sw=2: -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+  <xsl:param name="ext" select="'xml'"/>
+  <xsl:output method="html"/>
+  <xsl:variable name="xml_stylesheet_pi" select="string(//processing-instruction('xml-stylesheet'))"/>
+  <xsl:variable name="stylesheet_name" select="substring($xml_stylesheet_pi, 23, string-length($xml_stylesheet_pi) - 28)"/>
+  <xsl:template match="/mypy-report-index">
+    <html>
+      <head>
+        <link rel="stylesheet" type="text/css" href="{$stylesheet_name}.css"/>
+      </head>
+      <body>
+        <h1>Mypy Type Check Coverage Summary</h1>
+        <table class="summary">
+          <caption>Summary from <xsl:value-of select="@name"/></caption>
+          <thead>
+            <tr class="summary">
+              <th class="summary">File</th>
+              <th class="summary">Imprecision</th>
+              <th class="summary">Lines</th>
+            </tr>
+          </thead>
+          <tfoot>
+            <xsl:variable name="bad_lines" select="sum(file/@imprecise|file/@any)"/>
+            <xsl:variable name="total_lines" select="sum(file/@total)"/>
+            <xsl:variable name="global_score" select="$bad_lines div ($total_lines + not(number($total_lines)))"/>
+            <xsl:variable name="global_quality" select="string(number(number($global_score) > 0.00) + number(number($global_score) >= 0.20))"/>
+            <tr class="summary summary-quality-{$global_quality}">
+              <th class="summary summary-filename">Total</th>
+              <th class="summary summary-precision"><xsl:value-of select="format-number($global_score, '0.00%')"/> imprecise</th>
+              <th class="summary summary-lines"><xsl:value-of select="$total_lines"/> LOC</th>
+            </tr>
+          </tfoot>
+          <tbody>
+            <xsl:for-each select="file">
+              <xsl:variable name="local_score" select="(@imprecise + @any) div (@total + not(number(@total)))"/>
+              <xsl:variable name="local_quality" select="string(number(number($local_score) > 0.00) + number(number($local_score) >= 0.20))"/>
+              <tr class="summary summary-quality-{$local_quality}">
+                <td class="summary summary-filename"><a href="{$ext}/{@name}.{$ext}"><xsl:value-of select="@module"/></a></td>
+                <td class="summary summary-precision"><xsl:value-of select="format-number($local_score, '0.00%')"/> imprecise</td>
+                <td class="summary summary-lines"><xsl:value-of select="@total"/> LOC</td>
+              </tr>
+            </xsl:for-each>
+          </tbody>
+        </table>
+      </body>
+    </html>
+  </xsl:template>
+  <xsl:template match="/mypy-report-file">
+    <html>
+      <head>
+        <link rel="stylesheet" type="text/css" href="{$stylesheet_name}.css"/>
+      </head>
+      <body>
+        <h2><xsl:value-of select="@module"/></h2>
+        <table>
+          <caption><xsl:value-of select="@name"/></caption>
+          <tbody>
+            <tr>
+              <td class="table-lines">
+                <pre>
+                  <xsl:for-each select="line">
+                    <span id="L{@number}" class="lineno"><a class="lineno" href="#L{@number}"><xsl:value-of select="@number"/></a></span><xsl:text>
</xsl:text>
+                  </xsl:for-each>
+                </pre>
+              </td>
+              <td class="table-code">
+                <pre>
+                  <xsl:for-each select="line">
+                    <span class="line-{@precision}"><xsl:value-of select="@content"/></span><xsl:text>
</xsl:text>
+                  </xsl:for-each>
+                </pre>
+              </td>
+            </tr>
+          </tbody>
+        </table>
+      </body>
+    </html>
+  </xsl:template>
+</xsl:stylesheet>
diff --git a/xml/mypy-txt.xslt b/xml/mypy-txt.xslt
new file mode 100644
index 0000000..fe12065
--- /dev/null
+++ b/xml/mypy-txt.xslt
@@ -0,0 +1,100 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- vim: set sts=2 sw=2: -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:math="http://exslt.org/math" xmlns:str="http://exslt.org/strings">
+  <xsl:output method="text"/>
+  <xsl:template match="/mypy-report-index">
+    <!-- It's possible to output without the <xsl:text> but it's harder to control. -->
+    <xsl:text>Mypy Type Check Coverage Summary
</xsl:text>
+    <xsl:text>================================
</xsl:text>
+    <xsl:text>
</xsl:text>
+    <xsl:text>Script: </xsl:text><xsl:value-of select="@name"/><xsl:text>
</xsl:text>
+    <xsl:text>
</xsl:text>
+
+    <xsl:variable name="max_module_width">
+      <xsl:for-each select="file">
+        <xsl:sort select="string-length(@module)" data-type="number"/>
+        <xsl:if test="position() = last()">
+          <xsl:value-of select="string-length(@module)"/>
+        </xsl:if>
+      </xsl:for-each>
+    </xsl:variable>
+    <xsl:variable name="max_imprecision_width" select="string-length('100.00% imprecise')"/>
+    <xsl:variable name="max_loc_width" select="string-length(concat(sum(file/@total), ' LOC'))"/>
+
+    <xsl:text>+-</xsl:text>
+    <xsl:value-of select="str:padding($max_module_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_imprecision_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_loc_width, '-')"/>
+    <xsl:text>-+
</xsl:text>
+
+    <xsl:text>| </xsl:text>
+    <xsl:value-of select="'Module'"/>
+    <xsl:value-of select="str:padding($max_module_width - string-length('Module'), ' ')"/>
+    <xsl:text> | </xsl:text>
+    <xsl:value-of select="'Imprecision'"/>
+    <xsl:value-of select="str:padding($max_imprecision_width - string-length('Imprecision'), ' ')"/>
+    <xsl:text> | </xsl:text>
+    <xsl:value-of select="'Lines'"/>
+    <xsl:value-of select="str:padding($max_loc_width - string-length('Lines'), ' ')"/>
+    <xsl:text> |
</xsl:text>
+
+    <xsl:text>+-</xsl:text>
+    <xsl:value-of select="str:padding($max_module_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_imprecision_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_loc_width, '-')"/>
+    <xsl:text>-+
</xsl:text>
+
+    <xsl:for-each select="file">
+      <xsl:variable name="score" select="(@imprecise + @any) div (@total + not(number(@total)))"/>
+      <xsl:variable name="imprecision" select="concat(format-number($score, '0.00%'), ' imprecise')"/>
+      <xsl:variable name="lines" select="concat(@total, ' LOC')"/>
+
+      <xsl:text>| </xsl:text>
+      <xsl:value-of select="@module"/>
+      <xsl:value-of select="str:padding($max_module_width - string-length(@module), ' ')"/>
+      <xsl:text> | </xsl:text>
+      <xsl:value-of select="str:padding($max_imprecision_width - string-length($imprecision), ' ')"/>
+      <xsl:value-of select="$imprecision"/>
+      <xsl:text> | </xsl:text>
+      <xsl:value-of select="str:padding($max_loc_width - string-length($lines), ' ')"/>
+      <xsl:value-of select="$lines"/>
+      <xsl:text> |
</xsl:text>
+    </xsl:for-each>
+
+    <xsl:text>+-</xsl:text>
+    <xsl:value-of select="str:padding($max_module_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_imprecision_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_loc_width, '-')"/>
+    <xsl:text>-+
</xsl:text>
+
+    <xsl:variable name="total" select="sum(file/@total)"/>
+    <xsl:variable name="score" select="(sum(file/@imprecise|file/@any)) div ($total + not(number($total)))"/>
+    <xsl:variable name="imprecision" select="concat(format-number($score, '0.00%'), ' imprecise')"/>
+    <xsl:variable name="lines" select="concat($total, ' LOC')"/>
+
+    <xsl:text>| </xsl:text>
+    <xsl:value-of select="'Total'"/>
+    <xsl:value-of select="str:padding($max_module_width - string-length('Total'), ' ')"/>
+    <xsl:text> | </xsl:text>
+    <xsl:value-of select="str:padding($max_imprecision_width - string-length($imprecision), ' ')"/>
+    <xsl:value-of select="$imprecision"/>
+    <xsl:text> | </xsl:text>
+    <xsl:value-of select="str:padding($max_loc_width - string-length($lines), ' ')"/>
+    <xsl:value-of select="$lines"/>
+    <xsl:text> |
</xsl:text>
+
+    <xsl:text>+-</xsl:text>
+    <xsl:value-of select="str:padding($max_module_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_imprecision_width, '-')"/>
+    <xsl:text>-+-</xsl:text>
+    <xsl:value-of select="str:padding($max_loc_width, '-')"/>
+    <xsl:text>-+
</xsl:text>
+  </xsl:template>
+</xsl:stylesheet>
diff --git a/xml/mypy.xsd b/xml/mypy.xsd
new file mode 100644
index 0000000..d5f16eb
--- /dev/null
+++ b/xml/mypy.xsd
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- vim: set sts=2 sw=2: -->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
+  <!-- simple types -->
+  <xs:simpleType name="precision">
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="empty"/>
+      <xs:enumeration value="precise"/>
+      <xs:enumeration value="imprecise"/>
+      <xs:enumeration value="any"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <!-- root elements -->
+  <xs:element name="mypy-report-index">
+    <xs:complexType>
+      <xs:sequence>
+        <xs:element name="file" minOccurs="0" maxOccurs="unbounded">
+          <xs:complexType>
+            <xs:attribute name="name" type="xs:string" use="required"/>
+            <xs:attribute name="module" type="xs:string" use="required"/>
+            <xs:attribute name="total" type="xs:integer" use="required"/>
+            <xs:attribute name="empty" type="xs:integer" use="required"/>
+            <xs:attribute name="precise" type="xs:integer" use="required"/>
+            <xs:attribute name="imprecise" type="xs:integer" use="required"/>
+            <xs:attribute name="any" type="xs:integer" use="required"/>
+          </xs:complexType>
+        </xs:element>
+      </xs:sequence>
+      <xs:attribute name="name" type="xs:string" use="required"/>
+    </xs:complexType>
+  </xs:element>
+  <xs:element name="mypy-report-file">
+    <xs:complexType>
+      <xs:sequence>
+        <xs:element name="line" minOccurs="0" maxOccurs="unbounded">
+          <xs:complexType>
+            <xs:attribute name="number" type="xs:integer" use="required"/>
+            <xs:attribute name="precision" type="precision" use="required"/>
+            <xs:attribute name="content" type="xs:string" use="required"/>
+          </xs:complexType>
+        </xs:element>
+      </xs:sequence>
+      <xs:attribute name="name" type="xs:string" use="required"/>
+      <xs:attribute name="module" type="xs:string" use="required"/>
+    </xs:complexType>
+  </xs:element>
+</xs:schema>
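
A possible way to consume these report assets (illustrative only, not part of the upstream diff; it assumes lxml is installed and an XML coverage report has already been generated -- the report path below is hypothetical): validate the index against mypy.xsd, then render it to HTML with mypy-html.xslt.

    from lxml import etree

    schema = etree.XMLSchema(etree.parse("xml/mypy.xsd"))
    to_html = etree.XSLT(etree.parse("xml/mypy-html.xslt"))

    index = etree.parse("reports/xml/index.xml")  # hypothetical output location
    schema.assertValid(index)                     # raises DocumentInvalid on mismatch
    with open("reports/index.html", "w") as f:
        f.write(str(to_html(index, ext=etree.XSLT.strparam("xml"))))
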

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git



More information about the debian-med-commit mailing list