[med-svn] [mypy] 01/06: New upstream version 0.4.6

Andreas Tille tille at debian.org
Thu Dec 1 13:21:29 UTC 2016


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository mypy.

commit e4ccf548980500bc7b8fb16b62065bcd5f2d80ec
Author: Andreas Tille <tille at debian.org>
Date:   Thu Dec 1 14:12:24 2016 +0100

    New upstream version 0.4.6
---
 PKG-INFO                                           |    2 +-
 lib-typing/2.7/test_typing.py                      |  762 ++++++----
 lib-typing/2.7/typing.py                           | 1334 ++++++++++-------
 lib-typing/3.2/test_typing.py                      |  953 ++++++++----
 lib-typing/3.2/typing.py                           | 1515 ++++++++++++--------
 mypy/applytype.py                                  |    9 +-
 mypy/binder.py                                     |  149 +-
 mypy/build.py                                      |  136 +-
 mypy/checker.py                                    |  717 +++++----
 mypy/checkexpr.py                                  |  200 ++-
 mypy/checkmember.py                                |  243 +++-
 mypy/checkstrformat.py                             |   59 +-
 mypy/constraints.py                                |    2 +-
 mypy/errors.py                                     |   19 +-
 mypy/expandtype.py                                 |    6 +-
 mypy/experiments.py                                |    2 +
 mypy/exprtotype.py                                 |    6 +-
 mypy/fastparse.py                                  |   27 +-
 mypy/fastparse2.py                                 |    5 +
 mypy/fixup.py                                      |   34 +-
 mypy/join.py                                       |    8 +-
 mypy/lex.py                                        |    4 +-
 mypy/main.py                                       |   47 +-
 mypy/maptype.py                                    |   12 +-
 mypy/meet.py                                       |   11 +-
 mypy/messages.py                                   |   81 +-
 mypy/nodes.py                                      |  285 ++--
 mypy/options.py                                    |   33 +-
 mypy/parse.py                                      |   50 +-
 mypy/parsetype.py                                  |   26 +-
 mypy/report.py                                     |  211 ++-
 mypy/semanal.py                                    |  685 ++++++---
 mypy/solve.py                                      |    7 +-
 mypy/stats.py                                      |   14 +-
 mypy/strconv.py                                    |   18 +-
 mypy/stubgen.py                                    |   17 +-
 mypy/subtypes.py                                   |   16 +-
 mypy/traverser.py                                  |    4 +
 mypy/treetransform.py                              |   19 +-
 mypy/typeanal.py                                   |  116 +-
 mypy/types.py                                      |   75 +-
 mypy/util.py                                       |   41 +-
 mypy/version.py                                    |   13 +-
 mypy/visitor.py                                    |    3 +
 setup.cfg                                          |    8 +
 setup.py                                           |   20 +-
 typeshed/stdlib/2.7/xml/sax/__init__.pyi           |   20 -
 typeshed/stdlib/{2.7 => 2}/BaseHTTPServer.pyi      |    0
 typeshed/stdlib/{2.7 => 2}/ConfigParser.pyi        |    7 -
 typeshed/stdlib/{2.7 => 2}/Cookie.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/HTMLParser.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/Queue.pyi               |    6 +-
 typeshed/stdlib/{2.7 => 2}/SocketServer.pyi        |    0
 typeshed/stdlib/{2.7 => 2}/StringIO.pyi            |    1 +
 typeshed/stdlib/{2.7 => 2}/UserDict.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/UserList.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/UserString.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/__builtin__.pyi         |   15 +-
 typeshed/stdlib/{2.7 => 2}/__future__.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/_ast.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/_codecs.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/_collections.pyi        |    0
 typeshed/stdlib/{2.7 => 2}/_functools.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/_hotshot.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/_io.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/_json.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/_md5.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/_random.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/_sha.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/_sha256.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/_sha512.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/_socket.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/_sre.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/_struct.pyi             |    2 +-
 typeshed/stdlib/{2.7 => 2}/_symtable.pyi           |    2 +-
 typeshed/stdlib/{2.7 => 2}/_warnings.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/_weakref.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/_weakrefset.pyi         |    0
 typeshed/stdlib/{2.7 => 2}/abc.pyi                 |    2 -
 typeshed/stdlib/{2.7 => 2}/array.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/ast.pyi                 |    2 -
 typeshed/stdlib/{2.7 => 2}/atexit.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/base64.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/binascii.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/builtins.pyi            |   15 +-
 typeshed/stdlib/{2.7 => 2}/cPickle.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/cStringIO.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/calendar.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/codecs.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/collections.pyi         |    3 +-
 typeshed/stdlib/{2.7 => 2}/compileall.pyi          |    0
 typeshed/stdlib/2/cookielib.pyi                    |  110 ++
 typeshed/stdlib/{2.7 => 2}/copy.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/csv.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/datetime.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/decimal.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/difflib.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/distutils/__init__.pyi  |    0
 .../stdlib/{2.7 => 2}/distutils/emxccompiler.pyi   |    0
 typeshed/stdlib/{2.7 => 2}/doctest.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/email/MIMEText.pyi      |    0
 typeshed/stdlib/{2.7 => 2}/email/__init__.pyi      |    0
 typeshed/stdlib/{2.7 => 2}/email/_parseaddr.pyi    |    0
 typeshed/stdlib/{2.7 => 2}/email/mime/__init__.pyi |    0
 typeshed/stdlib/{2.7 => 2}/email/mime/base.pyi     |    0
 .../stdlib/{2.7 => 2}/email/mime/multipart.pyi     |    0
 .../stdlib/{2.7 => 2}/email/mime/nonmultipart.pyi  |    0
 typeshed/stdlib/{2.7 => 2}/email/mime/text.pyi     |    0
 typeshed/stdlib/{2.7 => 2}/email/utils.pyi         |    0
 typeshed/stdlib/{2.7 => 2}/encodings/__init__.pyi  |    0
 typeshed/stdlib/{2.7 => 2}/encodings/utf_8.pyi     |    0
 typeshed/stdlib/{2.7 => 2}/exceptions.pyi          |    4 +-
 typeshed/stdlib/{2.7 => 2}/fcntl.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/fileinput.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/fnmatch.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/functools.pyi           |    0
 typeshed/stdlib/2/future_builtins.pyi              |   14 +
 typeshed/stdlib/{2.7 => 2}/gc.pyi                  |    0
 typeshed/stdlib/{2.7 => 2}/genericpath.pyi         |    0
 typeshed/stdlib/{2.7 => 2}/getopt.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/getpass.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/gettext.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/glob.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/grp.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/gzip.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/hashlib.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/heapq.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/htmlentitydefs.pyi      |    0
 typeshed/stdlib/{2.7 => 2}/httplib.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/imp.pyi                 |    2 +-
 typeshed/stdlib/{2.7 => 2}/importlib.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/inspect.pyi             |   17 +-
 typeshed/stdlib/{2.7 => 2}/io.pyi                  |    1 +
 typeshed/stdlib/{2.7 => 2}/itertools.pyi           |    4 +-
 typeshed/stdlib/{2.7 => 2}/json.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/linecache.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/markupbase.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/md5.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/mimetools.pyi           |    0
 .../stdlib/{2.7 => 2}/multiprocessing/__init__.pyi |    0
 .../stdlib/{2.7 => 2}/multiprocessing/process.pyi  |    0
 .../stdlib/{2.7 => 2}/multiprocessing/util.pyi     |    0
 typeshed/stdlib/{2.7 => 2}/optparse.pyi            |    6 -
 typeshed/stdlib/{2.7 => 2}/os/__init__.pyi         |   53 +-
 typeshed/stdlib/{2.7 => 2}/os/path.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/pdb.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/pickle.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/pipes.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/platform.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/posix.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/posixpath.pyi           |   20 +-
 typeshed/stdlib/{2.7 => 2}/pprint.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/pwd.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/quopri.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/random.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/re.pyi                  |    0
 typeshed/stdlib/{2.7 => 2}/resource.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/rfc822.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/robotparser.pyi         |    0
 typeshed/stdlib/{2.7 => 2}/runpy.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/select.pyi              |    6 +-
 typeshed/stdlib/{2.7 => 2}/sha.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/shelve.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/shlex.pyi               |    2 +-
 typeshed/stdlib/{2.7 => 2}/shutil.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/signal.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/simplejson/__init__.pyi |    0
 typeshed/stdlib/{2.7 => 2}/simplejson/decoder.pyi  |    0
 typeshed/stdlib/{2.7 => 2}/simplejson/encoder.pyi  |    0
 typeshed/stdlib/{2.7 => 2}/simplejson/scanner.pyi  |    0
 typeshed/stdlib/{2.7 => 2}/smtplib.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/socket.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/spwd.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/sqlite3/__init__.pyi    |    0
 typeshed/stdlib/{2.7 => 2}/sqlite3/dbapi2.pyi      |    0
 typeshed/stdlib/{2.7 => 2}/ssl.pyi                 |    4 +-
 typeshed/stdlib/{2.7 => 2}/stat.pyi                |    0
 typeshed/stdlib/{2.7 => 2}/string.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/strop.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/struct.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/subprocess.pyi          |    0
 typeshed/stdlib/{2.7 => 2}/sys.pyi                 |    0
 typeshed/stdlib/{2.7 => 2}/syslog.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/tempfile.pyi            |   41 +-
 typeshed/stdlib/{2.7 => 2}/textwrap.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/thread.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/time.pyi                |    8 +-
 typeshed/stdlib/{2.7 => 2}/token.pyi               |    0
 typeshed/stdlib/{2.7 => 2}/tokenize.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/types.pyi               |    1 -
 typeshed/stdlib/{2.7 => 2}/typing.pyi              |    3 +-
 typeshed/stdlib/{2.7 => 2}/unicodedata.pyi         |    0
 typeshed/stdlib/{2.7 => 2}/unittest.pyi            |    9 -
 typeshed/stdlib/{2.7 => 2}/urllib.pyi              |    0
 typeshed/stdlib/{2.7 => 2}/urllib2.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/urlparse.pyi            |    0
 typeshed/stdlib/{2.7 => 2}/uuid.pyi                |    8 +-
 typeshed/stdlib/{2.7 => 2}/weakref.pyi             |    0
 typeshed/stdlib/{2.7 => 2}/wsgiref/__init__.pyi    |    0
 typeshed/stdlib/{2.7 => 2}/wsgiref/validate.pyi    |    0
 typeshed/stdlib/{2.7 => 2}/xml/__init__.pyi        |    0
 .../stdlib/{2.7 => 2}/xml/etree/ElementInclude.pyi |    0
 .../stdlib/{2.7 => 2}/xml/etree/ElementPath.pyi    |    0
 .../stdlib/{2.7 => 2}/xml/etree/ElementTree.pyi    |    0
 typeshed/stdlib/{2.7 => 2}/xml/etree/__init__.pyi  |    0
 .../stdlib/{2.7 => 2}/xml/etree/cElementTree.pyi   |    0
 typeshed/stdlib/{2.7 => 2}/xxsubtype.pyi           |    0
 typeshed/stdlib/{2.7 => 2}/zlib.pyi                |    0
 typeshed/stdlib/2and3/distutils/extension.pyi      |    8 +-
 typeshed/stdlib/2and3/pkgutil.pyi                  |    2 +-
 typeshed/stdlib/2and3/threading.pyi                |   13 +-
 typeshed/stdlib/2and3/webbrowser.pyi               |    4 +-
 .../stdlib/{3/xml/sax => 2and3/xml}/__init__.pyi   |    0
 typeshed/stdlib/2and3/xml/sax/__init__.pyi         |   32 +
 typeshed/stdlib/{2.7 => 2and3}/xml/sax/handler.pyi |    0
 .../stdlib/{2.7 => 2and3}/xml/sax/saxutils.pyi     |    0
 .../stdlib/{2.7 => 2and3}/xml/sax/xmlreader.pyi    |    0
 typeshed/stdlib/2and3/zipfile.pyi                  |    2 +-
 typeshed/stdlib/3.4/asyncio/__init__.pyi           |   12 +-
 typeshed/stdlib/3.4/asyncio/futures.pyi            |    9 +
 typeshed/stdlib/3.4/asyncio/locks.pyi              |    2 +-
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |   39 +-
 typeshed/stdlib/3.5/pathlib.pyi                    |   45 +-
 typeshed/stdlib/3/_importlib_modulespec.pyi        |    5 +-
 typeshed/stdlib/3/abc.pyi                          |    2 +-
 typeshed/stdlib/3/ast.pyi                          |    2 -
 typeshed/stdlib/3/builtins.pyi                     |   63 +-
 typeshed/stdlib/3/collections/__init__.pyi         |    2 +
 typeshed/stdlib/3/collections/abc.pyi              |   14 +
 typeshed/stdlib/3/concurrent/futures/_base.pyi     |    4 +-
 typeshed/stdlib/3/datetime.pyi                     |   24 +-
 typeshed/stdlib/3/functools.pyi                    |    2 +-
 typeshed/stdlib/3/importlib/abc.pyi                |   14 +-
 typeshed/stdlib/3/io.pyi                           |    1 +
 typeshed/stdlib/3/itertools.pyi                    |    4 +-
 typeshed/stdlib/3/multiprocessing/__init__.pyi     |   57 +-
 typeshed/stdlib/3/multiprocessing/managers.pyi     |    4 +-
 typeshed/stdlib/3/multiprocessing/pool.pyi         |   58 +-
 typeshed/stdlib/3/os/__init__.pyi                  |   11 +-
 typeshed/stdlib/3/queue.pyi                        |    6 +-
 typeshed/stdlib/3/resource.pyi                     |   21 +-
 typeshed/stdlib/3/select.pyi                       |    6 +-
 typeshed/stdlib/3/shutil.pyi                       |    5 +
 typeshed/stdlib/3/ssl.pyi                          |   17 +-
 typeshed/stdlib/3/struct.pyi                       |   14 +-
 typeshed/stdlib/3/subprocess.pyi                   |   15 +-
 typeshed/stdlib/3/time.pyi                         |   16 +-
 typeshed/stdlib/3/types.pyi                        |    1 -
 typeshed/stdlib/3/typing.pyi                       |   26 +-
 typeshed/stdlib/3/unittest.pyi                     |    2 +-
 typeshed/stdlib/3/urllib/request.pyi               |   10 +-
 typeshed/stdlib/3/urllib/robotparser.pyi           |    2 +-
 typeshed/third_party/2.7/tornado/__init__.pyi      |    0
 .../third_party/{2.7 => 2}/Crypto/Cipher/AES.pyi   |    0
 .../{2.7 => 2}/Crypto/Cipher/__init__.pyi          |    0
 .../{2.7 => 2}/Crypto/Random/__init__.pyi          |    0
 .../{2.7 => 2}/Crypto/Random/random.pyi            |    0
 .../3/xml => third_party/2/Crypto}/__init__.pyi    |    0
 .../{2.7/Crypto => 2/OpenSSL}/__init__.pyi         |    0
 typeshed/third_party/{2.7 => 2}/OpenSSL/crypto.pyi |    0
 typeshed/third_party/{2.7 => 2}/boto/__init__.pyi  |    0
 .../third_party/{2.7 => 2}/boto/connection.pyi     |    0
 .../third_party/{2.7 => 2}/boto/ec2/__init__.pyi   |    0
 .../{2.7 => 2}/boto/ec2/elb/__init__.pyi           |    0
 typeshed/third_party/{2.7 => 2}/boto/exception.pyi |    0
 .../{2.7/OpenSSL => 2/concurrent}/__init__.pyi     |    0
 .../{2.7 => 2}/concurrent/futures/__init__.pyi     |    0
 typeshed/third_party/{2.7 => 2}/croniter.pyi       |    2 +-
 .../{2.7/concurrent => 2/dateutil}/__init__.pyi    |    0
 .../third_party/{2.7 => 2}/dateutil/parser.pyi     |    0
 .../{2.7 => 2}/dateutil/relativedelta.pyi          |    0
 typeshed/third_party/{2.7 => 2}/enum.pyi           |    0
 .../{2.7 => 2}/fb303/FacebookService.pyi           |    0
 .../{2.7/dateutil => 2/fb303}/__init__.pyi         |    0
 typeshed/third_party/{2.7 => 2}/gflags.pyi         |    0
 .../{2.7/fb303 => 2/google}/__init__.pyi           |    0
 .../{2.7 => 2}/google/protobuf/__init__.pyi        |    0
 .../{2.7 => 2}/google/protobuf/descriptor.pyi      |    0
 .../{2.7 => 2}/google/protobuf/descriptor_pb2.pyi  |    0
 .../google/protobuf/internal}/__init__.pyi         |    0
 .../google/protobuf/internal/decoder.pyi           |    0
 .../google/protobuf/internal/encoder.pyi           |    0
 .../google/protobuf/internal/enum_type_wrapper.pyi |    0
 .../google/protobuf/internal/wire_format.pyi       |    0
 .../{2.7 => 2}/google/protobuf/message.pyi         |    0
 .../{2.7 => 2}/google/protobuf/reflection.pyi      |    0
 typeshed/third_party/{2.7 => 2}/itsdangerous.pyi   |    0
 typeshed/third_party/{2.7 => 2}/kazoo/__init__.pyi |    0
 typeshed/third_party/{2.7 => 2}/kazoo/client.pyi   |    0
 .../third_party/{2.7 => 2}/kazoo/exceptions.pyi    |    0
 .../{2.7 => 2}/kazoo/recipe/__init__.pyi           |    0
 .../{2.7 => 2}/kazoo/recipe/watchers.pyi           |    0
 typeshed/third_party/{2.7 => 2}/pycurl.pyi         |    0
 typeshed/third_party/{2.7 => 2}/pymssql.pyi        |    2 +-
 typeshed/third_party/{2.7 => 2}/redis/__init__.pyi |    0
 typeshed/third_party/{2.7 => 2}/redis/client.pyi   |    0
 .../third_party/{2.7 => 2}/redis/connection.pyi    |    0
 .../third_party/{2.7 => 2}/redis/exceptions.pyi    |    0
 typeshed/third_party/{2.7 => 2}/redis/utils.pyi    |    0
 .../third_party/{2.7 => 2}/requests/__init__.pyi   |    0
 .../third_party/{2.7 => 2}/requests/adapters.pyi   |   11 +-
 typeshed/third_party/{2.7 => 2}/requests/api.pyi   |    0
 typeshed/third_party/{2.7 => 2}/requests/auth.pyi  |    0
 .../third_party/{2.7 => 2}/requests/compat.pyi     |    0
 .../third_party/{2.7 => 2}/requests/cookies.pyi    |    0
 .../third_party/{2.7 => 2}/requests/exceptions.pyi |    0
 typeshed/third_party/{2.7 => 2}/requests/hooks.pyi |    0
 .../third_party/{2.7 => 2}/requests/models.pyi     |    0
 .../{2.7 => 2}/requests/packages/__init__.pyi      |    0
 .../requests/packages/urllib3/__init__.pyi         |    0
 .../requests/packages/urllib3/_collections.pyi     |    0
 .../requests/packages/urllib3/connection.pyi       |    0
 .../requests/packages/urllib3/connectionpool.pyi   |    0
 .../requests/packages/urllib3/contrib/__init__.pyi |    0
 .../requests/packages/urllib3/exceptions.pyi       |    0
 .../requests/packages/urllib3/fields.pyi           |    0
 .../requests/packages/urllib3/filepost.pyi         |    0
 .../packages/urllib3/packages/__init__.pyi         |    0
 .../packages/ssl_match_hostname/__init__.pyi       |    0
 .../ssl_match_hostname/_implementation.pyi         |    0
 .../requests/packages/urllib3/poolmanager.pyi      |    0
 .../requests/packages/urllib3/request.pyi          |    0
 .../requests/packages/urllib3/response.pyi         |    0
 .../requests/packages/urllib3/util/__init__.pyi    |    0
 .../requests/packages/urllib3/util/connection.pyi  |    0
 .../requests/packages/urllib3/util/request.pyi     |    0
 .../requests/packages/urllib3/util/response.pyi    |    0
 .../requests/packages/urllib3/util/retry.pyi       |    0
 .../requests/packages/urllib3/util/timeout.pyi     |    0
 .../requests/packages/urllib3/util/url.pyi         |    0
 .../third_party/{2.7 => 2}/requests/sessions.pyi   |    4 +-
 .../{2.7 => 2}/requests/status_codes.pyi           |    0
 .../third_party/{2.7 => 2}/requests/structures.pyi |    0
 typeshed/third_party/{2.7 => 2}/requests/utils.pyi |    0
 .../third_party/{2.7 => 2}/routes/__init__.pyi     |    0
 typeshed/third_party/{2.7 => 2}/routes/mapper.pyi  |    0
 typeshed/third_party/{2.7 => 2}/routes/util.pyi    |    0
 .../protobuf/internal => 2/scribe}/__init__.pyi    |    0
 typeshed/third_party/{2.7 => 2}/scribe/scribe.pyi  |    0
 typeshed/third_party/{2.7 => 2}/scribe/ttypes.pyi  |    0
 .../selenium/webdriver/remote/webdriver.pyi        |    0
 .../selenium/webdriver/remote/webelement.pyi       |    0
 typeshed/third_party/{2.7 => 2}/six/__init__.pyi   |    9 +-
 .../third_party/{2.7 => 2}/six/moves/__init__.pyi  |    0
 .../third_party/{2.7 => 2}/six/moves/cPickle.pyi   |    0
 .../{2.7 => 2}/six/moves/urllib/__init__.pyi       |    0
 .../{2.7 => 2}/six/moves/urllib/error.pyi          |    0
 .../{2.7 => 2}/six/moves/urllib/parse.pyi          |    0
 .../{2.7 => 2}/six/moves/urllib/request.pyi        |    0
 .../{2.7 => 2}/six/moves/urllib/response.pyi       |    0
 .../{2.7 => 2}/six/moves/urllib/robotparser.pyi    |    0
 .../{2.7 => 2}/six/moves/urllib_error.pyi          |    0
 .../{2.7 => 2}/six/moves/urllib_parse.pyi          |    0
 .../{2.7 => 2}/six/moves/urllib_request.pyi        |    0
 .../{2.7 => 2}/six/moves/urllib_response.pyi       |    0
 .../{2.7 => 2}/six/moves/urllib_robotparser.pyi    |    0
 .../third_party/{2.7 => 2}/sqlalchemy/__init__.pyi |    0
 .../{2.7 => 2}/sqlalchemy/databases/__init__.pyi   |    0
 .../{2.7 => 2}/sqlalchemy/databases/mysql.pyi      |    0
 .../{2.7 => 2}/sqlalchemy/dialects/__init__.pyi    |    0
 .../sqlalchemy/dialects/mysql/__init__.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/dialects/mysql/base.pyi  |    0
 .../{2.7 => 2}/sqlalchemy/engine/__init__.pyi      |    0
 .../{2.7 => 2}/sqlalchemy/engine/base.pyi          |    0
 .../{2.7 => 2}/sqlalchemy/engine/strategies.pyi    |    0
 .../{2.7 => 2}/sqlalchemy/engine/url.pyi           |    0
 typeshed/third_party/{2.7 => 2}/sqlalchemy/exc.pyi |    0
 .../{2.7 => 2}/sqlalchemy/inspection.pyi           |    0
 typeshed/third_party/{2.7 => 2}/sqlalchemy/log.pyi |    0
 .../{2.7 => 2}/sqlalchemy/orm/__init__.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/orm/session.pyi          |    0
 .../third_party/{2.7 => 2}/sqlalchemy/pool.pyi     |    0
 .../third_party/{2.7 => 2}/sqlalchemy/schema.pyi   |    0
 .../{2.7 => 2}/sqlalchemy/sql/__init__.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/sql/annotation.pyi       |    0
 .../third_party/{2.7 => 2}/sqlalchemy/sql/base.pyi |    0
 .../third_party/{2.7 => 2}/sqlalchemy/sql/ddl.pyi  |    0
 .../third_party/{2.7 => 2}/sqlalchemy/sql/dml.pyi  |    0
 .../{2.7 => 2}/sqlalchemy/sql/elements.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/sql/expression.pyi       |    0
 .../{2.7 => 2}/sqlalchemy/sql/functions.pyi        |    0
 .../{2.7 => 2}/sqlalchemy/sql/naming.pyi           |    0
 .../{2.7 => 2}/sqlalchemy/sql/operators.pyi        |    0
 .../{2.7 => 2}/sqlalchemy/sql/schema.pyi           |   31 +-
 .../{2.7 => 2}/sqlalchemy/sql/selectable.pyi       |   16 +-
 .../{2.7 => 2}/sqlalchemy/sql/sqltypes.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/sql/type_api.pyi         |    0
 .../{2.7 => 2}/sqlalchemy/sql/visitors.pyi         |    0
 .../third_party/{2.7 => 2}/sqlalchemy/types.pyi    |    0
 .../{2.7 => 2}/sqlalchemy/util/__init__.pyi        |    0
 .../{2.7 => 2}/sqlalchemy/util/_collections.pyi    |    0
 .../{2.7 => 2}/sqlalchemy/util/compat.pyi          |    0
 .../{2.7 => 2}/sqlalchemy/util/deprecations.pyi    |    0
 .../{2.7 => 2}/sqlalchemy/util/langhelpers.pyi     |    2 -
 typeshed/third_party/{2.7 => 2}/thrift/Thrift.pyi  |    0
 .../{2.7/scribe => 2/thrift}/__init__.pyi          |    0
 .../{2.7 => 2}/thrift/protocol/TBinaryProtocol.pyi |    0
 .../{2.7 => 2}/thrift/protocol/TProtocol.pyi       |    0
 .../{2.7 => 2}/thrift/protocol/__init__.pyi        |    0
 .../{2.7 => 2}/thrift/transport/TSocket.pyi        |    0
 .../{2.7 => 2}/thrift/transport/TTransport.pyi     |    0
 .../{2.7 => 2}/thrift/transport/__init__.pyi       |    0
 .../{2.7/thrift => 2/tornado}/__init__.pyi         |    0
 .../third_party/{2.7 => 2}/tornado/concurrent.pyi  |    0
 typeshed/third_party/{2.7 => 2}/tornado/gen.pyi    |    0
 .../third_party/{2.7 => 2}/tornado/httpclient.pyi  |    0
 .../third_party/{2.7 => 2}/tornado/httpserver.pyi  |    0
 .../third_party/{2.7 => 2}/tornado/httputil.pyi    |    0
 typeshed/third_party/{2.7 => 2}/tornado/ioloop.pyi |    0
 typeshed/third_party/{2.7 => 2}/tornado/locks.pyi  |    0
 .../third_party/{2.7 => 2}/tornado/netutil.pyi     |    0
 .../third_party/{2.7 => 2}/tornado/tcpserver.pyi   |    0
 .../third_party/{2.7 => 2}/tornado/testing.pyi     |    0
 typeshed/third_party/{2.7 => 2}/tornado/util.pyi   |    0
 typeshed/third_party/{2.7 => 2}/tornado/web.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/__init__.pyi  |    0
 typeshed/third_party/{2.7 => 2}/yaml/composer.pyi  |    0
 .../third_party/{2.7 => 2}/yaml/constructor.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/dumper.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/emitter.pyi   |    0
 typeshed/third_party/{2.7 => 2}/yaml/error.pyi     |    0
 typeshed/third_party/{2.7 => 2}/yaml/events.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/loader.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/nodes.pyi     |    0
 typeshed/third_party/{2.7 => 2}/yaml/parser.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/reader.pyi    |    0
 .../third_party/{2.7 => 2}/yaml/representer.pyi    |    0
 typeshed/third_party/{2.7 => 2}/yaml/resolver.pyi  |    0
 typeshed/third_party/{2.7 => 2}/yaml/scanner.pyi   |    0
 .../third_party/{2.7 => 2}/yaml/serializer.pyi     |    0
 typeshed/third_party/{2.7 => 2}/yaml/tokens.pyi    |    0
 typeshed/third_party/2and3/mypy_extensions.pyi     |    6 +
 typeshed/third_party/3/lxml/etree.pyi              |   12 +-
 typeshed/third_party/3/pkg_resources.pyi           |   12 +-
 typeshed/third_party/3/requests/adapters.pyi       |   13 +-
 typeshed/third_party/3/requests/sessions.pyi       |    6 +-
 typeshed/third_party/3/typed_ast/ast27.pyi         |    6 +-
 typeshed/third_party/3/typed_ast/ast35.pyi         |    2 -
 438 files changed, 5858 insertions(+), 3244 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 3c8fe23..67929db 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy-lang
-Version: 0.4.5
+Version: 0.4.6
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/lib-typing/2.7/test_typing.py b/lib-typing/2.7/test_typing.py
index 434afdb..d095f84 100644
--- a/lib-typing/2.7/test_typing.py
+++ b/lib-typing/2.7/test_typing.py
@@ -5,21 +5,27 @@ import pickle
 import re
 import sys
 from unittest import TestCase, main, SkipTest
+from copy import copy, deepcopy
 
 from typing import Any
 from typing import TypeVar, AnyStr
 from typing import T, KT, VT  # Not in __all__.
 from typing import Union, Optional
-from typing import Tuple
+from typing import Tuple, List, MutableMapping
 from typing import Callable
-from typing import Generic
+from typing import Generic, ClassVar
 from typing import cast
 from typing import Type
 from typing import NewType
 from typing import NamedTuple
 from typing import IO, TextIO, BinaryIO
 from typing import Pattern, Match
+import abc
 import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
 
 
 class BaseTestCase(TestCase):
@@ -38,6 +44,10 @@ class BaseTestCase(TestCase):
                 message += ' : %s' % msg
             raise self.failureException(message)
 
+    def clear_caches(self):
+        for f in typing._cleanups:
+            f()
+
 
 class Employee(object):
     pass
@@ -61,18 +71,11 @@ class AnyTests(BaseTestCase):
         with self.assertRaises(TypeError):
             isinstance(42, Any)
 
-    def test_any_subclass(self):
-        self.assertTrue(issubclass(Employee, Any))
-        self.assertTrue(issubclass(int, Any))
-        self.assertTrue(issubclass(type(None), Any))
-        self.assertTrue(issubclass(object, Any))
-
-    def test_others_any(self):
-        self.assertFalse(issubclass(Any, Employee))
-        self.assertFalse(issubclass(Any, int))
-        self.assertFalse(issubclass(Any, type(None)))
-        # However, Any is a subclass of object (this can't be helped).
-        self.assertTrue(issubclass(Any, object))
+    def test_any_subclass_type_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(Employee, Any)
+        with self.assertRaises(TypeError):
+            issubclass(Any, Employee)
 
     def test_repr(self):
         self.assertEqual(repr(Any), 'typing.Any')
@@ -87,32 +90,21 @@ class AnyTests(BaseTestCase):
         with self.assertRaises(TypeError):
             class A(Any):
                 pass
+        with self.assertRaises(TypeError):
+            class A(type(Any)):
+                pass
 
     def test_cannot_instantiate(self):
         with self.assertRaises(TypeError):
             Any()
+        with self.assertRaises(TypeError):
+            type(Any)()
 
     def test_cannot_subscript(self):
         with self.assertRaises(TypeError):
             Any[int]
 
     def test_any_is_subclass(self):
-        # Any should be considered a subclass of everything.
-        self.assertIsSubclass(Any, Any)
-        self.assertIsSubclass(Any, typing.List)
-        self.assertIsSubclass(Any, typing.List[int])
-        self.assertIsSubclass(Any, typing.List[T])
-        self.assertIsSubclass(Any, typing.Mapping)
-        self.assertIsSubclass(Any, typing.Mapping[str, int])
-        self.assertIsSubclass(Any, typing.Mapping[KT, VT])
-        self.assertIsSubclass(Any, Generic)
-        self.assertIsSubclass(Any, Generic[T])
-        self.assertIsSubclass(Any, Generic[KT, VT])
-        self.assertIsSubclass(Any, AnyStr)
-        self.assertIsSubclass(Any, Union)
-        self.assertIsSubclass(Any, Union[int, str])
-        self.assertIsSubclass(Any, typing.Match)
-        self.assertIsSubclass(Any, typing.Match[str])
         # These expressions must simply not fail.
         typing.Match[Any]
         typing.Pattern[Any]
@@ -123,13 +115,8 @@ class TypeVarTests(BaseTestCase):
 
     def test_basic_plain(self):
         T = TypeVar('T')
-        # Every class is a subclass of T.
-        self.assertIsSubclass(int, T)
-        self.assertIsSubclass(str, T)
         # T equals itself.
         self.assertEqual(T, T)
-        # T is a subclass of itself.
-        self.assertIsSubclass(T, T)
         # T is an instance of TypeVar
         self.assertIsInstance(T, TypeVar)
 
@@ -138,16 +125,12 @@ class TypeVarTests(BaseTestCase):
         with self.assertRaises(TypeError):
             isinstance(42, T)
 
-    def test_basic_constrained(self):
-        A = TypeVar('A', str, bytes)
-        # Only str and bytes are subclasses of A.
-        self.assertIsSubclass(str, A)
-        self.assertIsSubclass(bytes, A)
-        self.assertNotIsSubclass(int, A)
-        # A equals itself.
-        self.assertEqual(A, A)
-        # A is a subclass of itself.
-        self.assertIsSubclass(A, A)
+    def test_typevar_subclass_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            issubclass(int, T)
+        with self.assertRaises(TypeError):
+            issubclass(T, int)
 
     def test_constrained_error(self):
         with self.assertRaises(TypeError):
@@ -163,8 +146,9 @@ class TypeVarTests(BaseTestCase):
         self.assertEqual(Union[X, X], X)
         self.assertNotEqual(Union[X, int], Union[X])
         self.assertNotEqual(Union[X, int], Union[int])
-        self.assertEqual(Union[X, int].__union_params__, (X, int))
-        self.assertEqual(Union[X, int].__union_set_params__, {X, int})
+        self.assertEqual(Union[X, int].__args__, (X, int))
+        self.assertEqual(Union[X, int].__parameters__, (X,))
+        self.assertIs(Union[X, int].__origin__, Union)
 
     def test_union_constrained(self):
         A = TypeVar('A', str, bytes)
@@ -184,19 +168,6 @@ class TypeVarTests(BaseTestCase):
         self.assertNotEqual(TypeVar('T'), TypeVar('T'))
         self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
 
-    def test_subclass_as_unions(self):
-        # None of these are true -- each type var is its own world.
-        self.assertFalse(issubclass(TypeVar('T', int, str),
-                                    TypeVar('T', int, str)))
-        self.assertFalse(issubclass(TypeVar('T', int, float),
-                                    TypeVar('T', int, float, str)))
-        self.assertFalse(issubclass(TypeVar('T', int, str),
-                                    TypeVar('T', str, int)))
-        A = TypeVar('A', int, str)
-        B = TypeVar('B', int, str, float)
-        self.assertFalse(issubclass(A, B))
-        self.assertFalse(issubclass(B, A))
-
     def test_cannot_subclass_vars(self):
         with self.assertRaises(TypeError):
             class V(TypeVar('T')):
@@ -211,12 +182,6 @@ class TypeVarTests(BaseTestCase):
         with self.assertRaises(TypeError):
             TypeVar('A')()
 
-    def test_bound(self):
-        X = TypeVar('X', bound=Employee)
-        self.assertIsSubclass(Employee, X)
-        self.assertIsSubclass(Manager, X)
-        self.assertNotIsSubclass(int, X)
-
     def test_bound_errors(self):
         with self.assertRaises(TypeError):
             TypeVar('X', bound=42)
@@ -229,16 +194,27 @@ class UnionTests(BaseTestCase):
     def test_basics(self):
         u = Union[int, float]
         self.assertNotEqual(u, Union)
-        self.assertTrue(issubclass(int, u))
-        self.assertTrue(issubclass(float, u))
+
+    def test_subclass_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(int, Union)
+        with self.assertRaises(TypeError):
+            issubclass(Union, int)
+        with self.assertRaises(TypeError):
+            issubclass(int, Union[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(Union[int, str], int)
 
     def test_union_any(self):
         u = Union[Any]
         self.assertEqual(u, Any)
-        u = Union[int, Any]
-        self.assertEqual(u, Any)
-        u = Union[Any, int]
-        self.assertEqual(u, Any)
+        u1 = Union[int, Any]
+        u2 = Union[Any, int]
+        u3 = Union[Any, object]
+        self.assertEqual(u1, u2)
+        self.assertNotEqual(u1, Any)
+        self.assertNotEqual(u2, Any)
+        self.assertNotEqual(u3, Any)
 
     def test_union_object(self):
         u = Union[object]
@@ -248,29 +224,11 @@ class UnionTests(BaseTestCase):
         u = Union[object, int]
         self.assertEqual(u, object)
 
-    def test_union_any_object(self):
-        u = Union[object, Any]
-        self.assertEqual(u, Any)
-        u = Union[Any, object]
-        self.assertEqual(u, Any)
-
     def test_unordered(self):
         u1 = Union[int, float]
         u2 = Union[float, int]
         self.assertEqual(u1, u2)
 
-    def test_subclass(self):
-        u = Union[int, Employee]
-        self.assertTrue(issubclass(Manager, u))
-
-    def test_self_subclass(self):
-        self.assertTrue(issubclass(Union[KT, VT], Union))
-        self.assertFalse(issubclass(Union, Union[KT, VT]))
-
-    def test_multiple_inheritance(self):
-        u = Union[int, Employee]
-        self.assertTrue(issubclass(ManagingFounder, u))
-
     def test_single_class_disappears(self):
         t = Union[Employee]
         self.assertIs(t, Employee)
@@ -283,13 +241,6 @@ class UnionTests(BaseTestCase):
         u = Union[Employee, Manager]
         self.assertIs(u, Employee)
 
-    def test_weird_subclasses(self):
-        u = Union[Employee, int, float]
-        v = Union[int, float]
-        self.assertTrue(issubclass(v, u))
-        w = Union[int, Manager]
-        self.assertTrue(issubclass(w, u))
-
     def test_union_union(self):
         u = Union[int, float]
         v = Union[u, Employee]
@@ -307,6 +258,9 @@ class UnionTests(BaseTestCase):
             class C(Union):
                 pass
         with self.assertRaises(TypeError):
+            class C(type(Union)):
+                pass
+        with self.assertRaises(TypeError):
             class C(Union[int, str]):
                 pass
 
@@ -316,6 +270,13 @@ class UnionTests(BaseTestCase):
         u = Union[int, float]
         with self.assertRaises(TypeError):
             u()
+        with self.assertRaises(TypeError):
+            type(u)()
+
+    def test_union_generalization(self):
+        self.assertFalse(Union[str, typing.Iterable[int]] == str)
+        self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
+        self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
 
     def test_optional(self):
         o = Optional[int]
@@ -326,10 +287,6 @@ class UnionTests(BaseTestCase):
         with self.assertRaises(TypeError):
             Union[()]
 
-    def test_issubclass_union(self):
-        self.assertIsSubclass(Union[int, str], Union)
-        self.assertNotIsSubclass(int, Union)
-
     def test_union_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance(42, Union[int, str])
@@ -354,43 +311,17 @@ class UnionTests(BaseTestCase):
         Union[Elem, str]  # Nor should this
 
 
-class TypeVarUnionTests(BaseTestCase):
-
-    def test_simpler(self):
-        A = TypeVar('A', int, str, float)
-        B = TypeVar('B', int, str)
-        self.assertIsSubclass(A, A)
-        self.assertIsSubclass(B, B)
-        self.assertNotIsSubclass(B, A)
-        self.assertIsSubclass(A, Union[int, str, float])
-        self.assertNotIsSubclass(Union[int, str, float], A)
-        self.assertNotIsSubclass(Union[int, str], B)
-        self.assertIsSubclass(B, Union[int, str])
-        self.assertNotIsSubclass(A, B)
-        self.assertNotIsSubclass(Union[int, str, float], B)
-        self.assertNotIsSubclass(A, Union[int, str])
-
-    def test_var_union_subclass(self):
-        self.assertTrue(issubclass(T, Union[int, T]))
-        self.assertTrue(issubclass(KT, Union[KT, VT]))
-
-    def test_var_union(self):
-        TU = TypeVar('TU', Union[int, float], None)
-        self.assertIsSubclass(int, TU)
-        self.assertIsSubclass(float, TU)
-
-
 class TupleTests(BaseTestCase):
 
     def test_basics(self):
-        self.assertTrue(issubclass(Tuple[int, str], Tuple))
-        self.assertTrue(issubclass(Tuple[int, str], Tuple[int, str]))
-        self.assertFalse(issubclass(int, Tuple))
-        self.assertFalse(issubclass(Tuple[float, str], Tuple[int, str]))
-        self.assertFalse(issubclass(Tuple[int, str, int], Tuple[int, str]))
-        self.assertFalse(issubclass(Tuple[int, str], Tuple[int, str, int]))
+        with self.assertRaises(TypeError):
+            issubclass(Tuple, Tuple[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(tuple, Tuple[int, str])
+
+        class TP(tuple): pass
         self.assertTrue(issubclass(tuple, Tuple))
-        self.assertFalse(issubclass(Tuple, tuple))  # Can't have it both ways.
+        self.assertTrue(issubclass(TP, Tuple))
 
     def test_equality(self):
         self.assertEqual(Tuple[int], Tuple[int])
@@ -406,21 +337,7 @@ class TupleTests(BaseTestCase):
     def test_tuple_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance((0, 0), Tuple[int, int])
-        with self.assertRaises(TypeError):
-            isinstance((0, 0), Tuple)
-
-    def test_tuple_ellipsis_subclass(self):
-
-        class B(object):
-            pass
-
-        class C(B):
-            pass
-
-        self.assertNotIsSubclass(Tuple[B], Tuple[B, ...])
-        self.assertIsSubclass(Tuple[C, ...], Tuple[B, ...])
-        self.assertNotIsSubclass(Tuple[C, ...], Tuple[B])
-        self.assertNotIsSubclass(Tuple[C], Tuple[B, ...])
+        isinstance((0, 0), Tuple)
 
     def test_repr(self):
         self.assertEqual(repr(Tuple), 'typing.Tuple')
@@ -438,17 +355,9 @@ class TupleTests(BaseTestCase):
 class CallableTests(BaseTestCase):
 
     def test_self_subclass(self):
-        self.assertTrue(issubclass(Callable[[int], int], Callable))
-        self.assertFalse(issubclass(Callable, Callable[[int], int]))
-        self.assertTrue(issubclass(Callable[[int], int], Callable[[int], int]))
-        self.assertFalse(issubclass(Callable[[Employee], int],
-                                    Callable[[Manager], int]))
-        self.assertFalse(issubclass(Callable[[Manager], int],
-                                    Callable[[Employee], int]))
-        self.assertFalse(issubclass(Callable[[int], Employee],
-                                    Callable[[int], Manager]))
-        self.assertFalse(issubclass(Callable[[int], Manager],
-                                    Callable[[int], Employee]))
+        with self.assertRaises(TypeError):
+            self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
+        self.assertTrue(issubclass(type(lambda x: x), Callable))
 
     def test_eq_hash(self):
         self.assertEqual(Callable[[int], int], Callable[[int], int])
@@ -459,23 +368,24 @@ class CallableTests(BaseTestCase):
         self.assertNotEqual(Callable[[int], int], Callable[[], int])
         self.assertNotEqual(Callable[[int], int], Callable)
 
-    def test_cannot_subclass(self):
-        with self.assertRaises(TypeError):
-
-            class C(Callable):
-                pass
-
-        with self.assertRaises(TypeError):
-
-            class C(Callable[[int], int]):
-                pass
-
     def test_cannot_instantiate(self):
         with self.assertRaises(TypeError):
             Callable()
+        with self.assertRaises(TypeError):
+            type(Callable)()
         c = Callable[[int], str]
         with self.assertRaises(TypeError):
             c()
+        with self.assertRaises(TypeError):
+            type(c)()
+
+    def test_callable_wrong_forms(self):
+        with self.assertRaises(TypeError):
+            Callable[(), int]
+        with self.assertRaises(TypeError):
+            Callable[[()], int]
+        with self.assertRaises(TypeError):
+            Callable[[int, 1], 2]
 
     def test_callable_instance_works(self):
         def f():
@@ -503,6 +413,10 @@ class CallableTests(BaseTestCase):
         ctv = Callable[..., str]
         self.assertEqual(repr(ctv), 'typing.Callable[..., str]')
 
+    def test_ellipsis_in_generic(self):
+        # Shouldn't crash; see https://github.com/python/typing/issues/259
+        typing.List[Callable[..., str]]
+
 
 XK = TypeVar('XK', unicode, bytes)
 XV = TypeVar('XV')
@@ -570,7 +484,13 @@ class ProtocolTests(BaseTestCase):
     def test_protocol_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance(0, typing.SupportsAbs)
-
+        class C1(typing.SupportsInt):
+            def __int__(self):
+                return 42
+        class C2(C1):
+            pass
+        c = C2()
+        self.assertIsInstance(c, C1)
 
 class GenericTests(BaseTestCase):
 
@@ -587,6 +507,15 @@ class GenericTests(BaseTestCase):
         with self.assertRaises(TypeError):
             Y[unicode, unicode]
 
+    def test_generic_errors(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            Generic[T]()
+        with self.assertRaises(TypeError):
+            isinstance([], List[int])
+        with self.assertRaises(TypeError):
+            issubclass(list, List[int])
+
     def test_init(self):
         T = TypeVar('T')
         S = TypeVar('S')
@@ -597,9 +526,9 @@ class GenericTests(BaseTestCase):
 
     def test_repr(self):
         self.assertEqual(repr(SimpleMapping),
-                         __name__ + '.' + 'SimpleMapping<~XK, ~XV>')
+                         __name__ + '.' + 'SimpleMapping')
         self.assertEqual(repr(MySimpleMapping),
-                         __name__ + '.' + 'MySimpleMapping<~XK, ~XV>')
+                         __name__ + '.' + 'MySimpleMapping')
 
     def test_chain_repr(self):
         T = TypeVar('T')
@@ -623,7 +552,36 @@ class GenericTests(BaseTestCase):
         self.assertNotEqual(Z, Y[T])
 
         self.assertTrue(str(Z).endswith(
-            '.C<~T>[typing.Tuple[~S, ~T]]<~S, ~T>[~T, int]<~T>[str]'))
+            '.C[typing.Tuple[str, int]]'))
+
+    def test_new_repr(self):
+        T = TypeVar('T')
+        U = TypeVar('U', covariant=True)
+        S = TypeVar('S')
+
+        self.assertEqual(repr(List), 'typing.List')
+        self.assertEqual(repr(List[T]), 'typing.List[~T]')
+        self.assertEqual(repr(List[U]), 'typing.List[+U]')
+        self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
+        self.assertEqual(repr(List[int]), 'typing.List[int]')
+
+    def test_new_repr_complex(self):
+        T = TypeVar('T')
+        TS = TypeVar('TS')
+
+        self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
+        self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
+                         'typing.List[typing.Tuple[int, ~T]]')
+        self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
+                 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')
+
+    def test_new_repr_bare(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
+        self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
+        class C(typing.Dict[Any, Any]): pass
+        # this line should just work
+        repr(C.__mro__)
 
     def test_dict(self):
         T = TypeVar('T')
@@ -642,6 +600,217 @@ class GenericTests(BaseTestCase):
         c.bar = 'abc'
         self.assertEqual(c.__dict__, {'bar': 'abc'})
 
+    def test_false_subclasses(self):
+        class MyMapping(MutableMapping[str, str]): pass
+        self.assertNotIsInstance({}, MyMapping)
+        self.assertNotIsSubclass(dict, MyMapping)
+
+    def test_abc_bases(self):
+        class MM(MutableMapping[str, str]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+        # this should just work
+        MM().update()
+        self.assertIsInstance(MM(), collections_abc.MutableMapping)
+        self.assertIsInstance(MM(), MutableMapping)
+        self.assertNotIsInstance(MM(), List)
+        self.assertNotIsInstance({}, MM)
+
+    def test_multiple_bases(self):
+        class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
+            pass
+        with self.assertRaises(TypeError):
+            # consistent MRO not possible
+            class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
+                pass
+
+    def test_orig_bases(self):
+        T = TypeVar('T')
+        class C(typing.Dict[str, T]): pass
+        self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
+
+    def test_naive_runtime_checks(self):
+        def naive_dict_check(obj, tp):
+            # Check if a dictionary conforms to Dict type
+            if len(tp.__parameters__) > 0:
+                raise NotImplementedError
+            if tp.__args__:
+                KT, VT = tp.__args__
+                return all(isinstance(k, KT) and isinstance(v, VT)
+                   for k, v in obj.items())
+        self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int]))
+        self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int]))
+        with self.assertRaises(NotImplementedError):
+            naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T])
+
+        def naive_generic_check(obj, tp):
+            # Check if an instance conforms to the generic class
+            if not hasattr(obj, '__orig_class__'):
+                raise NotImplementedError
+            return obj.__orig_class__ == tp
+        class Node(Generic[T]): pass
+        self.assertTrue(naive_generic_check(Node[int](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), List))
+        with self.assertRaises(NotImplementedError):
+            naive_generic_check([1,2,3], Node[int])
+
+        def naive_list_base_check(obj, tp):
+            # Check if list conforms to a List subclass
+            return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
+                       for x in obj)
+        class C(List[int]): pass
+        self.assertTrue(naive_list_base_check([1, 2, 3], C))
+        self.assertFalse(naive_list_base_check(['a', 'b'], C))
+
+    def test_multi_subscr_base(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        V = TypeVar('V')
+        class C(List[T][U][V]): pass
+        class D(C, List[T][U][V]): pass
+        self.assertEqual(C.__parameters__, (V,))
+        self.assertEqual(D.__parameters__, (V,))
+        self.assertEqual(C[int].__parameters__, ())
+        self.assertEqual(D[int].__parameters__, ())
+        self.assertEqual(C[int].__args__, (int,))
+        self.assertEqual(D[int].__args__, (int,))
+        self.assertEqual(C.__bases__, (List,))
+        self.assertEqual(D.__bases__, (C, List))
+        self.assertEqual(C.__orig_bases__, (List[T][U][V],))
+        self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
+
+    def test_extended_generic_rules_eq(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        self.assertEqual(Tuple[T, T][int], Tuple[int, int])
+        self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
+        with self.assertRaises(TypeError):
+            Tuple[T, int][()]
+        with self.assertRaises(TypeError):
+            Tuple[T, U][T, ...]
+
+        self.assertEqual(Union[T, int][int], int)
+        self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
+        class Base(object): pass
+        class Derived(Base): pass
+        self.assertEqual(Union[T, Base][Derived], Base)
+        with self.assertRaises(TypeError):
+            Union[T, int][1]
+
+        self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
+        self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
+        with self.assertRaises(TypeError):
+            Callable[[T], U][..., int]
+        with self.assertRaises(TypeError):
+            Callable[[T], U][[], int]
+
+    def test_extended_generic_rules_repr(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
+                         'Union[Tuple, Callable]')
+        self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
+                         'Tuple')
+        self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
+                         'Callable[..., Union[int, NoneType]]')
+        self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
+                         'Callable[[], List[int]]')
+
+    def test_generic_forvard_ref(self):
+        LLT = List[List['CC']]
+        class CC: pass
+        self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]])
+        T = TypeVar('T')
+        AT = Tuple[T, ...]
+        self.assertIs(typing._eval_type(AT, globals(), locals()), AT)
+        CT = Callable[..., List[T]]
+        self.assertIs(typing._eval_type(CT, globals(), locals()), CT)
+
+    def test_extended_generic_rules_subclassing(self):
+        class T1(Tuple[T, KT]): pass
+        class T2(Tuple[T, ...]): pass
+        class C1(Callable[[T], T]): pass
+        class C2(Callable[..., int]):
+            def __call__(self):
+                return None
+
+        self.assertEqual(T1.__parameters__, (T, KT))
+        self.assertEqual(T1[int, str].__args__, (int, str))
+        self.assertEqual(T1[int, T].__origin__, T1)
+
+        self.assertEqual(T2.__parameters__, (T,))
+        with self.assertRaises(TypeError):
+            T1[int]
+        with self.assertRaises(TypeError):
+            T2[int, str]
+
+        self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
+        self.assertEqual(C2.__parameters__, ())
+        self.assertIsInstance(C2(), collections_abc.Callable)
+        self.assertIsSubclass(C2, collections_abc.Callable)
+        self.assertIsSubclass(C1, collections_abc.Callable)
+        self.assertIsInstance(T1(), tuple)
+        self.assertIsSubclass(T2, tuple)
+        self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
+        self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
+
+    def test_fail_with_bare_union(self):
+        with self.assertRaises(TypeError):
+            List[Union]
+        with self.assertRaises(TypeError):
+            Tuple[Optional]
+        with self.assertRaises(TypeError):
+            ClassVar[ClassVar]
+        with self.assertRaises(TypeError):
+            List[ClassVar[int]]
+
+    def test_fail_with_bare_generic(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            List[Generic]
+        with self.assertRaises(TypeError):
+            Tuple[Generic[T]]
+        with self.assertRaises(TypeError):
+            List[typing._Protocol]
+
+    def test_type_erasure_special(self):
+        T = TypeVar('T')
+        # this is the only test that checks type caching
+        self.clear_caches()
+        class MyTup(Tuple[T, T]): pass
+        self.assertIs(MyTup[int]().__class__, MyTup)
+        self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
+        class MyCall(Callable[..., T]):
+            def __call__(self): return None
+        self.assertIs(MyCall[T]().__class__, MyCall)
+        self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
+        class MyDict(typing.Dict[T, T]): pass
+        self.assertIs(MyDict[int]().__class__, MyDict)
+        self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
+        class MyDef(typing.DefaultDict[str, T]): pass
+        self.assertIs(MyDef[int]().__class__, MyDef)
+        self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
+
+    def test_all_repr_eq_any(self):
+        objs = (getattr(typing, el) for el in typing.__all__)
+        for obj in objs:
+            self.assertNotEqual(repr(obj), '')
+            self.assertEqual(obj, obj)
+            if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
+                self.assertEqual(obj[Any].__args__, (Any,))
+            if isinstance(obj, type):
+                for base in obj.__mro__:
+                    self.assertNotEqual(repr(base), '')
+                    self.assertEqual(base, base)
+
     def test_pickle(self):
         global C  # pickle wants to reference the class by name
         T = TypeVar('T')
@@ -661,6 +830,24 @@ class GenericTests(BaseTestCase):
             self.assertEqual(x.foo, 42)
             self.assertEqual(x.bar, 'abc')
             self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+        simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
+        for s in simples:
+            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+                z = pickle.dumps(s, proto)
+                x = pickle.loads(z)
+                self.assertEqual(s, x)
+
+    def test_copy_and_deepcopy(self):
+        T = TypeVar('T')
+        class Node(Generic[T]): pass
+        things = [Any, Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
+                  Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
+                  typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
+                  typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
+                  Union['T', int], List['T'], typing.Mapping['T', int]]
+        for t in things:
+            self.assertEqual(t, deepcopy(t))
+            self.assertEqual(t, copy(t))
 
     def test_errors(self):
         with self.assertRaises(TypeError):
@@ -679,12 +866,12 @@ class GenericTests(BaseTestCase):
         if not PY32:
             self.assertEqual(C.__qualname__,
                              'GenericTests.test_repr_2.<locals>.C')
-        self.assertEqual(repr(C).split('.')[-1], 'C<~T>')
+        self.assertEqual(repr(C).split('.')[-1], 'C')
         X = C[int]
         self.assertEqual(X.__module__, __name__)
         if not PY32:
-            self.assertEqual(X.__qualname__, 'C')
-        self.assertEqual(repr(X).split('.')[-1], 'C<~T>[int]')
+            self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
+        self.assertEqual(repr(X).split('.')[-1], 'C[int]')
 
         class Y(C[int]):
             pass
@@ -798,51 +985,44 @@ class GenericTests(BaseTestCase):
         with self.assertRaises(Exception):
             D[T]
 
+class ClassVarTests(BaseTestCase):
 
-class VarianceTests(BaseTestCase):
-
-    def test_invariance(self):
-        # Because of invariance, List[subclass of X] is not a subclass
-        # of List[X], and ditto for MutableSequence.
-        self.assertNotIsSubclass(typing.List[Manager], typing.List[Employee])
-        self.assertNotIsSubclass(typing.MutableSequence[Manager],
-                              typing.MutableSequence[Employee])
-        # It's still reflexive.
-        self.assertIsSubclass(typing.List[Employee], typing.List[Employee])
-        self.assertIsSubclass(typing.MutableSequence[Employee],
-                          typing.MutableSequence[Employee])
-
-    def test_covariance_tuple(self):
-        # Check covariace for Tuple (which are really special cases).
-        self.assertIsSubclass(Tuple[Manager], Tuple[Employee])
-        self.assertNotIsSubclass(Tuple[Employee], Tuple[Manager])
-        # And pairwise.
-        self.assertIsSubclass(Tuple[Manager, Manager],
-                              Tuple[Employee, Employee])
-        self.assertNotIsSubclass(Tuple[Employee, Employee],
-                              Tuple[Manager, Employee])
-        # And using ellipsis.
-        self.assertIsSubclass(Tuple[Manager, ...], Tuple[Employee, ...])
-        self.assertNotIsSubclass(Tuple[Employee, ...], Tuple[Manager, ...])
-
-    def test_covariance_sequence(self):
-        # Check covariance for Sequence (which is just a generic class
-        # for this purpose, but using a covariant type variable).
-        self.assertIsSubclass(typing.Sequence[Manager],
-                              typing.Sequence[Employee])
-        self.assertNotIsSubclass(typing.Sequence[Employee],
-                              typing.Sequence[Manager])
-
-    def test_covariance_mapping(self):
-        # Ditto for Mapping (covariant in the value, invariant in the key).
-        self.assertIsSubclass(typing.Mapping[Employee, Manager],
-                          typing.Mapping[Employee, Employee])
-        self.assertNotIsSubclass(typing.Mapping[Manager, Employee],
-                              typing.Mapping[Employee, Employee])
-        self.assertNotIsSubclass(typing.Mapping[Employee, Manager],
-                              typing.Mapping[Manager, Manager])
-        self.assertNotIsSubclass(typing.Mapping[Manager, Employee],
-                              typing.Mapping[Manager, Manager])
+    def test_basics(self):
+        with self.assertRaises(TypeError):
+            ClassVar[1]
+        with self.assertRaises(TypeError):
+            ClassVar[int, str]
+        with self.assertRaises(TypeError):
+            ClassVar[int][str]
+
+    def test_repr(self):
+        self.assertEqual(repr(ClassVar), 'typing.ClassVar')
+        cv = ClassVar[int]
+        self.assertEqual(repr(cv), 'typing.ClassVar[int]')
+        cv = ClassVar[Employee]
+        self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar)):
+                pass
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar[int])):
+                pass
+
+    def test_cannot_init(self):
+        with self.assertRaises(TypeError):
+            ClassVar()
+        with self.assertRaises(TypeError):
+            type(ClassVar)()
+        with self.assertRaises(TypeError):
+            type(ClassVar[Optional[int]])()
+
+    def test_no_isinstance(self):
+        with self.assertRaises(TypeError):
+            isinstance(1, ClassVar[int])
+        with self.assertRaises(TypeError):
+            issubclass(int, ClassVar)
 
 
 class CastTests(BaseTestCase):
@@ -917,7 +1097,6 @@ class CollectionsAbcTests(BaseTestCase):
         # path and could fail.  So call this a few times.
         self.assertIsInstance([], typing.Iterable)
         self.assertIsInstance([], typing.Iterable)
-        self.assertIsInstance([], typing.Iterable[int])
         self.assertNotIsInstance(42, typing.Iterable)
         # Just in case, also test issubclass() a few times.
         self.assertIsSubclass(list, typing.Iterable)
@@ -926,7 +1105,6 @@ class CollectionsAbcTests(BaseTestCase):
     def test_iterator(self):
         it = iter([])
         self.assertIsInstance(it, typing.Iterator)
-        self.assertIsInstance(it, typing.Iterator[int])
         self.assertNotIsInstance(42, typing.Iterator)
 
     def test_sized(self):
@@ -1083,10 +1261,6 @@ class CollectionsAbcTests(BaseTestCase):
             yield 42
         g = foo()
         self.assertIsSubclass(type(g), typing.Generator)
-        self.assertIsSubclass(typing.Generator[Manager, Employee, Manager],
-                          typing.Generator[Employee, Manager, Employee])
-        self.assertNotIsSubclass(typing.Generator[Manager, Manager, Manager],
-                              typing.Generator[Employee, Employee, Employee])
 
     def test_no_generator_instantiation(self):
         with self.assertRaises(TypeError):
@@ -1105,12 +1279,30 @@ class CollectionsAbcTests(BaseTestCase):
             MMA()
 
         class MMC(MMA):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
             def __len__(self):
                 return 0
 
         self.assertEqual(len(MMC()), 0)
+        assert callable(MMC.update)
+        self.assertIsInstance(MMC(), typing.Mapping)
 
         class MMB(typing.MutableMapping[KT, VT]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
             def __len__(self):
                 return 0
 
@@ -1125,6 +1317,81 @@ class CollectionsAbcTests(BaseTestCase):
         self.assertIsSubclass(MMB, typing.Mapping)
         self.assertIsSubclass(MMC, typing.Mapping)
 
+        self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
+        self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
+
+        self.assertIsSubclass(MMA, collections.Mapping)
+        self.assertIsSubclass(MMB, collections.Mapping)
+        self.assertIsSubclass(MMC, collections.Mapping)
+
+        self.assertIsSubclass(MMB[str, str], typing.Mapping)
+        self.assertIsSubclass(MMC, MMA)
+
+        class I(typing.Iterable): pass
+        self.assertNotIsSubclass(list, I)
+
+        class G(typing.Generator[int, int, int]): pass
+        def g(): yield 0
+        self.assertIsSubclass(G, typing.Generator)
+        self.assertIsSubclass(G, typing.Iterable)
+        if hasattr(collections, 'Generator'):
+            self.assertIsSubclass(G, collections.Generator)
+        self.assertIsSubclass(G, collections.Iterable)
+        self.assertNotIsSubclass(type(g), G)
+
+    def test_subclassing_subclasshook(self):
+
+        class Base(typing.Iterable):
+            @classmethod
+            def __subclasshook__(cls, other):
+                if other.__name__ == 'Foo':
+                    return True
+                else:
+                    return False
+
+        class C(Base): pass
+        class Foo: pass
+        class Bar: pass
+        self.assertIsSubclass(Foo, Base)
+        self.assertIsSubclass(Foo, C)
+        self.assertNotIsSubclass(Bar, C)
+
+    def test_subclassing_register(self):
+
+        class A(typing.Container): pass
+        class B(A): pass
+
+        class C: pass
+        A.register(C)
+        self.assertIsSubclass(C, A)
+        self.assertNotIsSubclass(C, B)
+
+        class D: pass
+        B.register(D)
+        self.assertIsSubclass(D, A)
+        self.assertIsSubclass(D, B)
+
+        class M(): pass
+        collections.MutableMapping.register(M)
+        self.assertIsSubclass(M, typing.Mapping)
+
+    def test_collections_as_base(self):
+
+        class M(collections.Mapping): pass
+        self.assertIsSubclass(M, typing.Mapping)
+        self.assertIsSubclass(M, typing.Iterable)
+
+        class S(collections.MutableSequence): pass
+        self.assertIsSubclass(S, typing.MutableSequence)
+        self.assertIsSubclass(S, typing.Iterable)
+
+        class I(collections.Iterable): pass
+        self.assertIsSubclass(I, typing.Iterable)
+
+        class A(collections.Mapping): pass
+        class B: pass
+        A.register(B)
+        self.assertIsSubclass(B, typing.Mapping)
 
 class TypeTests(BaseTestCase):
 
@@ -1155,6 +1422,19 @@ class TypeTests(BaseTestCase):
 
         joe = new_user(BasicUser)
 
+    def test_type_optional(self):
+        A = Optional[Type[BaseException]]
+
+        def foo(a):
+            # type: (A) -> Optional[BaseException]
+            if a is None:
+                return None
+            else:
+                return a()
+
+        assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+        assert foo(None) is None
+
 
 class NewTypeTests(BaseTestCase):
 
@@ -1220,22 +1500,16 @@ class RETests(BaseTestCase):
         pat = re.compile('[a-z]+', re.I)
         self.assertIsSubclass(pat.__class__, Pattern)
         self.assertIsSubclass(type(pat), Pattern)
-        self.assertIsSubclass(type(pat), Pattern[str])
+        self.assertIsInstance(pat, Pattern)
 
         mat = pat.search('12345abcde.....')
         self.assertIsSubclass(mat.__class__, Match)
-        self.assertIsSubclass(mat.__class__, Match[str])
-        self.assertIsSubclass(mat.__class__, Match[bytes])  # Sad but true.
         self.assertIsSubclass(type(mat), Match)
-        self.assertIsSubclass(type(mat), Match[str])
+        self.assertIsInstance(mat, Match)
 
+        # these should just work
         p = Pattern[Union[str, bytes]]
-        self.assertIsSubclass(Pattern[str], Pattern)
-        self.assertIsSubclass(Pattern[str], p)
-
         m = Match[Union[bytes, str]]
-        self.assertIsSubclass(Match[bytes], Match)
-        self.assertIsSubclass(Match[bytes], m)
 
     def test_errors(self):
         with self.assertRaises(TypeError):
@@ -1250,9 +1524,6 @@ class RETests(BaseTestCase):
             m[str]
         with self.assertRaises(TypeError):
             # We don't support isinstance().
-            isinstance(42, Pattern)
-        with self.assertRaises(TypeError):
-            # We don't support isinstance().
             isinstance(42, Pattern[str])
 
     def test_repr(self):
@@ -1277,7 +1548,7 @@ class RETests(BaseTestCase):
                 pass
 
         self.assertEqual(str(ex.exception),
-                         "A type alias cannot be subclassed")
+                         "Cannot subclass typing._TypeAlias")
 
 
 class AllTests(BaseTestCase):
@@ -1299,6 +1570,15 @@ class AllTests(BaseTestCase):
         # Check that Text is defined.
         self.assertIn('Text', a)
 
+    def test_respect_no_type_check(self):
+        @typing.no_type_check
+        class NoTpCheck(object):
+            class Inn(object):
+                def __init__(self, x): pass
+                    # type: (this is not actualy a type) -> None
+        self.assertTrue(NoTpCheck.__no_type_check__)
+        self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+
     def test_get_type_hints_dummy(self):
 
         def foo(x):
diff --git a/lib-typing/2.7/typing.py b/lib-typing/2.7/typing.py
index e7a0e14..5627697 100644
--- a/lib-typing/2.7/typing.py
+++ b/lib-typing/2.7/typing.py
@@ -18,6 +18,7 @@ __all__ = [
     # Super-special typing primitives.
     'Any',
     'Callable',
+    'ClassVar',
     'Generic',
     'Optional',
     'Tuple',
@@ -54,6 +55,7 @@ __all__ = [
     'DefaultDict',
     'List',
     'Set',
+    'FrozenSet',
     'NamedTuple',  # Not really a type.
     'Generator',
 
@@ -81,12 +83,19 @@ def _qualname(x):
         # Fall back to just name.
         return x.__name__
 
+def _trim_name(nm):
+    if nm.startswith('_') and nm not in ('_TypeAlias',
+                    '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
+        nm = nm[1:]
+    return nm
+
 
 class TypingMeta(type):
-    """Metaclass for every type defined below.
+    """Metaclass for most types defined in typing module
+    (not a part of public API).
 
-    This also defines a dummy constructor (all the work is done in
-    __new__) and a nicer repr().
+    This also defines a dummy constructor (all the work for most typing
+    constructs is done in __new__) and a nicer repr().
     """
 
     _is_protocol = False
@@ -107,8 +116,8 @@ class TypingMeta(type):
     def _eval_type(self, globalns, localns):
         """Override this in subclasses to interpret forward references.
 
-        For example, Union['C'] is internally stored as
-        Union[_ForwardRef('C')], which should evaluate to _Union[C],
+        For example, List['C'] is internally stored as
+        List[_ForwardRef('C')], which should evaluate to List[C],
         where C is an object found in globalns or localns (searching
         localns first, of course).
         """
@@ -118,50 +127,88 @@ class TypingMeta(type):
         pass
 
     def __repr__(self):
-        return '%s.%s' % (self.__module__, _qualname(self))
+        qname = _trim_name(_qualname(self))
+        return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(object):
+    """Internal indicator of special typing constructs."""
+    __metaclass__ = TypingMeta
+    __slots__ = ()
+
+    def __init__(self, *args, **kwds):
+        pass
+
+    def __new__(cls, *args, **kwds):
+        """Constructor.
+
+        This only exists to give a better error message in case
+        someone tries to subclass a special typing object (not a good idea).
+        """
+        if (len(args) == 3 and
+                isinstance(args[0], str) and
+                isinstance(args[1], tuple)):
+            # Close enough.
+            raise TypeError("Cannot subclass %r" % cls)
+        return super(_TypingBase, cls).__new__(cls)
+
+    # Things that are not classes also need these.
+    def _eval_type(self, globalns, localns):
+        return self
+
+    def _get_type_vars(self, tvars):
+        pass
+
+    def __repr__(self):
+        cls = type(self)
+        qname = _trim_name(_qualname(cls))
+        return '%s.%s' % (cls.__module__, qname)
+
+    def __call__(self, *args, **kwds):
+        raise TypeError("Cannot instantiate %r" % type(self))
+
 
+class _FinalTypingBase(_TypingBase):
+    """Internal mix-in class to prevent instantiation.
 
-class Final(object):
-    """Mix-in class to prevent instantiation."""
+    Prevents instantiation unless _root=True is given in class call.
+    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+    """
 
     __slots__ = ()
 
-    def __new__(self, *args, **kwds):
-        raise TypeError("Cannot instantiate %r" % self.__class__)
+    def __new__(cls, *args, **kwds):
+        self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
+        if '_root' in kwds and kwds['_root'] is True:
+            return self
+        raise TypeError("Cannot instantiate %r" % cls)
+
+    def __reduce__(self):
+        return _trim_name(type(self).__name__)
+
 
+class _ForwardRef(_TypingBase):
+    """Internal wrapper to hold a forward reference."""
 
-class _ForwardRef(TypingMeta):
-    """Wrapper to hold a forward reference."""
+    __slots__ = ('__forward_arg__', '__forward_code__',
+                 '__forward_evaluated__', '__forward_value__')
 
-    def __new__(cls, arg):
+    def __init__(self, arg):
+        super(_ForwardRef, self).__init__(arg)
         if not isinstance(arg, basestring):
-            raise TypeError('ForwardRef must be a string -- got %r' % (arg,))
+            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
         try:
             code = compile(arg, '<string>', 'eval')
         except SyntaxError:
-            raise SyntaxError('ForwardRef must be an expression -- got %r' %
+            raise SyntaxError('Forward reference must be an expression -- got %r' %
                               (arg,))
-        self = super(_ForwardRef, cls).__new__(cls, arg, (), {})
         self.__forward_arg__ = arg
         self.__forward_code__ = code
         self.__forward_evaluated__ = False
         self.__forward_value__ = None
-        typing_globals = globals()
-        frame = sys._getframe(1)
-        while frame is not None and frame.f_globals is typing_globals:
-            frame = frame.f_back
-        assert frame is not None
-        self.__forward_frame__ = frame
-        return self
 
     def _eval_type(self, globalns, localns):
-        if not isinstance(localns, dict):
-            raise TypeError('ForwardRef localns must be a dict -- got %r' %
-                            (localns,))
-        if not isinstance(globalns, dict):
-            raise TypeError('ForwardRef globalns must be a dict -- got %r' %
-                            (globalns,))
-        if not self.__forward_evaluated__:
+        if not self.__forward_evaluated__ or localns is not globalns:
             if globalns is None and localns is None:
                 globalns = localns = {}
             elif globalns is None:
@@ -178,44 +225,23 @@ class _ForwardRef(TypingMeta):
         raise TypeError("Forward references cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        if not self.__forward_evaluated__:
-            globalns = self.__forward_frame__.f_globals
-            localns = self.__forward_frame__.f_locals
-            try:
-                self._eval_type(globalns, localns)
-            except NameError:
-                return False  # Too early.
-        return issubclass(cls, self.__forward_value__)
+        raise TypeError("Forward references cannot be used with issubclass().")
 
     def __repr__(self):
         return '_ForwardRef(%r)' % (self.__forward_arg__,)
 
 
-class _TypeAlias(object):
+class _TypeAlias(_TypingBase):
     """Internal helper class for defining generic variants of concrete types.
 
-    Note that this is not a type; let's call it a pseudo-type.  It can
-    be used in instance and subclass checks, e.g. isinstance(m, Match)
-    or issubclass(type(m), Match).  However, it cannot be itself the
-    target of an issubclass() call; e.g. issubclass(Match, C) (for
-    some arbitrary class C) raises TypeError rather than returning
-    False.
+    Note that this is not a type; let's call it a pseudo-type.  It cannot
+    be used in instance and subclass checks in parameterized form, i.e.
+    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+    ``False``.
     """
 
     __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
 
-    def __new__(cls, *args, **kwds):
-        """Constructor.
-
-        This only exists to give a better error message in case
-        someone tries to subclass a type alias (not a good idea).
-        """
-        if (len(args) == 3 and
-                isinstance(args[0], basestring) and
-                isinstance(args[1], tuple)):
-            # Close enough.
-            raise TypeError("A type alias cannot be subclassed")
-        return object.__new__(cls)
 
     def __init__(self, name, type_var, impl_type, type_checker):
         """Initializer.
@@ -229,9 +255,9 @@ class _TypeAlias(object):
                 and returns a value that should be a type_var instance.
         """
         assert isinstance(name, basestring), repr(name)
-        assert isinstance(type_var, type), repr(type_var)
         assert isinstance(impl_type, type), repr(impl_type)
         assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
         self.name = name
         self.type_var = type_var
         self.impl_type = impl_type
@@ -241,36 +267,33 @@ class _TypeAlias(object):
         return "%s[%s]" % (self.name, _type_repr(self.type_var))
 
     def __getitem__(self, parameter):
-        assert isinstance(parameter, type), repr(parameter)
         if not isinstance(self.type_var, TypeVar):
             raise TypeError("%s cannot be further parameterized." % self)
-        if self.type_var.__constraints__:
-            if not issubclass(parameter, Union[self.type_var.__constraints__]):
+        if self.type_var.__constraints__ and isinstance(parameter, type):
+            if not issubclass(parameter, self.type_var.__constraints__):
                 raise TypeError("%s is not a valid substitution for %s." %
                                 (parameter, self.type_var))
+        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+            raise TypeError("%s cannot be re-parameterized." % self)
         return self.__class__(self.name, parameter,
                               self.impl_type, self.type_checker)
 
     def __instancecheck__(self, obj):
-        raise TypeError("Type aliases cannot be used with isinstance().")
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with isinstance().")
+        return isinstance(obj, self.impl_type)
 
     def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if isinstance(cls, _TypeAlias):
-            # Covariance.  For now, we compare by name.
-            return (cls.name == self.name and
-                    issubclass(cls.type_var, self.type_var))
-        else:
-            # Note that this is too lenient, because the
-            # implementation type doesn't carry information about
-            # whether it is about bytes or str (for example).
-            return issubclass(cls, self.impl_type)
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with issubclass().")
+        return issubclass(cls, self.impl_type)
 
 
 def _get_type_vars(types, tvars):
     for t in types:
-        if isinstance(t, TypingMeta):
+        if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
             t._get_type_vars(tvars)
 
 
@@ -281,14 +304,13 @@ def _type_vars(types):
 
 
 def _eval_type(t, globalns, localns):
-    if isinstance(t, TypingMeta):
+    if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
         return t._eval_type(globalns, localns)
-    else:
-        return t
+    return t
 
 
 def _type_check(arg, msg):
-    """Check that the argument is a type, and return it.
+    """Check that the argument is a type, and return it (internal helper).
 
     As a special case, accept None and return type(None) instead.
     Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
@@ -303,13 +325,19 @@ def _type_check(arg, msg):
         return type(None)
     if isinstance(arg, basestring):
         arg = _ForwardRef(arg)
-    if not isinstance(arg, (type, _TypeAlias)) and not callable(arg):
+    if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+        not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
         raise TypeError(msg + " Got %.100r." % (arg,))
+    # Bare Union etc. are not valid as type arguments
+    if (type(arg).__name__ in ('_Union', '_Optional')
+        and not getattr(arg, '__origin__', None)
+        or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
+        raise TypeError("Plain %s is not valid as type argument" % arg)
     return arg
 
 
 def _type_repr(obj):
-    """Return the repr() of an object, special-casing types.
+    """Return the repr() of an object, special-casing types (internal helper).
 
     If obj is a type, we return a shorter version than the default
     type.__repr__, based on the module and qualified name, which is
@@ -319,10 +347,76 @@ def _type_repr(obj):
     if isinstance(obj, type) and not isinstance(obj, TypingMeta):
         if obj.__module__ == '__builtin__':
             return _qualname(obj)
-        else:
-            return '%s.%s' % (obj.__module__, _qualname(obj))
-    else:
-        return repr(obj)
+        return '%s.%s' % (obj.__module__, _qualname(obj))
+    if obj is Ellipsis:
+        return('...')
+    if isinstance(obj, types.FunctionType):
+        return obj.__name__
+    return repr(obj)
+
+
+class ClassVarMeta(TypingMeta):
+    """Metaclass for _ClassVar"""
+
+    def __new__(cls, name, bases, namespace):
+        cls.assert_no_subclassing(bases)
+        self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
+        return self
+
+
+class _ClassVar(_FinalTypingBase):
+    """Special type construct to mark class variables.
+
+    An annotation wrapped in ClassVar indicates that a given
+    attribute is intended to be used as a class variable and
+    should not be set on instances of that class. Usage::
+
+      class Starship:
+          stats = {}  # type: ClassVar[Dict[str, int]] # class variable
+          damage = 10 # type: int                      # instance variable
+
+    ClassVar accepts only types and cannot be further subscribed.
+
+    Note that ClassVar is not a class itself, and should not
+    be used with isinstance() or issubclass().
+    """
+
+    __metaclass__ = ClassVarMeta
+    __slots__ = ('__type__',)
+
+    def __init__(self, tp=None, _root=False):
+        self.__type__ = tp
+
+    def __getitem__(self, item):
+        cls = type(self)
+        if self.__type__ is None:
+            return cls(_type_check(item,
+                       '{} accepts only types.'.format(cls.__name__[1:])),
+                       _root=True)
+        raise TypeError('{} cannot be further subscripted'
+                        .format(cls.__name__[1:]))
+
+    def _eval_type(self, globalns, localns):
+        return type(self)(_eval_type(self.__type__, globalns, localns),
+                          _root=True)
+
+    def __repr__(self):
+        r = super(_ClassVar, self).__repr__()
+        if self.__type__ is not None:
+            r += '[{}]'.format(_type_repr(self.__type__))
+        return r
+
+    def __hash__(self):
+        return hash((type(self).__name__, self.__type__))
+
+    def __eq__(self, other):
+        if not isinstance(other, _ClassVar):
+            return NotImplemented
+        if self.__type__ is not None:
+            return self.__type__ == other.__type__
+        return self is other
+
+ClassVar = _ClassVar(_root=True)
 
 
 class AnyMeta(TypingMeta):
@@ -333,26 +427,30 @@ class AnyMeta(TypingMeta):
         self = super(AnyMeta, cls).__new__(cls, name, bases, namespace)
         return self
 
-    def __instancecheck__(self, obj):
-        raise TypeError("Any cannot be used with isinstance().")
-
-    def __subclasscheck__(self, cls):
-        if not isinstance(cls, type):
-            return super(AnyMeta, cls).__subclasscheck__(cls)  # To TypeError.
-        return True
-
 
-class Any(Final):
+class _Any(_FinalTypingBase):
     """Special type indicating an unconstrained type.
 
-    - Any object is an instance of Any.
-    - Any class is a subclass of Any.
-    - As a special case, Any and object are subclasses of each other.
-    """
+    - Any is compatible with every type.
+    - Any assumed to have all methods.
+    - All values assumed to be instances of Any.
 
+    Note that all the above statements are true from the point of view of
+    static type checkers. At runtime, Any should not be used with instance
+    or class checks.
+    """
     __metaclass__ = AnyMeta
     __slots__ = ()
 
+    def __instancecheck__(self, obj):
+        raise TypeError("Any cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Any cannot be used with issubclass().")
+
+
+Any = _Any(_root=True)
+
 
 class TypeVarMeta(TypingMeta):
     def __new__(cls, name, bases, namespace):
@@ -360,7 +458,7 @@ class TypeVarMeta(TypingMeta):
         return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
 
 
-class TypeVar(TypingMeta):
+class TypeVar(_TypingBase):
     """Type variable.
 
     Usage::
@@ -373,7 +471,7 @@ class TypeVar(TypingMeta):
     as for generic function definitions.  See class Generic for more
     information on generic types.  Generic functions work as follows:
 
-      def repeat(x: T, n: int) -> Sequence[T]:
+      def repeat(x: T, n: int) -> List[T]:
           '''Return a list containing n references to x.'''
           return [x]*n
 
@@ -386,14 +484,12 @@ class TypeVar(TypingMeta):
     that if the arguments are instances of some subclass of str,
     the return type is still plain str.
 
-    At runtime, isinstance(x, T) will raise TypeError.  However,
-    issubclass(C, T) is true for any class C, and issubclass(str, A)
-    and issubclass(bytes, A) are true, and issubclass(int, A) is
-    false.  (TODO: Why is this needed?  This may change.  See #136.)
+    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
 
-    Type variables may be marked covariant or contravariant by passing
-    covariant=True or contravariant=True.  See PEP 484 for more
-    details.  By default type variables are invariant.
+    Type variables defined with covariant=True or contravariant=True
+    can be used do declare covariant or contravariant generic types.
+    See PEP 484 for more details. By default generic types are invariant
+    in all type variables.
 
     Type variables can be introspected. e.g.:
 
@@ -405,14 +501,17 @@ class TypeVar(TypingMeta):
     """
 
     __metaclass__ = TypeVarMeta
+    __slots__ = ('__name__', '__bound__', '__constraints__',
+                 '__covariant__', '__contravariant__')
 
-    def __new__(cls, name, *constraints, **kwargs):
+    def __init__(self, name, *constraints, **kwargs):
+        super(TypeVar, self).__init__(name, *constraints, **kwargs)
         bound = kwargs.get('bound', None)
         covariant = kwargs.get('covariant', False)
         contravariant = kwargs.get('contravariant', False)
-        self = super(TypeVar, cls).__new__(cls, name, (Final,), {})
+        self.__name__ = name
         if covariant and contravariant:
-            raise ValueError("Bivariant type variables are not supported.")
+            raise ValueError("Bivariant types are not supported.")
         self.__covariant__ = bool(covariant)
         self.__contravariant__ = bool(contravariant)
         if constraints and bound is not None:
@@ -425,7 +524,6 @@ class TypeVar(TypingMeta):
             self.__bound__ = _type_check(bound, "Bound must be a type.")
         else:
             self.__bound__ = None
-        return self
 
     def _get_type_vars(self, tvars):
         if self not in tvars:
@@ -444,16 +542,7 @@ class TypeVar(TypingMeta):
         raise TypeError("Type variables cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        # TODO: Make this raise TypeError too?
-        if cls is self:
-            return True
-        if cls is Any:
-            return True
-        if self.__bound__ is not None:
-            return issubclass(cls, self.__bound__)
-        if self.__constraints__:
-            return any(issubclass(cls, c) for c in self.__constraints__)
-        return True
+        raise TypeError("Type variables cannot be used with issubclass().")
 
 
 # Some unconstrained type variables.  These are used by the container types.
@@ -471,125 +560,144 @@ T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
 AnyStr = TypeVar('AnyStr', bytes, unicode)
 
 
-class UnionMeta(TypingMeta):
-    """Metaclass for Union."""
+def _replace_arg(arg, tvars, args):
+    """An internal helper function: replace arg if it is a type variable
+    found in tvars with corresponding substitution from args or
+    with corresponding substitution sub-tree if arg is a generic type.
+    """
 
-    def __new__(cls, name, bases, namespace, parameters=None):
-        cls.assert_no_subclassing(bases)
-        if parameters is None:
-            return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
-        if not isinstance(parameters, tuple):
-            raise TypeError("Expected parameters=<tuple>")
-        # Flatten out Union[Union[...], ...] and type-check non-Union args.
-        params = []
-        msg = "Union[arg, ...]: each arg must be a type."
-        for p in parameters:
-            if isinstance(p, UnionMeta):
-                params.extend(p.__union_params__)
-            else:
-                params.append(_type_check(p, msg))
-        # Weed out strict duplicates, preserving the first of each occurrence.
-        all_params = set(params)
-        if len(all_params) < len(params):
-            new_params = []
-            for t in params:
-                if t in all_params:
-                    new_params.append(t)
-                    all_params.remove(t)
-            params = new_params
-            assert not all_params, all_params
-        # Weed out subclasses.
-        # E.g. Union[int, Employee, Manager] == Union[int, Employee].
-        # If Any or object is present it will be the sole survivor.
-        # If both Any and object are present, Any wins.
-        # Never discard type variables, except against Any.
-        # (In particular, Union[str, AnyStr] != AnyStr.)
-        all_params = set(params)
-        for t1 in params:
-            if t1 is Any:
-                return Any
-            if isinstance(t1, TypeVar):
-                continue
-            if isinstance(t1, _TypeAlias):
-                # _TypeAlias is not a real class.
-                continue
-            if not isinstance(t1, type):
-                assert callable(t1)  # A callable might sneak through.
-                continue
-            if any(isinstance(t2, type) and issubclass(t1, t2)
-                   for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
-                all_params.remove(t1)
-        # It's not a union if there's only one type left.
-        if len(all_params) == 1:
-            return all_params.pop()
-        # Create a new class with these params.
-        self = super(UnionMeta, cls).__new__(cls, name, bases, {})
-        self.__union_params__ = tuple(t for t in params if t in all_params)
-        self.__union_set_params__ = frozenset(self.__union_params__)
-        return self
+    if tvars is None:
+        tvars = []
+    if hasattr(arg, '_subs_tree'):
+        return arg._subs_tree(tvars, args)
+    if isinstance(arg, TypeVar):
+        for i, tvar in enumerate(tvars):
+            if arg == tvar:
+                return args[i]
+    return arg
 
-    def _eval_type(self, globalns, localns):
-        p = tuple(_eval_type(t, globalns, localns)
-                  for t in self.__union_params__)
-        if p == self.__union_params__:
-            return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  p)
 
-    def _get_type_vars(self, tvars):
-        if self.__union_params__:
-            _get_type_vars(self.__union_params__, tvars)
+def _subs_tree(cls, tvars=None, args=None):
+    """An internal helper function: calculate substitution tree
+    for generic cls after replacing its type parameters with
+    substitutions in tvars -> args (if any).
+    Repeat the same following __origin__'s.
 
-    def __repr__(self):
-        r = super(UnionMeta, self).__repr__()
-        if self.__union_params__:
-            r += '[%s]' % (', '.join(_type_repr(t)
-                                     for t in self.__union_params__))
-        return r
+    Return a list of arguments with all possible substitutions
+    performed. Arguments that are generic classes themselves are represented
+    as tuples (so that no new classes are created by this function).
+    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+    """
 
-    def __getitem__(self, parameters):
-        if self.__union_params__ is not None:
-            raise TypeError(
-                "Cannot subscript an existing Union. Use Union[u, t] instead.")
-        if parameters == ():
-            raise TypeError("Cannot take a Union of no types.")
-        if not isinstance(parameters, tuple):
-            parameters = (parameters,)
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__), parameters)
+    if cls.__origin__ is None:
+        return cls
+    # Make of chain of origins (i.e. cls -> cls.__origin__)
+    current = cls.__origin__
+    orig_chain = []
+    while current.__origin__ is not None:
+        orig_chain.append(current)
+        current = current.__origin__
+    # Replace type variables in __args__ if asked ...
+    tree_args = []
+    for arg in cls.__args__:
+        tree_args.append(_replace_arg(arg, tvars, args))
+    # ... then continue replacing down the origin chain.
+    for ocls in orig_chain:
+        new_tree_args = []
+        for i, arg in enumerate(ocls.__args__):
+            new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+        tree_args = new_tree_args
+    return tree_args
+
+
+def _remove_dups_flatten(parameters):
+    """An internal helper for Union creation and substitution: flatten Union's
+    among parameters, then remove duplicates and strict subclasses.
+    """
 
-    def __eq__(self, other):
-        if not isinstance(other, UnionMeta):
-            return NotImplemented
-        return self.__union_set_params__ == other.__union_set_params__
+    # Flatten out Union[Union[...], ...].
+    params = []
+    for p in parameters:
+        if isinstance(p, _Union) and p.__origin__ is Union:
+            params.extend(p.__args__)
+        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+            params.extend(p[1:])
+        else:
+            params.append(p)
+    # Weed out strict duplicates, preserving the first of each occurrence.
+    all_params = set(params)
+    if len(all_params) < len(params):
+        new_params = []
+        for t in params:
+            if t in all_params:
+                new_params.append(t)
+                all_params.remove(t)
+        params = new_params
+        assert not all_params, all_params
+    # Weed out subclasses.
+    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+    # If object is present it will be sole survivor among proper classes.
+    # Never discard type variables.
+    # (In particular, Union[str, AnyStr] != AnyStr.)
+    all_params = set(params)
+    for t1 in params:
+        if not isinstance(t1, type):
+            continue
+        if any(isinstance(t2, type) and issubclass(t1, t2)
+               for t2 in all_params - {t1}
+               if not (isinstance(t2, GenericMeta) and
+                       t2.__origin__ is not None)):
+            all_params.remove(t1)
+    return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+    # Check correct count for parameters of a generic cls (internal helper).
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+    maxsize = 128
+    cache = {}
+    _cleanups.append(cache.clear)
+
+    @functools.wraps(func)
+    def inner(*args):
+        key = args
+        try:
+            return cache[key]
+        except TypeError:
+            # Assume it's an unhashable argument.
+            return func(*args)
+        except KeyError:
+            value = func(*args)
+            if len(cache) >= maxsize:
+                # If the cache grows too much, just start over.
+                cache.clear()
+            cache[key] = value
+            return value
 
-    def __hash__(self):
-        return hash(self.__union_set_params__)
+    return inner
 
-    def __instancecheck__(self, obj):
-        raise TypeError("Unions cannot be used with isinstance().")
 
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if self.__union_params__ is None:
-            return isinstance(cls, UnionMeta)
-        elif isinstance(cls, UnionMeta):
-            if cls.__union_params__ is None:
-                return False
-            return all(issubclass(c, self) for c in (cls.__union_params__))
-        elif isinstance(cls, TypeVar):
-            if cls in self.__union_params__:
-                return True
-            if cls.__constraints__:
-                return issubclass(Union[cls.__constraints__], self)
-            return False
-        else:
-            return any(issubclass(cls, t) for t in self.__union_params__)
+class UnionMeta(TypingMeta):
+    """Metaclass for Union."""
+
+    def __new__(cls, name, bases, namespace):
+        cls.assert_no_subclassing(bases)
+        return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
 
 
-class Union(Final):
+class _Union(_FinalTypingBase):
     """Union type; Union[X, Y] means either X or Y.
 
     To define a union, use e.g. Union[int, str].  Details:
@@ -624,265 +732,146 @@ class Union(Final):
         Union[Manager, int, Employee] == Union[int, Employee]
         Union[Employee, Manager] == Employee
 
-    - Corollary: if Any is present it is the sole survivor, e.g.::
-
-        Union[int, Any] == Any
-
     - Similar for object::
 
         Union[int, object] == object
 
-    - To cut a tie: Union[object, Any] == Union[Any, object] == Any.
-
     - You cannot subclass or instantiate a union.
 
-    - You cannot write Union[X][Y] (what would it mean?).
-
     - You can use Optional[X] as a shorthand for Union[X, None].
     """
 
     __metaclass__ = UnionMeta
-
-    # Unsubscripted Union type has params set to None.
-    __union_params__ = None
-    __union_set_params__ = None
-
-
-class OptionalMeta(TypingMeta):
-    """Metaclass for Optional."""
-
-    def __new__(cls, name, bases, namespace):
-        cls.assert_no_subclassing(bases)
-        return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
-
-    def __getitem__(self, arg):
-        arg = _type_check(arg, "Optional[t] requires a single type.")
-        return Union[arg, type(None)]
-
-
-class Optional(Final):
-    """Optional type.
-
-    Optional[X] is equivalent to Union[X, type(None)].
-    """
-
-    __metaclass__ = OptionalMeta
-    __slots__ = ()
-
-
-class TupleMeta(TypingMeta):
-    """Metaclass for Tuple."""
-
-    def __new__(cls, name, bases, namespace, parameters=None,
-                use_ellipsis=False):
-        cls.assert_no_subclassing(bases)
-        self = super(TupleMeta, cls).__new__(cls, name, bases, namespace)
-        self.__tuple_params__ = parameters
-        self.__tuple_use_ellipsis__ = use_ellipsis
+    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+    def __new__(cls, parameters=None, origin=None, *args, **kwds):
+        self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
+        if origin is None:
+            self.__parameters__ = None
+            self.__args__ = None
+            self.__origin__ = None
+            self.__tree_hash__ = hash(frozenset(('Union',)))
+            return self
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        if origin is Union:
+            parameters = _remove_dups_flatten(parameters)
+            # It's not a union if there's only one type left.
+            if len(parameters) == 1:
+                return parameters[0]
+        self.__parameters__ = _type_vars(parameters)
+        self.__args__ = parameters
+        self.__origin__ = origin
+        # Pre-calculate the __hash__ on instantiation.
+        # This improves speed for complex substitutions.
+        subs_tree = self._subs_tree()
+        if isinstance(subs_tree, tuple):
+            self.__tree_hash__ = hash(frozenset(subs_tree))
+        else:
+            self.__tree_hash__ = hash(subs_tree)
         return self
 
-    def _get_type_vars(self, tvars):
-        if self.__tuple_params__:
-            _get_type_vars(self.__tuple_params__, tvars)
-
     def _eval_type(self, globalns, localns):
-        tp = self.__tuple_params__
-        if tp is None:
+        if self.__args__ is None:
             return self
-        p = tuple(_eval_type(t, globalns, localns) for t in tp)
-        if p == self.__tuple_params__:
+        ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+        ev_origin = _eval_type(self.__origin__, globalns, localns)
+        if ev_args == self.__args__ and ev_origin == self.__origin__:
+            # Everything is already evaluated.
             return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  p)
+        return self.__class__(ev_args, ev_origin, _root=True)
+
+    def _get_type_vars(self, tvars):
+        if self.__origin__ and self.__parameters__:
+            _get_type_vars(self.__parameters__, tvars)
 
     def __repr__(self):
-        r = super(TupleMeta, self).__repr__()
-        if self.__tuple_params__ is not None:
-            params = [_type_repr(p) for p in self.__tuple_params__]
-            if self.__tuple_use_ellipsis__:
-                params.append('...')
-            if not params:
-                params.append('()')
-            r += '[%s]' % (
-                ', '.join(params))
-        return r
+        if self.__origin__ is None:
+            return super(_Union, self).__repr__()
+        tree = self._subs_tree()
+        if not isinstance(tree, tuple):
+            return repr(tree)
+        return tree[0]._tree_repr(tree)
+
+    def _tree_repr(self, tree):
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)
 
+    @_tp_cache
     def __getitem__(self, parameters):
-        if self.__tuple_params__ is not None:
-            raise TypeError("Cannot re-parameterize %r" % (self,))
+        if parameters == ():
+            raise TypeError("Cannot take a Union of no types.")
         if not isinstance(parameters, tuple):
             parameters = (parameters,)
-        if len(parameters) == 2 and parameters[1] == Ellipsis:
-            parameters = parameters[:1]
-            use_ellipsis = True
-            msg = "Tuple[t, ...]: t must be a type."
+        if self.__origin__ is None:
+            msg = "Union[arg, ...]: each arg must be a type."
         else:
-            use_ellipsis = False
-            msg = "Tuple[t0, t1, ...]: each t must be a type."
+            msg = "Parameters to generic types must be types."
         parameters = tuple(_type_check(p, msg) for p in parameters)
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__), parameters,
-                              use_ellipsis=use_ellipsis)
+        if self is not Union:
+            _check_generic(self, parameters)
+        return self.__class__(parameters, origin=self, _root=True)
+
+    def _subs_tree(self, tvars=None, args=None):
+        if self is Union:
+            return Union  # Nothing to substitute
+        tree_args = _subs_tree(self, tvars, args)
+        tree_args = _remove_dups_flatten(tree_args)
+        if len(tree_args) == 1:
+            return tree_args[0]  # Union of a single type is that type
+        return (Union,) + tree_args
 
     def __eq__(self, other):
-        if not isinstance(other, TupleMeta):
-            return NotImplemented
-        return (self.__tuple_params__ == other.__tuple_params__ and
-                self.__tuple_use_ellipsis__ == other.__tuple_use_ellipsis__)
+        if not isinstance(other, _Union):
+            return self._subs_tree() == other
+        return self.__tree_hash__ == other.__tree_hash__
 
     def __hash__(self):
-        return hash(self.__tuple_params__)
+        return self.__tree_hash__
 
     def __instancecheck__(self, obj):
-        raise TypeError("Tuples cannot be used with isinstance().")
+        raise TypeError("Unions cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if not isinstance(cls, type):
-            # To TypeError.
-            return super(TupleMeta, self).__subclasscheck__(cls)
-        if issubclass(cls, tuple):
-            return True  # Special case.
-        if not isinstance(cls, TupleMeta):
-            return super(TupleMeta, self).__subclasscheck__(cls)  # False.
-        if self.__tuple_params__ is None:
-            return True
-        if cls.__tuple_params__ is None:
-            return False  # ???
-        if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__:
-            return False
-        # Covariance.
-        return (len(self.__tuple_params__) == len(cls.__tuple_params__) and
-                all(issubclass(x, p)
-                    for x, p in zip(cls.__tuple_params__,
-                                    self.__tuple_params__)))
-
-
-class Tuple(Final):
-    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+        raise TypeError("Unions cannot be used with issubclass().")
 
-    Example: Tuple[T1, T2] is a tuple of two elements corresponding
-    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
-    of an int, a float and a string.
 
-    To specify a variable-length tuple of homogeneous type, use Sequence[T].
-    """
-
-    __metaclass__ = TupleMeta
-    __slots__ = ()
+Union = _Union(_root=True)
 
 
-class CallableMeta(TypingMeta):
-    """Metaclass for Callable."""
+class OptionalMeta(TypingMeta):
+    """Metaclass for Optional."""
 
-    def __new__(cls, name, bases, namespace,
-                args=None, result=None):
+    def __new__(cls, name, bases, namespace):
         cls.assert_no_subclassing(bases)
-        if args is None and result is None:
-            pass  # Must be 'class Callable'.
-        else:
-            if args is not Ellipsis:
-                if not isinstance(args, list):
-                    raise TypeError("Callable[args, result]: "
-                                    "args must be a list."
-                                    " Got %.100r." % (args,))
-                msg = "Callable[[arg, ...], result]: each arg must be a type."
-                args = tuple(_type_check(arg, msg) for arg in args)
-            msg = "Callable[args, result]: result must be a type."
-            result = _type_check(result, msg)
-        self = super(CallableMeta, cls).__new__(cls, name, bases, namespace)
-        self.__args__ = args
-        self.__result__ = result
-        return self
-
-    def _get_type_vars(self, tvars):
-        if self.__args__:
-            _get_type_vars(self.__args__, tvars)
-
-    def _eval_type(self, globalns, localns):
-        if self.__args__ is None and self.__result__ is None:
-            return self
-        if self.__args__ is Ellipsis:
-            args = self.__args__
-        else:
-            args = [_eval_type(t, globalns, localns) for t in self.__args__]
-        result = _eval_type(self.__result__, globalns, localns)
-        if args == self.__args__ and result == self.__result__:
-            return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  args=args, result=result)
-
-    def __repr__(self):
-        r = super(CallableMeta, self).__repr__()
-        if self.__args__ is not None or self.__result__ is not None:
-            if self.__args__ is Ellipsis:
-                args_r = '...'
-            else:
-                args_r = '[%s]' % ', '.join(_type_repr(t)
-                                            for t in self.__args__)
-            r += '[%s, %s]' % (args_r, _type_repr(self.__result__))
-        return r
-
-    def __getitem__(self, parameters):
-        if self.__args__ is not None or self.__result__ is not None:
-            raise TypeError("This Callable type is already parameterized.")
-        if not isinstance(parameters, tuple) or len(parameters) != 2:
-            raise TypeError(
-                "Callable must be used as Callable[[arg, ...], result].")
-        args, result = parameters
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__),
-                              args=args, result=result)
-
-    def __eq__(self, other):
-        if not isinstance(other, CallableMeta):
-            return NotImplemented
-        return (self.__args__ == other.__args__ and
-                self.__result__ == other.__result__)
-
-    def __hash__(self):
-        return hash(self.__args__) ^ hash(self.__result__)
+        return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
 
-    def __instancecheck__(self, obj):
-        # For unparametrized Callable we allow this, because
-        # typing.Callable should be equivalent to
-        # collections.abc.Callable.
-        if self.__args__ is None and self.__result__ is None:
-            return isinstance(obj, collections_abc.Callable)
-        else:
-            raise TypeError("Callable[] cannot be used with isinstance().")
 
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if not isinstance(cls, CallableMeta):
-            return super(CallableMeta, self).__subclasscheck__(cls)
-        if self.__args__ is None and self.__result__ is None:
-            return True
-        # We're not doing covariance or contravariance -- this is *invariance*.
-        return self == cls
+class _Optional(_FinalTypingBase):
+    """Optional type.
 
+    Optional[X] is equivalent to Union[X, None].
+    """
 
-class Callable(Final):
-    """Callable type; Callable[[int], str] is a function of (int) -> str.
+    __metaclass__ = OptionalMeta
+    __slots__ = ()
 
-    The subscription syntax must always be used with exactly two
-    values: the argument list and the return type.  The argument list
-    must be a list of types; the return type must be a single type.
+    @_tp_cache
+    def __getitem__(self, arg):
+        arg = _type_check(arg, "Optional[t] requires a single type.")
+        return Union[arg, type(None)]
 
-    There is no syntax to indicate optional or keyword arguments,
-    such function types are rarely used as callback types.
-    """
 
-    __metaclass__ = CallableMeta
-    __slots__ = ()
+Optional = _Optional(_root=True)
 
 
 def _gorg(a):
-    """Return the farthest origin of a generic class."""
+    """Return the farthest origin of a generic class (internal helper)."""
     assert isinstance(a, GenericMeta)
     while a.__origin__ is not None:
         a = a.__origin__
@@ -890,10 +879,10 @@ def _gorg(a):
 
 
 def _geqv(a, b):
-    """Return whether two generic classes are equivalent.
+    """Return whether two generic classes are equivalent (internal helper).
 
     The intention is to consider generic class X and any of its
-    parameterized forms (X[T], X[int], etc.)  as equivalent.
+    parameterized forms (X[T], X[int], etc.) as equivalent.
 
     However, X is not equivalent to a subclass of X.
 
@@ -918,13 +907,54 @@ def _next_in_mro(cls):
     return next_in_mro
 
 
+def _valid_for_check(cls):
+    """An internal helper to prohibit isinstance([1], List[str]) etc."""
+    if cls is Generic:
+        raise TypeError("Class %r cannot be used with class "
+                        "or instance checks" % cls)
+    if (cls.__origin__ is not None and
+        sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
+        raise TypeError("Parameterized generics cannot be used with class "
+                        "or instance checks")
+
+
+def _make_subclasshook(cls):
+    """Construct a __subclasshook__ callable that incorporates
+    the associated __extra__ class in subclass checks performed
+    against cls.
+    """
+    if isinstance(cls.__extra__, abc.ABCMeta):
+        # The logic mirrors that of ABCMeta.__subclasscheck__.
+        # Registered classes need not be checked here because
+        # cls and its extra share the same _abc_registry.
+        def __extrahook__(cls, subclass):
+            _valid_for_check(cls)
+            res = cls.__extra__.__subclasshook__(subclass)
+            if res is not NotImplemented:
+                return res
+            if cls.__extra__ in getattr(subclass, '__mro__', ()):
+                return True
+            for scls in cls.__extra__.__subclasses__():
+                if isinstance(scls, GenericMeta):
+                    continue
+                if issubclass(subclass, scls):
+                    return True
+            return NotImplemented
+    else:
+        # For non-ABC extras we'll just call issubclass().
+        def __extrahook__(cls, subclass):
+            _valid_for_check(cls)
+            if cls.__extra__ and issubclass(subclass, cls.__extra__):
+                return True
+            return NotImplemented
+    return classmethod(__extrahook__)
+
+
 class GenericMeta(TypingMeta, abc.ABCMeta):
     """Metaclass for generic types."""
 
     def __new__(cls, name, bases, namespace,
-                tvars=None, args=None, origin=None, extra=None):
-        self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
-
+                tvars=None, args=None, origin=None, extra=None, orig_bases=None):
         if tvars is not None:
             # Called from __getitem__() below.
             assert origin is not None
@@ -965,48 +995,107 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
                          ", ".join(str(g) for g in gvars)))
                 tvars = gvars
 
+        initial_bases = bases
+        if extra is None:
+            extra = namespace.get('__extra__')
+        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
+            bases = (extra,) + bases
+        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)
+
+        # remove bare Generic from bases if there are other generic bases
+        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+            bases = tuple(b for b in bases if b is not Generic)
+        self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
+
         self.__parameters__ = tvars
-        self.__args__ = args
+        # Be prepared that GenericMeta will be subclassed by TupleMeta
+        # and CallableMeta, those two allow ..., (), or [] in __args___.
+        self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
+                              () if a is _TypingEmpty else
+                              a for a in args) if args else None
         self.__origin__ = origin
-        self.__extra__ = namespace.get('__extra__')
+        self.__extra__ = extra
         # Speed hack (https://github.com/python/typing/issues/196).
         self.__next_in_mro__ = _next_in_mro(self)
+        # Preserve base classes on subclassing (__bases__ are type erased now).
+        if orig_bases is None:
+            self.__orig_bases__ = initial_bases
+
+        # This allows unparameterized generic collections to be used
+        # with issubclass() and isinstance() in the same way as their
+        # collections.abc counterparts (e.g., isinstance([], Iterable)).
+        if ('__subclasshook__' not in namespace and extra  # allow overriding
+            or hasattr(self.__subclasshook__, '__name__') and
+            self.__subclasshook__.__name__ == '__extrahook__'):
+            self.__subclasshook__ = _make_subclasshook(self)
+
+        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
+            self.__qualname__ = origin.__qualname__
+        self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
         return self
 
+    def __init__(self, *args, **kwargs):
+        super(GenericMeta, self).__init__(*args, **kwargs)
+        if isinstance(self.__extra__, abc.ABCMeta):
+            self._abc_registry = self.__extra__._abc_registry
+
     def _get_type_vars(self, tvars):
         if self.__origin__ and self.__parameters__:
             _get_type_vars(self.__parameters__, tvars)
 
+    def _eval_type(self, globalns, localns):
+        ev_origin = (self.__origin__._eval_type(globalns, localns)
+                     if self.__origin__ else None)
+        ev_args = tuple(_eval_type(a, globalns, localns) for a
+                        in self.__args__) if self.__args__ else None
+        if ev_origin == self.__origin__ and ev_args == self.__args__:
+            return self
+        return self.__class__(self.__name__,
+                              self.__bases__,
+                              dict(self.__dict__),
+                              tvars=_type_vars(ev_args) if ev_args else None,
+                              args=ev_args,
+                              origin=ev_origin,
+                              extra=self.__extra__,
+                              orig_bases=self.__orig_bases__)
+
     def __repr__(self):
-        if self.__origin__ is not None:
-            r = repr(self.__origin__)
-        else:
-            r = super(GenericMeta, self).__repr__()
-        if self.__args__:
-            r += '[%s]' % (
-                ', '.join(_type_repr(p) for p in self.__args__))
-        if self.__parameters__:
-            r += '<%s>' % (
-                ', '.join(_type_repr(p) for p in self.__parameters__))
-        return r
+        if self.__origin__ is None:
+            return super(GenericMeta, self).__repr__()
+        return self._tree_repr(self._subs_tree())
+
+    def _tree_repr(self, tree):
+        arg_list = []
+        for arg in tree[1:]:
+            if arg == ():
+                arg_list.append('()')
+            elif not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)
+
+    def _subs_tree(self, tvars=None, args=None):
+        if self.__origin__ is None:
+            return self
+        tree_args = _subs_tree(self, tvars, args)
+        return (_gorg(self),) + tuple(tree_args)
 
     def __eq__(self, other):
         if not isinstance(other, GenericMeta):
             return NotImplemented
-        if self.__origin__ is not None:
-            return (self.__origin__ is other.__origin__ and
-                    self.__args__ == other.__args__ and
-                    self.__parameters__ == other.__parameters__)
-        else:
+        if self.__origin__ is None or other.__origin__ is None:
             return self is other
+        return self.__tree_hash__ == other.__tree_hash__
 
     def __hash__(self):
-        return hash((self.__name__, self.__parameters__))
+        return self.__tree_hash__
 
+    @_tp_cache
     def __getitem__(self, params):
         if not isinstance(params, tuple):
             params = (params,)
-        if not params:
+        if not params and not _gorg(self) is Tuple:
             raise TypeError(
                 "Parameter list to %s[...] cannot be empty" % _qualname(self))
         msg = "Parameters to generic types must be types."
@@ -1020,34 +1109,31 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
                 raise TypeError(
                     "Parameters to Generic[...] must all be unique")
             tvars = params
-            args = None
+            args = params
+        elif self in (Tuple, Callable):
+            tvars = _type_vars(params)
+            args = params
         elif self is _Protocol:
             # _Protocol is internal, don't check anything.
             tvars = params
-            args = None
+            args = params
         elif self.__origin__ in (Generic, _Protocol):
             # Can't subscript Generic[...] or _Protocol[...].
             raise TypeError("Cannot subscript already-subscripted %s" %
                             repr(self))
         else:
             # Subscripting a regular Generic subclass.
-            if not self.__parameters__:
-                raise TypeError("%s is not a generic class" % repr(self))
-            alen = len(params)
-            elen = len(self.__parameters__)
-            if alen != elen:
-                raise TypeError(
-                    "Too %s parameters for %s; actual %s, expected %s" %
-                    ("many" if alen > elen else "few", repr(self), alen, elen))
+            _check_generic(self, params)
             tvars = _type_vars(params)
             args = params
         return self.__class__(self.__name__,
-                              (self,) + self.__bases__,
+                              self.__bases__,
                               dict(self.__dict__),
                               tvars=tvars,
                               args=args,
                               origin=self,
-                              extra=self.__extra__)
+                              extra=self.__extra__,
+                              orig_bases=self.__orig_bases__)
 
     def __instancecheck__(self, instance):
         # Since we extend ABC.__subclasscheck__ and
@@ -1055,58 +1141,41 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
         # latter, we must extend __instancecheck__ too. For simplicity
         # we just skip the cache check -- instance checks for generic
         # classes are supposed to be rare anyways.
-        return self.__subclasscheck__(instance.__class__)
+        if not isinstance(instance, type):
+            return issubclass(instance.__class__, self)
+        return False
 
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if isinstance(cls, GenericMeta):
-            # For a class C(Generic[T]) where T is co-variant,
-            # C[X] is a subclass of C[Y] iff X is a subclass of Y.
-            origin = self.__origin__
-            if origin is not None and origin is cls.__origin__:
-                assert len(self.__args__) == len(origin.__parameters__)
-                assert len(cls.__args__) == len(origin.__parameters__)
-                for p_self, p_cls, p_origin in zip(self.__args__,
-                                                   cls.__args__,
-                                                   origin.__parameters__):
-                    if isinstance(p_origin, TypeVar):
-                        if p_origin.__covariant__:
-                            # Covariant -- p_cls must be a subclass of p_self.
-                            if not issubclass(p_cls, p_self):
-                                break
-                        elif p_origin.__contravariant__:
-                            # Contravariant.  I think it's the opposite. :-)
-                            if not issubclass(p_self, p_cls):
-                                break
-                        else:
-                            # Invariant -- p_cls and p_self must equal.
-                            if p_self != p_cls:
-                                break
-                    else:
-                        # If the origin's parameter is not a typevar,
-                        # insist on invariance.
-                        if p_self != p_cls:
-                            break
-                else:
-                    return True
-                # If we break out of the loop, the superclass gets a chance.
-        if super(GenericMeta, self).__subclasscheck__(cls):
-            return True
-        if self.__extra__ is None or isinstance(cls, GenericMeta):
-            return False
-        return issubclass(cls, self.__extra__)
+    def __copy__(self):
+        return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
+                              self.__parameters__, self.__args__, self.__origin__,
+                              self.__extra__, self.__orig_bases__)
 
 
 # Prevent checks for Generic to crash when defining Generic.
 Generic = None
 
 
+def _generic_new(base_cls, cls, *args, **kwds):
+    # Assure type is erased on instantiation,
+    # but attempt to store it in __orig_class__
+    if cls.__origin__ is None:
+        return base_cls.__new__(cls)
+    else:
+        origin = _gorg(cls)
+        obj = base_cls.__new__(origin)
+        try:
+            obj.__orig_class__ = cls
+        except AttributeError:
+            pass
+        obj.__init__(*args, **kwds)
+        return obj
+
+
 class Generic(object):
     """Abstract base class for generic types.
 
-    A generic type is typically declared by inheriting from an
-    instantiation of this class with one or more type variables.
+    A generic type is typically declared by inheriting from
+    this class parameterized with one or more type variables.
     For example, a generic mapping type might be defined as::
 
       class Mapping(Generic[KT, VT]):
@@ -1127,13 +1196,155 @@ class Generic(object):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if cls.__origin__ is None:
-            return cls.__next_in_mro__.__new__(cls)
+        if _geqv(cls, Generic):
+            raise TypeError("Type Generic cannot be instantiated; "
+                            "it can be used only as a base class")
+        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty(object):
+    """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+    to allow empty list/tuple in specific places, without allowing them
+    to sneak in where prohibited.
+    """
+
+
+class _TypingEllipsis(object):
+    """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+    """Metaclass for Tuple (internal)."""
+
+    @_tp_cache
+    def __getitem__(self, parameters):
+        if self.__origin__ is not None or not _geqv(self, Tuple):
+            # Normal generic rules apply if this is not the first subscription
+            # or a subscription of a subclass.
+            return super(TupleMeta, self).__getitem__(parameters)
+        if parameters == ():
+            return super(TupleMeta, self).__getitem__((_TypingEmpty,))
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        if len(parameters) == 2 and parameters[1] is Ellipsis:
+            msg = "Tuple[t, ...]: t must be a type."
+            p = _type_check(parameters[0], msg)
+            return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
+        msg = "Tuple[t0, t1, ...]: each t must be a type."
+        parameters = tuple(_type_check(p, msg) for p in parameters)
+        return super(TupleMeta, self).__getitem__(parameters)
+
+    def __instancecheck__(self, obj):
+        if self.__args__ == None:
+            return isinstance(obj, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        if self.__args__ == None:
+            return issubclass(cls, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with issubclass().")
+
+
+class Tuple(tuple):
+    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+    Example: Tuple[T1, T2] is a tuple of two elements corresponding
+    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
+    of an int, a float and a string.
+
+    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+    """
+
+    __metaclass__ = TupleMeta
+    __extra__ = tuple
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Tuple):
+            raise TypeError("Type Tuple cannot be instantiated; "
+                            "use tuple() instead")
+        return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+    """ Metaclass for Callable."""
+
+    def __repr__(self):
+        if self.__origin__ is None:
+            return super(CallableMeta, self).__repr__()
+        return self._tree_repr(self._subs_tree())
+
+    def _tree_repr(self, tree):
+        if _gorg(self) is not Callable:
+            return super(CallableMeta, self)._tree_repr(tree)
+        # For actual Callable (not its subclass) we override
+        # super(CallableMeta, self)._tree_repr() for nice formatting.
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        if arg_list[0] == '...':
+            return repr(tree[0]) + '[..., %s]' % arg_list[1]
+        return (repr(tree[0]) +
+                '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+    def __getitem__(self, parameters):
+        """A thin wrapper around __getitem_inner__ to provide the latter
+        with hashable arguments to improve speed.
+        """
+
+        if  self.__origin__ is not None or not _geqv(self, Callable):
+            return super(CallableMeta, self).__getitem__(parameters)
+        if not isinstance(parameters, tuple) or len(parameters) != 2:
+            raise TypeError("Callable must be used as "
+                            "Callable[[arg, ...], result].")
+        args, result = parameters
+        if args is Ellipsis:
+            parameters = (Ellipsis, result)
         else:
-            origin = _gorg(cls)
-            obj = cls.__next_in_mro__.__new__(origin)
-            obj.__init__(*args, **kwds)
-            return obj
+            if not isinstance(args, list):
+                raise TypeError("Callable[args, result]: args must be a list."
+                                " Got %.100r." % (args,))
+            parameters = (tuple(args), result)
+        return self.__getitem_inner__(parameters)
+
+    @_tp_cache
+    def __getitem_inner__(self, parameters):
+        args, result = parameters
+        msg = "Callable[args, result]: result must be a type."
+        result = _type_check(result, msg)
+        if args is Ellipsis:
+            return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
+        msg = "Callable[[arg, ...], result]: each arg must be a type."
+        args = tuple(_type_check(arg, msg) for arg in args)
+        parameters = args + (result,)
+        return super(CallableMeta, self).__getitem__(parameters)
+
+
+class Callable(object):
+    """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+    The subscription syntax must always be used with exactly two
+    values: the argument list and the return type.  The argument list
+    must be a list of types or ellipsis; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments,
+    such function types are rarely used as callback types.
+    """
+
+    __metaclass__ = CallableMeta
+    __extra__ = collections_abc.Callable
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Callable):
+            raise TypeError("Type Callable cannot be instantiated; "
+                            "use a non-abstract subclass instead")
+        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
 
 
 def cast(typ, val):
@@ -1172,17 +1383,25 @@ def no_type_check(arg):
     """Decorator to indicate that annotations are not type hints.
 
     The argument must be a class or function; if it is a class, it
-    applies recursively to all methods defined in that class (but not
-    to methods defined in its superclasses or subclasses).
+    applies recursively to all methods and classes defined in that class
+    (but not to methods defined in its superclasses or subclasses).
 
-    This mutates the function(s) in place.
+    This mutates the function(s) or class(es) in place.
     """
     if isinstance(arg, type):
-        for obj in arg.__dict__.values():
+        arg_attrs = arg.__dict__.copy()
+        for attr, val in arg.__dict__.items():
+            if val in arg.__bases__:
+                arg_attrs.pop(attr)
+        for obj in arg_attrs.values():
             if isinstance(obj, types.FunctionType):
                 obj.__no_type_check__ = True
-    else:
+            if isinstance(obj, type):
+                no_type_check(obj)
+    try:
         arg.__no_type_check__ = True
+    except TypeError: # built-in classes
+        pass
     return arg
 
 
@@ -1248,6 +1467,8 @@ class _ProtocolMeta(GenericMeta):
     """
 
     def __instancecheck__(self, obj):
+        if _Protocol not in self.__bases__:
+            return super(_ProtocolMeta, self).__instancecheck__(obj)
         raise TypeError("Protocols cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
@@ -1294,7 +1515,9 @@ class _ProtocolMeta(GenericMeta):
                             attr != '__next_in_mro__' and
                             attr != '__parameters__' and
                             attr != '__origin__' and
+                            attr != '__orig_bases__' and
                             attr != '__extra__' and
+                            attr != '__tree_hash__' and
                             attr != '__module__'):
                         attrs.add(attr)
 
@@ -1304,7 +1527,7 @@ class _ProtocolMeta(GenericMeta):
 class _Protocol(object):
     """Internal base class for protocol classes.
 
-    This implements a simple-minded structural isinstance check
+    This implements a simple-minded structural issubclass check
     (similar but more general than the one-offs in collections.abc
     such as Hashable).
     """
@@ -1388,31 +1611,38 @@ class Container(Generic[T_co]):
 
 
 class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
+    __slots__ = ()
     __extra__ = collections_abc.Set
 
 
 class MutableSet(AbstractSet[T]):
+    __slots__ = ()
     __extra__ = collections_abc.MutableSet
 
 
-# NOTE: Only the value type is covariant.
+# NOTE: It is only covariant in the value type.
 class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
+    __slots__ = ()
     __extra__ = collections_abc.Mapping
 
 
 class MutableMapping(Mapping[KT, VT]):
+    __slots__ = ()
     __extra__ = collections_abc.MutableMapping
 
 
 if hasattr(collections_abc, 'Reversible'):
     class Sequence(Sized, Reversible[T_co], Container[T_co]):
+        __slots__ = ()
         __extra__ = collections_abc.Sequence
 else:
     class Sequence(Sized, Iterable[T_co], Container[T_co]):
+        __slots__ = ()
         __extra__ = collections_abc.Sequence
 
 
 class MutableSequence(Sequence[T]):
+    __slots__ = ()
     __extra__ = collections_abc.MutableSequence
 
 
@@ -1425,41 +1655,28 @@ ByteString.register(bytearray)
 
 
 class List(list, MutableSequence[T]):
+    __slots__ = ()
     __extra__ = list
 
     def __new__(cls, *args, **kwds):
         if _geqv(cls, List):
             raise TypeError("Type List cannot be instantiated; "
                             "use list() instead")
-        return list.__new__(cls, *args, **kwds)
+        return _generic_new(list, cls, *args, **kwds)
 
 
 class Set(set, MutableSet[T]):
+    __slots__ = ()
     __extra__ = set
 
     def __new__(cls, *args, **kwds):
         if _geqv(cls, Set):
             raise TypeError("Type Set cannot be instantiated; "
                             "use set() instead")
-        return set.__new__(cls, *args, **kwds)
-
-
-class _FrozenSetMeta(GenericMeta):
-    """This metaclass ensures set is not a subclass of FrozenSet.
-
-    Without this metaclass, set would be considered a subclass of
-    FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and
-    set is a subclass of that.
-    """
-
-    def __subclasscheck__(self, cls):
-        if issubclass(cls, Set):
-            return False
-        return super(_FrozenSetMeta, self).__subclasscheck__(cls)
+        return _generic_new(set, cls, *args, **kwds)
 
 
 class FrozenSet(frozenset, AbstractSet[T_co]):
-    __metaclass__ = _FrozenSetMeta
     __slots__ = ()
     __extra__ = frozenset
 
@@ -1467,45 +1684,51 @@ class FrozenSet(frozenset, AbstractSet[T_co]):
         if _geqv(cls, FrozenSet):
             raise TypeError("Type FrozenSet cannot be instantiated; "
                             "use frozenset() instead")
-        return frozenset.__new__(cls, *args, **kwds)
+        return _generic_new(frozenset, cls, *args, **kwds)
 
 
 class MappingView(Sized, Iterable[T_co]):
+    __slots__ = ()
     __extra__ = collections_abc.MappingView
 
 
 class KeysView(MappingView[KT], AbstractSet[KT]):
+    __slots__ = ()
     __extra__ = collections_abc.KeysView
 
 
 class ItemsView(MappingView[Tuple[KT, VT_co]],
                 AbstractSet[Tuple[KT, VT_co]],
                 Generic[KT, VT_co]):
+    __slots__ = ()
     __extra__ = collections_abc.ItemsView
 
 
 class ValuesView(MappingView[VT_co]):
+    __slots__ = ()
     __extra__ = collections_abc.ValuesView
 
 
 class Dict(dict, MutableMapping[KT, VT]):
+    __slots__ = ()
     __extra__ = dict
 
     def __new__(cls, *args, **kwds):
         if _geqv(cls, Dict):
             raise TypeError("Type Dict cannot be instantiated; "
                             "use dict() instead")
-        return dict.__new__(cls, *args, **kwds)
+        return _generic_new(dict, cls, *args, **kwds)
 
 
 class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
+    __slots__ = ()
     __extra__ = collections.defaultdict
 
     def __new__(cls, *args, **kwds):
         if _geqv(cls, DefaultDict):
             raise TypeError("Type DefaultDict cannot be instantiated; "
                             "use collections.defaultdict() instead")
-        return collections.defaultdict.__new__(cls, *args, **kwds)
+        return _generic_new(collections.defaultdict, cls, *args, **kwds)
 
 
 # Determine what base class to use for Generator.
@@ -1525,15 +1748,15 @@ class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
         if _geqv(cls, Generator):
             raise TypeError("Type Generator cannot be instantiated; "
                             "create a subclass instead")
-        return super(Generator, cls).__new__(cls, *args, **kwds)
+        return _generic_new(_G_base, cls, *args, **kwds)
 
 
 # Internal type variable used for Type[].
-CT = TypeVar('CT', covariant=True, bound=type)
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
 
 
 # This is not a real generic class.  Don't use outside annotations.
-class Type(type, Generic[CT]):
+class Type(Generic[CT_co]):
     """A special construct usable to annotate class objects.
 
     For example, suppose we have the following classes::
@@ -1556,6 +1779,7 @@ class Type(type, Generic[CT]):
 
     At this point the type checker knows that joe has type BasicUser.
     """
+    __slots__ = ()
     __extra__ = type
 
 
diff --git a/lib-typing/3.2/test_typing.py b/lib-typing/3.2/test_typing.py
index a7f8dd5..0910fd4 100644
--- a/lib-typing/3.2/test_typing.py
+++ b/lib-typing/3.2/test_typing.py
@@ -4,14 +4,15 @@ import pickle
 import re
 import sys
 from unittest import TestCase, main, skipUnless, SkipTest
+from copy import copy, deepcopy
 
 from typing import Any
 from typing import TypeVar, AnyStr
 from typing import T, KT, VT  # Not in __all__.
 from typing import Union, Optional
-from typing import Tuple
+from typing import Tuple, List, MutableMapping
 from typing import Callable
-from typing import Generic
+from typing import Generic, ClassVar
 from typing import cast
 from typing import get_type_hints
 from typing import no_type_check, no_type_check_decorator
@@ -20,7 +21,12 @@ from typing import NewType
 from typing import NamedTuple
 from typing import IO, TextIO, BinaryIO
 from typing import Pattern, Match
+import abc
 import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
 
 
 class BaseTestCase(TestCase):
@@ -39,6 +45,10 @@ class BaseTestCase(TestCase):
                 message += ' : %s' % msg
             raise self.failureException(message)
 
+    def clear_caches(self):
+        for f in typing._cleanups:
+            f()
+
 
 class Employee:
     pass
@@ -62,18 +72,11 @@ class AnyTests(BaseTestCase):
         with self.assertRaises(TypeError):
             isinstance(42, Any)
 
-    def test_any_subclass(self):
-        self.assertTrue(issubclass(Employee, Any))
-        self.assertTrue(issubclass(int, Any))
-        self.assertTrue(issubclass(type(None), Any))
-        self.assertTrue(issubclass(object, Any))
-
-    def test_others_any(self):
-        self.assertFalse(issubclass(Any, Employee))
-        self.assertFalse(issubclass(Any, int))
-        self.assertFalse(issubclass(Any, type(None)))
-        # However, Any is a subclass of object (this can't be helped).
-        self.assertTrue(issubclass(Any, object))
+    def test_any_subclass_type_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(Employee, Any)
+        with self.assertRaises(TypeError):
+            issubclass(Any, Employee)
 
     def test_repr(self):
         self.assertEqual(repr(Any), 'typing.Any')
@@ -88,32 +91,21 @@ class AnyTests(BaseTestCase):
         with self.assertRaises(TypeError):
             class A(Any):
                 pass
+        with self.assertRaises(TypeError):
+            class A(type(Any)):
+                pass
 
     def test_cannot_instantiate(self):
         with self.assertRaises(TypeError):
             Any()
+        with self.assertRaises(TypeError):
+            type(Any)()
 
     def test_cannot_subscript(self):
         with self.assertRaises(TypeError):
             Any[int]
 
-    def test_any_is_subclass(self):
-        # Any should be considered a subclass of everything.
-        self.assertIsSubclass(Any, Any)
-        self.assertIsSubclass(Any, typing.List)
-        self.assertIsSubclass(Any, typing.List[int])
-        self.assertIsSubclass(Any, typing.List[T])
-        self.assertIsSubclass(Any, typing.Mapping)
-        self.assertIsSubclass(Any, typing.Mapping[str, int])
-        self.assertIsSubclass(Any, typing.Mapping[KT, VT])
-        self.assertIsSubclass(Any, Generic)
-        self.assertIsSubclass(Any, Generic[T])
-        self.assertIsSubclass(Any, Generic[KT, VT])
-        self.assertIsSubclass(Any, AnyStr)
-        self.assertIsSubclass(Any, Union)
-        self.assertIsSubclass(Any, Union[int, str])
-        self.assertIsSubclass(Any, typing.Match)
-        self.assertIsSubclass(Any, typing.Match[str])
+    def test_any_works_with_alias(self):
         # These expressions must simply not fail.
         typing.Match[Any]
         typing.Pattern[Any]
@@ -124,13 +116,8 @@ class TypeVarTests(BaseTestCase):
 
     def test_basic_plain(self):
         T = TypeVar('T')
-        # Every class is a subclass of T.
-        self.assertIsSubclass(int, T)
-        self.assertIsSubclass(str, T)
         # T equals itself.
         self.assertEqual(T, T)
-        # T is a subclass of itself.
-        self.assertIsSubclass(T, T)
         # T is an instance of TypeVar
         self.assertIsInstance(T, TypeVar)
 
@@ -139,16 +126,12 @@ class TypeVarTests(BaseTestCase):
         with self.assertRaises(TypeError):
             isinstance(42, T)
 
-    def test_basic_constrained(self):
-        A = TypeVar('A', str, bytes)
-        # Only str and bytes are subclasses of A.
-        self.assertIsSubclass(str, A)
-        self.assertIsSubclass(bytes, A)
-        self.assertNotIsSubclass(int, A)
-        # A equals itself.
-        self.assertEqual(A, A)
-        # A is a subclass of itself.
-        self.assertIsSubclass(A, A)
+    def test_typevar_subclass_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            issubclass(int, T)
+        with self.assertRaises(TypeError):
+            issubclass(T, int)
 
     def test_constrained_error(self):
         with self.assertRaises(TypeError):
@@ -164,8 +147,9 @@ class TypeVarTests(BaseTestCase):
         self.assertEqual(Union[X, X], X)
         self.assertNotEqual(Union[X, int], Union[X])
         self.assertNotEqual(Union[X, int], Union[int])
-        self.assertEqual(Union[X, int].__union_params__, (X, int))
-        self.assertEqual(Union[X, int].__union_set_params__, {X, int})
+        self.assertEqual(Union[X, int].__args__, (X, int))
+        self.assertEqual(Union[X, int].__parameters__, (X,))
+        self.assertIs(Union[X, int].__origin__, Union)
 
     def test_union_constrained(self):
         A = TypeVar('A', str, bytes)
@@ -185,19 +169,6 @@ class TypeVarTests(BaseTestCase):
         self.assertNotEqual(TypeVar('T'), TypeVar('T'))
         self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
 
-    def test_subclass_as_unions(self):
-        # None of these are true -- each type var is its own world.
-        self.assertFalse(issubclass(TypeVar('T', int, str),
-                                    TypeVar('T', int, str)))
-        self.assertFalse(issubclass(TypeVar('T', int, float),
-                                    TypeVar('T', int, float, str)))
-        self.assertFalse(issubclass(TypeVar('T', int, str),
-                                    TypeVar('T', str, int)))
-        A = TypeVar('A', int, str)
-        B = TypeVar('B', int, str, float)
-        self.assertFalse(issubclass(A, B))
-        self.assertFalse(issubclass(B, A))
-
     def test_cannot_subclass_vars(self):
         with self.assertRaises(TypeError):
             class V(TypeVar('T')):
@@ -212,12 +183,6 @@ class TypeVarTests(BaseTestCase):
         with self.assertRaises(TypeError):
             TypeVar('A')()
 
-    def test_bound(self):
-        X = TypeVar('X', bound=Employee)
-        self.assertIsSubclass(Employee, X)
-        self.assertIsSubclass(Manager, X)
-        self.assertNotIsSubclass(int, X)
-
     def test_bound_errors(self):
         with self.assertRaises(TypeError):
             TypeVar('X', bound=42)
@@ -230,16 +195,27 @@ class UnionTests(BaseTestCase):
     def test_basics(self):
         u = Union[int, float]
         self.assertNotEqual(u, Union)
-        self.assertTrue(issubclass(int, u))
-        self.assertTrue(issubclass(float, u))
+
+    def test_subclass_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(int, Union)
+        with self.assertRaises(TypeError):
+            issubclass(Union, int)
+        with self.assertRaises(TypeError):
+            issubclass(int, Union[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(Union[int, str], int)
 
     def test_union_any(self):
         u = Union[Any]
         self.assertEqual(u, Any)
-        u = Union[int, Any]
-        self.assertEqual(u, Any)
-        u = Union[Any, int]
-        self.assertEqual(u, Any)
+        u1 = Union[int, Any]
+        u2 = Union[Any, int]
+        u3 = Union[Any, object]
+        self.assertEqual(u1, u2)
+        self.assertNotEqual(u1, Any)
+        self.assertNotEqual(u2, Any)
+        self.assertNotEqual(u3, Any)
 
     def test_union_object(self):
         u = Union[object]
@@ -249,29 +225,11 @@ class UnionTests(BaseTestCase):
         u = Union[object, int]
         self.assertEqual(u, object)
 
-    def test_union_any_object(self):
-        u = Union[object, Any]
-        self.assertEqual(u, Any)
-        u = Union[Any, object]
-        self.assertEqual(u, Any)
-
     def test_unordered(self):
         u1 = Union[int, float]
         u2 = Union[float, int]
         self.assertEqual(u1, u2)
 
-    def test_subclass(self):
-        u = Union[int, Employee]
-        self.assertTrue(issubclass(Manager, u))
-
-    def test_self_subclass(self):
-        self.assertTrue(issubclass(Union[KT, VT], Union))
-        self.assertFalse(issubclass(Union, Union[KT, VT]))
-
-    def test_multiple_inheritance(self):
-        u = Union[int, Employee]
-        self.assertTrue(issubclass(ManagingFounder, u))
-
     def test_single_class_disappears(self):
         t = Union[Employee]
         self.assertIs(t, Employee)
@@ -284,13 +242,6 @@ class UnionTests(BaseTestCase):
         u = Union[Employee, Manager]
         self.assertIs(u, Employee)
 
-    def test_weird_subclasses(self):
-        u = Union[Employee, int, float]
-        v = Union[int, float]
-        self.assertTrue(issubclass(v, u))
-        w = Union[int, Manager]
-        self.assertTrue(issubclass(w, u))
-
     def test_union_union(self):
         u = Union[int, float]
         v = Union[u, Employee]
@@ -308,15 +259,27 @@ class UnionTests(BaseTestCase):
             class C(Union):
                 pass
         with self.assertRaises(TypeError):
+            class C(type(Union)):
+                pass
+        with self.assertRaises(TypeError):
             class C(Union[int, str]):
                 pass
 
     def test_cannot_instantiate(self):
         with self.assertRaises(TypeError):
             Union()
+        with self.assertRaises(TypeError):
+            type(Union)()
         u = Union[int, float]
         with self.assertRaises(TypeError):
             u()
+        with self.assertRaises(TypeError):
+            type(u)()
+
+    def test_union_generalization(self):
+        self.assertFalse(Union[str, typing.Iterable[int]] == str)
+        self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
+        self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
 
     def test_optional(self):
         o = Optional[int]
@@ -327,10 +290,6 @@ class UnionTests(BaseTestCase):
         with self.assertRaises(TypeError):
             Union[()]
 
-    def test_issubclass_union(self):
-        self.assertIsSubclass(Union[int, str], Union)
-        self.assertNotIsSubclass(int, Union)
-
     def test_union_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance(42, Union[int, str])
@@ -355,43 +314,17 @@ class UnionTests(BaseTestCase):
         Union[Elem, str]  # Nor should this
 
 
-class TypeVarUnionTests(BaseTestCase):
-
-    def test_simpler(self):
-        A = TypeVar('A', int, str, float)
-        B = TypeVar('B', int, str)
-        self.assertIsSubclass(A, A)
-        self.assertIsSubclass(B, B)
-        self.assertNotIsSubclass(B, A)
-        self.assertIsSubclass(A, Union[int, str, float])
-        self.assertNotIsSubclass(Union[int, str, float], A)
-        self.assertNotIsSubclass(Union[int, str], B)
-        self.assertIsSubclass(B, Union[int, str])
-        self.assertNotIsSubclass(A, B)
-        self.assertNotIsSubclass(Union[int, str, float], B)
-        self.assertNotIsSubclass(A, Union[int, str])
-
-    def test_var_union_subclass(self):
-        self.assertTrue(issubclass(T, Union[int, T]))
-        self.assertTrue(issubclass(KT, Union[KT, VT]))
-
-    def test_var_union(self):
-        TU = TypeVar('TU', Union[int, float], None)
-        self.assertIsSubclass(int, TU)
-        self.assertIsSubclass(float, TU)
-
-
 class TupleTests(BaseTestCase):
 
     def test_basics(self):
-        self.assertTrue(issubclass(Tuple[int, str], Tuple))
-        self.assertTrue(issubclass(Tuple[int, str], Tuple[int, str]))
-        self.assertFalse(issubclass(int, Tuple))
-        self.assertFalse(issubclass(Tuple[float, str], Tuple[int, str]))
-        self.assertFalse(issubclass(Tuple[int, str, int], Tuple[int, str]))
-        self.assertFalse(issubclass(Tuple[int, str], Tuple[int, str, int]))
+        with self.assertRaises(TypeError):
+            issubclass(Tuple, Tuple[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(tuple, Tuple[int, str])
+
+        class TP(tuple): ...
         self.assertTrue(issubclass(tuple, Tuple))
-        self.assertFalse(issubclass(Tuple, tuple))  # Can't have it both ways.
+        self.assertTrue(issubclass(TP, Tuple))
 
     def test_equality(self):
         self.assertEqual(Tuple[int], Tuple[int])
@@ -407,21 +340,7 @@ class TupleTests(BaseTestCase):
     def test_tuple_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance((0, 0), Tuple[int, int])
-        with self.assertRaises(TypeError):
-            isinstance((0, 0), Tuple)
-
-    def test_tuple_ellipsis_subclass(self):
-
-        class B:
-            pass
-
-        class C(B):
-            pass
-
-        self.assertNotIsSubclass(Tuple[B], Tuple[B, ...])
-        self.assertIsSubclass(Tuple[C, ...], Tuple[B, ...])
-        self.assertNotIsSubclass(Tuple[C, ...], Tuple[B])
-        self.assertNotIsSubclass(Tuple[C], Tuple[B, ...])
+        self.assertIsInstance((0, 0), Tuple)
 
     def test_repr(self):
         self.assertEqual(repr(Tuple), 'typing.Tuple')
@@ -439,17 +358,9 @@ class TupleTests(BaseTestCase):
 class CallableTests(BaseTestCase):
 
     def test_self_subclass(self):
-        self.assertTrue(issubclass(Callable[[int], int], Callable))
-        self.assertFalse(issubclass(Callable, Callable[[int], int]))
-        self.assertTrue(issubclass(Callable[[int], int], Callable[[int], int]))
-        self.assertFalse(issubclass(Callable[[Employee], int],
-                                    Callable[[Manager], int]))
-        self.assertFalse(issubclass(Callable[[Manager], int],
-                                    Callable[[Employee], int]))
-        self.assertFalse(issubclass(Callable[[int], Employee],
-                                    Callable[[int], Manager]))
-        self.assertFalse(issubclass(Callable[[int], Manager],
-                                    Callable[[int], Employee]))
+        with self.assertRaises(TypeError):
+            self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
+        self.assertTrue(issubclass(type(lambda x: x), Callable))
 
     def test_eq_hash(self):
         self.assertEqual(Callable[[int], int], Callable[[int], int])
@@ -460,23 +371,26 @@ class CallableTests(BaseTestCase):
         self.assertNotEqual(Callable[[int], int], Callable[[], int])
         self.assertNotEqual(Callable[[int], int], Callable)
 
-    def test_cannot_subclass(self):
-        with self.assertRaises(TypeError):
-
-            class C(Callable):
-                pass
-
-        with self.assertRaises(TypeError):
-
-            class C(Callable[[int], int]):
-                pass
-
     def test_cannot_instantiate(self):
         with self.assertRaises(TypeError):
             Callable()
+        with self.assertRaises(TypeError):
+            type(Callable)()
         c = Callable[[int], str]
         with self.assertRaises(TypeError):
             c()
+        with self.assertRaises(TypeError):
+            type(c)()
+
+    def test_callable_wrong_forms(self):
+        with self.assertRaises(TypeError):
+            Callable[[...], int]
+        with self.assertRaises(TypeError):
+            Callable[(), int]
+        with self.assertRaises(TypeError):
+            Callable[[()], int]
+        with self.assertRaises(TypeError):
+            Callable[[int, 1], 2]
 
     def test_callable_instance_works(self):
         def f():
@@ -512,6 +426,10 @@ class CallableTests(BaseTestCase):
         self.assertEqual(get_type_hints(foo, globals(), locals()),
                          {'a': Callable[..., T]})
 
+    def test_ellipsis_in_generic(self):
+        # Shouldn't crash; see https://github.com/python/typing/issues/259
+        typing.List[Callable[..., str]]
+
 
 XK = TypeVar('XK', str, bytes)
 XV = TypeVar('XV')
@@ -595,6 +513,13 @@ class ProtocolTests(BaseTestCase):
     def test_protocol_instance_type_error(self):
         with self.assertRaises(TypeError):
             isinstance(0, typing.SupportsAbs)
+        class C1(typing.SupportsInt):
+            def __int__(self) -> int:
+                return 42
+        class C2(C1):
+            pass
+        c = C2()
+        self.assertIsInstance(c, C1)
 
 
 class GenericTests(BaseTestCase):
@@ -612,6 +537,15 @@ class GenericTests(BaseTestCase):
         with self.assertRaises(TypeError):
             Y[str, str]
 
+    def test_generic_errors(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            Generic[T]()
+        with self.assertRaises(TypeError):
+            isinstance([], List[int])
+        with self.assertRaises(TypeError):
+            issubclass(list, List[int])
+
     def test_init(self):
         T = TypeVar('T')
         S = TypeVar('S')
@@ -622,9 +556,9 @@ class GenericTests(BaseTestCase):
 
     def test_repr(self):
         self.assertEqual(repr(SimpleMapping),
-                         __name__ + '.' + 'SimpleMapping<~XK, ~XV>')
+                         __name__ + '.' + 'SimpleMapping')
         self.assertEqual(repr(MySimpleMapping),
-                         __name__ + '.' + 'MySimpleMapping<~XK, ~XV>')
+                         __name__ + '.' + 'MySimpleMapping')
 
     def test_chain_repr(self):
         T = TypeVar('T')
@@ -648,7 +582,36 @@ class GenericTests(BaseTestCase):
         self.assertNotEqual(Z, Y[T])
 
         self.assertTrue(str(Z).endswith(
-            '.C<~T>[typing.Tuple[~S, ~T]]<~S, ~T>[~T, int]<~T>[str]'))
+            '.C[typing.Tuple[str, int]]'))
+
+    def test_new_repr(self):
+        T = TypeVar('T')
+        U = TypeVar('U', covariant=True)
+        S = TypeVar('S')
+
+        self.assertEqual(repr(List), 'typing.List')
+        self.assertEqual(repr(List[T]), 'typing.List[~T]')
+        self.assertEqual(repr(List[U]), 'typing.List[+U]')
+        self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
+        self.assertEqual(repr(List[int]), 'typing.List[int]')
+
+    def test_new_repr_complex(self):
+        T = TypeVar('T')
+        TS = TypeVar('TS')
+
+        self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
+        self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
+                         'typing.List[typing.Tuple[int, ~T]]')
+        self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
+                 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')
+
+    def test_new_repr_bare(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
+        self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
+        class C(typing.Dict[Any, Any]): ...
+        # this line should just work
+        repr(C.__mro__)
 
     def test_dict(self):
         T = TypeVar('T')
@@ -667,6 +630,234 @@ class GenericTests(BaseTestCase):
         c.bar = 'abc'
         self.assertEqual(c.__dict__, {'bar': 'abc'})
 
+    def test_false_subclasses(self):
+        class MyMapping(MutableMapping[str, str]): pass
+        self.assertNotIsInstance({}, MyMapping)
+        self.assertNotIsSubclass(dict, MyMapping)
+
+    def test_abc_bases(self):
+        class MM(MutableMapping[str, str]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+        # this should just work
+        MM().update()
+        self.assertIsInstance(MM(), collections_abc.MutableMapping)
+        self.assertIsInstance(MM(), MutableMapping)
+        self.assertNotIsInstance(MM(), List)
+        self.assertNotIsInstance({}, MM)
+
+    def test_multiple_bases(self):
+        class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
+            pass
+        with self.assertRaises(TypeError):
+            # consistent MRO not possible
+            class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
+                pass
+
+    def test_orig_bases(self):
+        T = TypeVar('T')
+        class C(typing.Dict[str, T]): ...
+        self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
+
+    def test_naive_runtime_checks(self):
+        def naive_dict_check(obj, tp):
+            # Check if a dictionary conforms to Dict type
+            if len(tp.__parameters__) > 0:
+                raise NotImplementedError
+            if tp.__args__:
+                KT, VT = tp.__args__
+                return all(isinstance(k, KT) and isinstance(v, VT)
+                   for k, v in obj.items())
+        self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[str, int]))
+        self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[str, int]))
+        with self.assertRaises(NotImplementedError):
+            naive_dict_check({1: 'x'}, typing.Dict[str, T])
+
+        def naive_generic_check(obj, tp):
+            # Check if an instance conforms to the generic class
+            if not hasattr(obj, '__orig_class__'):
+                raise NotImplementedError
+            return obj.__orig_class__ == tp
+        class Node(Generic[T]): ...
+        self.assertTrue(naive_generic_check(Node[int](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), List))
+        with self.assertRaises(NotImplementedError):
+            naive_generic_check([1,2,3], Node[int])
+
+        def naive_list_base_check(obj, tp):
+            # Check if list conforms to a List subclass
+            return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
+                       for x in obj)
+        class C(List[int]): ...
+        self.assertTrue(naive_list_base_check([1, 2, 3], C))
+        self.assertFalse(naive_list_base_check(['a', 'b'], C))
+
+    def test_multi_subscr_base(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        V = TypeVar('V')
+        class C(List[T][U][V]): ...
+        class D(C, List[T][U][V]): ...
+        self.assertEqual(C.__parameters__, (V,))
+        self.assertEqual(D.__parameters__, (V,))
+        self.assertEqual(C[int].__parameters__, ())
+        self.assertEqual(D[int].__parameters__, ())
+        self.assertEqual(C[int].__args__, (int,))
+        self.assertEqual(D[int].__args__, (int,))
+        self.assertEqual(C.__bases__, (List,))
+        self.assertEqual(D.__bases__, (C, List))
+        self.assertEqual(C.__orig_bases__, (List[T][U][V],))
+        self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
+
+    def test_extended_generic_rules_eq(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        self.assertEqual(Tuple[T, T][int], Tuple[int, int])
+        self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
+        with self.assertRaises(TypeError):
+            Tuple[T, int][()]
+        with self.assertRaises(TypeError):
+            Tuple[T, U][T, ...]
+
+        self.assertEqual(Union[T, int][int], int)
+        self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
+        class Base: ...
+        class Derived(Base): ...
+        self.assertEqual(Union[T, Base][Derived], Base)
+        with self.assertRaises(TypeError):
+            Union[T, int][1]
+
+        self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
+        self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
+        with self.assertRaises(TypeError):
+            Callable[[T], U][..., int]
+        with self.assertRaises(TypeError):
+            Callable[[T], U][[], int]
+
+    def test_extended_generic_rules_repr(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
+                         'Union[Tuple, Callable]')
+        self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
+                         'Tuple')
+        self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
+                         'Callable[..., Union[int, NoneType]]')
+        self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
+                         'Callable[[], List[int]]')
+
+    def test_generic_forward_ref(self):
+        def foobar(x: List[List['CC']]): ...
+        class CC: ...
+        self.assertEqual(get_type_hints(foobar, globals(), locals()), {'x': List[List[CC]]})
+        T = TypeVar('T')
+        AT = Tuple[T, ...]
+        def barfoo(x: AT): ...
+        self.assertIs(get_type_hints(barfoo, globals(), locals())['x'], AT)
+        CT = Callable[..., List[T]]
+        def barfoo2(x: CT): ...
+        self.assertIs(get_type_hints(barfoo2, globals(), locals())['x'], CT)
+
+    def test_extended_generic_rules_subclassing(self):
+        class T1(Tuple[T, KT]): ...
+        class T2(Tuple[T, ...]): ...
+        class C1(Callable[[T], T]): ...
+        class C2(Callable[..., int]):
+            def __call__(self):
+                return None
+
+        self.assertEqual(T1.__parameters__, (T, KT))
+        self.assertEqual(T1[int, str].__args__, (int, str))
+        self.assertEqual(T1[int, T].__origin__, T1)
+
+        self.assertEqual(T2.__parameters__, (T,))
+        with self.assertRaises(TypeError):
+            T1[int]
+        with self.assertRaises(TypeError):
+            T2[int, str]
+
+        self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
+        self.assertEqual(C2.__parameters__, ())
+        self.assertIsInstance(C2(), collections_abc.Callable)
+        self.assertIsSubclass(C2, collections_abc.Callable)
+        self.assertIsSubclass(C1, collections_abc.Callable)
+        self.assertIsInstance(T1(), tuple)
+        self.assertIsSubclass(T2, tuple)
+        self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
+        self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
+
+    def test_fail_with_bare_union(self):
+        with self.assertRaises(TypeError):
+            List[Union]
+        with self.assertRaises(TypeError):
+            Tuple[Optional]
+        with self.assertRaises(TypeError):
+            ClassVar[ClassVar]
+        with self.assertRaises(TypeError):
+            List[ClassVar[int]]
+
+    def test_fail_with_bare_generic(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            List[Generic]
+        with self.assertRaises(TypeError):
+            Tuple[Generic[T]]
+        with self.assertRaises(TypeError):
+            List[typing._Protocol]
+
+    def test_type_erasure_special(self):
+        T = TypeVar('T')
+        # this is the only test that checks type caching
+        self.clear_caches()
+        class MyTup(Tuple[T, T]): ...
+        self.assertIs(MyTup[int]().__class__, MyTup)
+        self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
+        class MyCall(Callable[..., T]):
+            def __call__(self): return None
+        self.assertIs(MyCall[T]().__class__, MyCall)
+        self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
+        class MyDict(typing.Dict[T, T]): ...
+        self.assertIs(MyDict[int]().__class__, MyDict)
+        self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
+        class MyDef(typing.DefaultDict[str, T]): ...
+        self.assertIs(MyDef[int]().__class__, MyDef)
+        self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
+
+    def test_all_repr_eq_any(self):
+        objs = (getattr(typing, el) for el in typing.__all__)
+        for obj in objs:
+            self.assertNotEqual(repr(obj), '')
+            self.assertEqual(obj, obj)
+            if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
+                self.assertEqual(obj[Any].__args__, (Any,))
+            if isinstance(obj, type):
+                for base in obj.__mro__:
+                    self.assertNotEqual(repr(base), '')
+                    self.assertEqual(base, base)
+
+    def test_substitution_helper(self):
+        T = TypeVar('T')
+        KT = TypeVar('KT')
+        VT = TypeVar('VT')
+        class Map(Generic[KT, VT]):
+            def meth(self, k: KT, v: VT): ...
+        StrMap = Map[str, T]
+        obj = StrMap[int]()
+
+        new_args = typing._subs_tree(obj.__orig_class__)
+        new_annots = {k: typing._replace_arg(v, type(obj).__parameters__, new_args)
+                      for k, v in obj.meth.__annotations__.items()}
+
+        self.assertEqual(new_annots, {'k': str, 'v': int})
+
     def test_pickle(self):
         global C  # pickle wants to reference the class by name
         T = TypeVar('T')
@@ -686,6 +877,24 @@ class GenericTests(BaseTestCase):
             self.assertEqual(x.foo, 42)
             self.assertEqual(x.bar, 'abc')
             self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+        simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
+        for s in simples:
+            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+                z = pickle.dumps(s, proto)
+                x = pickle.loads(z)
+                self.assertEqual(s, x)
+
+    def test_copy_and_deepcopy(self):
+        T = TypeVar('T')
+        class Node(Generic[T]): ...
+        things = [Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
+                  Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
+                  typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
+                  typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
+                  Union['T', int], List['T'], typing.Mapping['T', int]]
+        for t in things + [Any]:
+            self.assertEqual(t, copy(t))
+            self.assertEqual(t, deepcopy(t))
 
     def test_errors(self):
         with self.assertRaises(TypeError):
@@ -704,12 +913,12 @@ class GenericTests(BaseTestCase):
         if not PY32:
             self.assertEqual(C.__qualname__,
                              'GenericTests.test_repr_2.<locals>.C')
-        self.assertEqual(repr(C).split('.')[-1], 'C<~T>')
+        self.assertEqual(repr(C).split('.')[-1], 'C')
         X = C[int]
         self.assertEqual(X.__module__, __name__)
         if not PY32:
-            self.assertEqual(X.__qualname__, 'C')
-        self.assertEqual(repr(X).split('.')[-1], 'C<~T>[int]')
+            self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
+        self.assertEqual(repr(X).split('.')[-1], 'C[int]')
 
         class Y(C[int]):
             pass
@@ -823,51 +1032,44 @@ class GenericTests(BaseTestCase):
         with self.assertRaises(Exception):
             D[T]
 
+class ClassVarTests(BaseTestCase):
 
-class VarianceTests(BaseTestCase):
-
-    def test_invariance(self):
-        # Because of invariance, List[subclass of X] is not a subclass
-        # of List[X], and ditto for MutableSequence.
-        self.assertNotIsSubclass(typing.List[Manager], typing.List[Employee])
-        self.assertNotIsSubclass(typing.MutableSequence[Manager],
-                              typing.MutableSequence[Employee])
-        # It's still reflexive.
-        self.assertIsSubclass(typing.List[Employee], typing.List[Employee])
-        self.assertIsSubclass(typing.MutableSequence[Employee],
-                          typing.MutableSequence[Employee])
-
-    def test_covariance_tuple(self):
-        # Check covariace for Tuple (which are really special cases).
-        self.assertIsSubclass(Tuple[Manager], Tuple[Employee])
-        self.assertNotIsSubclass(Tuple[Employee], Tuple[Manager])
-        # And pairwise.
-        self.assertIsSubclass(Tuple[Manager, Manager],
-                              Tuple[Employee, Employee])
-        self.assertNotIsSubclass(Tuple[Employee, Employee],
-                              Tuple[Manager, Employee])
-        # And using ellipsis.
-        self.assertIsSubclass(Tuple[Manager, ...], Tuple[Employee, ...])
-        self.assertNotIsSubclass(Tuple[Employee, ...], Tuple[Manager, ...])
-
-    def test_covariance_sequence(self):
-        # Check covariance for Sequence (which is just a generic class
-        # for this purpose, but using a covariant type variable).
-        self.assertIsSubclass(typing.Sequence[Manager],
-                              typing.Sequence[Employee])
-        self.assertNotIsSubclass(typing.Sequence[Employee],
-                              typing.Sequence[Manager])
-
-    def test_covariance_mapping(self):
-        # Ditto for Mapping (covariant in the value, invariant in the key).
-        self.assertIsSubclass(typing.Mapping[Employee, Manager],
-                          typing.Mapping[Employee, Employee])
-        self.assertNotIsSubclass(typing.Mapping[Manager, Employee],
-                              typing.Mapping[Employee, Employee])
-        self.assertNotIsSubclass(typing.Mapping[Employee, Manager],
-                              typing.Mapping[Manager, Manager])
-        self.assertNotIsSubclass(typing.Mapping[Manager, Employee],
-                              typing.Mapping[Manager, Manager])
+    def test_basics(self):
+        with self.assertRaises(TypeError):
+            ClassVar[1]
+        with self.assertRaises(TypeError):
+            ClassVar[int, str]
+        with self.assertRaises(TypeError):
+            ClassVar[int][str]
+
+    def test_repr(self):
+        self.assertEqual(repr(ClassVar), 'typing.ClassVar')
+        cv = ClassVar[int]
+        self.assertEqual(repr(cv), 'typing.ClassVar[int]')
+        cv = ClassVar[Employee]
+        self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar)):
+                pass
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar[int])):
+                pass
+
+    def test_cannot_init(self):
+        with self.assertRaises(TypeError):
+            ClassVar()
+        with self.assertRaises(TypeError):
+            type(ClassVar)()
+        with self.assertRaises(TypeError):
+            type(ClassVar[Optional[int]])()
+
+    def test_no_isinstance(self):
+        with self.assertRaises(TypeError):
+            isinstance(1, ClassVar[int])
+        with self.assertRaises(TypeError):
+            issubclass(int, ClassVar)
 
 
 class CastTests(BaseTestCase):
@@ -1079,12 +1281,12 @@ class OverloadTests(BaseTestCase):
         blah()
 
 
-PY35 = sys.version_info[:2] >= (3, 5)
+ASYNCIO = sys.version_info[:2] >= (3, 5)
 
-PY35_TESTS = """
+ASYNCIO_TESTS = """
 import asyncio
 
-T_a = TypeVar('T')
+T_a = TypeVar('T_a')
 
 class AwaitableWrapper(typing.Awaitable[T_a]):
 
@@ -1112,8 +1314,110 @@ class AsyncIteratorWrapper(typing.AsyncIterator[T_a]):
             raise StopAsyncIteration
 """
 
-if PY35:
-    exec(PY35_TESTS)
+if ASYNCIO:
+    try:
+        exec(ASYNCIO_TESTS)
+    except ImportError:
+        ASYNCIO = False
+
+PY36 = sys.version_info[:2] >= (3, 6)
+
+PY36_TESTS = """
+from test import ann_module, ann_module2, ann_module3
+
+class A:
+    y: float
+class B(A):
+    x: ClassVar[Optional['B']] = None
+    y: int
+class CSub(B):
+    z: ClassVar['CSub'] = B()
+class G(Generic[T]):
+    lst: ClassVar[List[T]] = []
+
+class CoolEmployee(NamedTuple):
+    name: str
+    cool: int
+"""
+
+if PY36:
+    exec(PY36_TESTS)
+
+gth = get_type_hints
+
+class GetTypeHintTests(BaseTestCase):
+    def test_get_type_hints_from_various_objects(self):
+        # For invalid objects should fail with TypeError (not AttributeError etc).
+        with self.assertRaises(TypeError):
+            gth(123)
+        with self.assertRaises(TypeError):
+            gth('abc')
+        with self.assertRaises(TypeError):
+            gth(None)
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_modules(self):
+        self.assertEqual(gth(ann_module), {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str})
+        self.assertEqual(gth(ann_module2), {})
+        self.assertEqual(gth(ann_module3), {})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_classes(self):
+        self.assertEqual(gth(ann_module.C, ann_module.__dict__),
+                         {'y': Optional[ann_module.C]})
+        self.assertIsInstance(gth(ann_module.j_class), dict)
+        self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type})
+        self.assertEqual(gth(ann_module.D),
+                         {'j': str, 'k': str, 'y': Optional[ann_module.C]})
+        self.assertEqual(gth(ann_module.Y), {'z': int})
+        self.assertEqual(gth(ann_module.h_class),
+                         {'y': Optional[ann_module.C]})
+        self.assertEqual(gth(ann_module.S), {'x': str, 'y': str})
+        self.assertEqual(gth(ann_module.foo), {'x': int})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_respect_no_type_check(self):
+        @no_type_check
+        class NoTpCheck:
+            class Inn:
+                def __init__(self, x: 'not a type'): ...
+        self.assertTrue(NoTpCheck.__no_type_check__)
+        self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+        self.assertEqual(gth(ann_module2.NTC.meth), {})
+        class ABase(Generic[T]):
+            def meth(x: int): ...
+        @no_type_check
+        class Der(ABase): ...
+        self.assertEqual(gth(ABase.meth), {'x': int})
+
+    def test_get_type_hints_for_builins(self):
+        # Should not fail for built-in classes and functions.
+        self.assertEqual(gth(int), {})
+        self.assertEqual(gth(type), {})
+        self.assertEqual(gth(dir), {})
+        self.assertEqual(gth(len), {})
+
+    def test_previous_behavior(self):
+        def testf(x, y): ...
+        testf.__annotations__['x'] = 'int'
+        self.assertEqual(gth(testf), {'x': int})
+
+    def test_get_type_hints_for_object_with_annotations(self):
+        class A: ...
+        class B: ...
+        b = B()
+        b.__annotations__ = {'x': 'A'}
+        self.assertEqual(gth(b, locals()), {'x': A})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_ClassVar(self):
+        self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__),
+                         {'var': typing.ClassVar[ann_module2.CV]})
+        self.assertEqual(gth(B, globals()),
+                         {'y': int, 'x': ClassVar[Optional[B]]})
+        self.assertEqual(gth(CSub, globals()),
+                         {'z': ClassVar[CSub], 'y': int, 'x': ClassVar[Optional[B]]})
+        self.assertEqual(gth(G), {'lst': ClassVar[List[T]]})
 
 
 class CollectionsAbcTests(BaseTestCase):
@@ -1128,7 +1432,6 @@ class CollectionsAbcTests(BaseTestCase):
         # path and could fail.  So call this a few times.
         self.assertIsInstance([], typing.Iterable)
         self.assertIsInstance([], typing.Iterable)
-        self.assertIsInstance([], typing.Iterable[int])
         self.assertNotIsInstance(42, typing.Iterable)
         # Just in case, also test issubclass() a few times.
         self.assertIsSubclass(list, typing.Iterable)
@@ -1137,10 +1440,9 @@ class CollectionsAbcTests(BaseTestCase):
     def test_iterator(self):
         it = iter([])
         self.assertIsInstance(it, typing.Iterator)
-        self.assertIsInstance(it, typing.Iterator[int])
         self.assertNotIsInstance(42, typing.Iterator)
 
-    @skipUnless(PY35, 'Python 3.5 required')
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
     def test_awaitable(self):
         ns = {}
         exec(
@@ -1149,32 +1451,41 @@ class CollectionsAbcTests(BaseTestCase):
             globals(), ns)
         foo = ns['foo']
         g = foo()
-        self.assertIsSubclass(type(g), typing.Awaitable[int])
         self.assertIsInstance(g, typing.Awaitable)
         self.assertNotIsInstance(foo, typing.Awaitable)
-        self.assertIsSubclass(typing.Awaitable[Manager],
-                          typing.Awaitable[Employee])
-        self.assertNotIsSubclass(typing.Awaitable[Employee],
-                              typing.Awaitable[Manager])
         g.send(None)  # Run foo() till completion, to avoid warning.
 
-    @skipUnless(PY35, 'Python 3.5 required')
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
+    def test_coroutine(self):
+        ns = {}
+        exec(
+            "async def foo():\n"
+            "    return\n",
+            globals(), ns)
+        foo = ns['foo']
+        g = foo()
+        self.assertIsInstance(g, typing.Coroutine)
+        with self.assertRaises(TypeError):
+            isinstance(g, typing.Coroutine[int])
+        self.assertNotIsInstance(foo, typing.Coroutine)
+        try:
+            g.send(None)
+        except StopIteration:
+            pass
+
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
     def test_async_iterable(self):
         base_it = range(10)  # type: Iterator[int]
         it = AsyncIteratorWrapper(base_it)
         self.assertIsInstance(it, typing.AsyncIterable)
         self.assertIsInstance(it, typing.AsyncIterable)
-        self.assertIsSubclass(typing.AsyncIterable[Manager],
-                          typing.AsyncIterable[Employee])
         self.assertNotIsInstance(42, typing.AsyncIterable)
 
-    @skipUnless(PY35, 'Python 3.5 required')
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
     def test_async_iterator(self):
         base_it = range(10)  # type: Iterator[int]
         it = AsyncIteratorWrapper(base_it)
         self.assertIsInstance(it, typing.AsyncIterator)
-        self.assertIsSubclass(typing.AsyncIterator[Manager],
-                          typing.AsyncIterator[Employee])
         self.assertNotIsInstance(42, typing.AsyncIterator)
 
     def test_sized(self):
@@ -1185,6 +1496,13 @@ class CollectionsAbcTests(BaseTestCase):
         self.assertIsInstance([], typing.Container)
         self.assertNotIsInstance(42, typing.Container)
 
+    def test_collection(self):
+        if hasattr(typing, 'Collection'):
+            self.assertIsInstance(tuple(), typing.Collection)
+            self.assertIsInstance(frozenset(), typing.Collection)
+            self.assertIsSubclass(dict, typing.Collection)
+            self.assertNotIsInstance(42, typing.Collection)
+
     def test_abstractset(self):
         self.assertIsInstance(set(), typing.AbstractSet)
         self.assertNotIsInstance(42, typing.AbstractSet)
@@ -1331,10 +1649,6 @@ class CollectionsAbcTests(BaseTestCase):
             yield 42
         g = foo()
         self.assertIsSubclass(type(g), typing.Generator)
-        self.assertIsSubclass(typing.Generator[Manager, Employee, Manager],
-                          typing.Generator[Employee, Manager, Employee])
-        self.assertNotIsSubclass(typing.Generator[Manager, Manager, Manager],
-                              typing.Generator[Employee, Employee, Employee])
 
     def test_no_generator_instantiation(self):
         with self.assertRaises(TypeError):
@@ -1353,12 +1667,30 @@ class CollectionsAbcTests(BaseTestCase):
             MMA()
 
         class MMC(MMA):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
             def __len__(self):
                 return 0
 
         self.assertEqual(len(MMC()), 0)
+        assert callable(MMC.update)
+        self.assertIsInstance(MMC(), typing.Mapping)
 
         class MMB(typing.MutableMapping[KT, VT]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
             def __len__(self):
                 return 0
 
@@ -1373,6 +1705,82 @@ class CollectionsAbcTests(BaseTestCase):
         self.assertIsSubclass(MMB, typing.Mapping)
         self.assertIsSubclass(MMC, typing.Mapping)
 
+        self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
+        self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
+
+        self.assertIsSubclass(MMA, collections.Mapping)
+        self.assertIsSubclass(MMB, collections.Mapping)
+        self.assertIsSubclass(MMC, collections.Mapping)
+
+        self.assertIsSubclass(MMB[str, str], typing.Mapping)
+        self.assertIsSubclass(MMC, MMA)
+
+        class I(typing.Iterable): ...
+        self.assertNotIsSubclass(list, I)
+
+        class G(typing.Generator[int, int, int]): ...
+        def g(): yield 0
+        self.assertIsSubclass(G, typing.Generator)
+        self.assertIsSubclass(G, typing.Iterable)
+        if hasattr(collections, 'Generator'):
+            self.assertIsSubclass(G, collections.Generator)
+        self.assertIsSubclass(G, collections.Iterable)
+        self.assertNotIsSubclass(type(g), G)
+
+    def test_subclassing_subclasshook(self):
+
+        class Base(typing.Iterable):
+            @classmethod
+            def __subclasshook__(cls, other):
+                if other.__name__ == 'Foo':
+                    return True
+                else:
+                    return False
+
+        class C(Base): ...
+        class Foo: ...
+        class Bar: ...
+        self.assertIsSubclass(Foo, Base)
+        self.assertIsSubclass(Foo, C)
+        self.assertNotIsSubclass(Bar, C)
+
+    def test_subclassing_register(self):
+
+        class A(typing.Container): ...
+        class B(A): ...
+
+        class C: ...
+        A.register(C)
+        self.assertIsSubclass(C, A)
+        self.assertNotIsSubclass(C, B)
+
+        class D: ...
+        B.register(D)
+        self.assertIsSubclass(D, A)
+        self.assertIsSubclass(D, B)
+
+        class M(): ...
+        collections.MutableMapping.register(M)
+        self.assertIsSubclass(M, typing.Mapping)
+
+    def test_collections_as_base(self):
+
+        class M(collections.Mapping): ...
+        self.assertIsSubclass(M, typing.Mapping)
+        self.assertIsSubclass(M, typing.Iterable)
+
+        class S(collections.MutableSequence): ...
+        self.assertIsSubclass(S, typing.MutableSequence)
+        self.assertIsSubclass(S, typing.Iterable)
+
+        class I(collections.Iterable): ...
+        self.assertIsSubclass(I, typing.Iterable)
+
+        class A(collections.Mapping, metaclass=abc.ABCMeta): ...
+        class B: ...
+        A.register(B)
+        self.assertIsSubclass(B, typing.Mapping)
+
 
 class OtherABCTests(BaseTestCase):
 
@@ -1385,7 +1793,6 @@ class OtherABCTests(BaseTestCase):
 
         cm = manager()
         self.assertIsInstance(cm, typing.ContextManager)
-        self.assertIsInstance(cm, typing.ContextManager[int])
         self.assertNotIsInstance(42, typing.ContextManager)
 
 
@@ -1415,6 +1822,18 @@ class TypeTests(BaseTestCase):
 
         joe = new_user(BasicUser)
 
+    def test_type_optional(self):
+        A = Optional[Type[BaseException]]
+
+        def foo(a: A) -> Optional[BaseException]:
+            if a is None:
+                return None
+            else:
+                return a()
+
+        assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+        assert foo(None) is None
+
 
 class NewTypeTests(BaseTestCase):
 
@@ -1452,6 +1871,31 @@ class NamedTupleTests(BaseTestCase):
         self.assertEqual(Emp._fields, ('name', 'id'))
         self.assertEqual(Emp._field_types, dict(name=str, id=int))
 
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_annotation_usage(self):
+        tim = CoolEmployee('Tim', 9000)
+        self.assertIsInstance(tim, CoolEmployee)
+        self.assertIsInstance(tim, tuple)
+        self.assertEqual(tim.name, 'Tim')
+        self.assertEqual(tim.cool, 9000)
+        self.assertEqual(CoolEmployee.__name__, 'CoolEmployee')
+        self.assertEqual(CoolEmployee._fields, ('name', 'cool'))
+        self.assertEqual(CoolEmployee._field_types, dict(name=str, cool=int))
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_namedtuple_keyword_usage(self):
+        LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int)
+        nick = LocalEmployee('Nick', 25)
+        self.assertIsInstance(nick, tuple)
+        self.assertEqual(nick.name, 'Nick')
+        self.assertEqual(LocalEmployee.__name__, 'LocalEmployee')
+        self.assertEqual(LocalEmployee._fields, ('name', 'age'))
+        self.assertEqual(LocalEmployee._field_types, dict(name=str, age=int))
+        with self.assertRaises(TypeError):
+            NamedTuple('Name', [('x', int)], y=str)
+        with self.assertRaises(TypeError):
+            NamedTuple('Name', x=1, y='a')
+
     def test_pickle(self):
         global Emp  # pickle wants to reference the class by name
         Emp = NamedTuple('Emp', [('name', str), ('id', int)])
@@ -1504,22 +1948,16 @@ class RETests(BaseTestCase):
         pat = re.compile('[a-z]+', re.I)
         self.assertIsSubclass(pat.__class__, Pattern)
         self.assertIsSubclass(type(pat), Pattern)
-        self.assertIsSubclass(type(pat), Pattern[str])
+        self.assertIsInstance(pat, Pattern)
 
         mat = pat.search('12345abcde.....')
         self.assertIsSubclass(mat.__class__, Match)
-        self.assertIsSubclass(mat.__class__, Match[str])
-        self.assertIsSubclass(mat.__class__, Match[bytes])  # Sad but true.
         self.assertIsSubclass(type(mat), Match)
-        self.assertIsSubclass(type(mat), Match[str])
+        self.assertIsInstance(mat, Match)
 
+        # these should just work
         p = Pattern[Union[str, bytes]]
-        self.assertIsSubclass(Pattern[str], Pattern)
-        self.assertIsSubclass(Pattern[str], p)
-
         m = Match[Union[bytes, str]]
-        self.assertIsSubclass(Match[bytes], Match)
-        self.assertIsSubclass(Match[bytes], m)
 
     def test_errors(self):
         with self.assertRaises(TypeError):
@@ -1534,9 +1972,6 @@ class RETests(BaseTestCase):
             m[str]
         with self.assertRaises(TypeError):
             # We don't support isinstance().
-            isinstance(42, Pattern)
-        with self.assertRaises(TypeError):
-            # We don't support isinstance().
             isinstance(42, Pattern[str])
 
     def test_repr(self):
@@ -1561,7 +1996,7 @@ class RETests(BaseTestCase):
                 pass
 
         self.assertEqual(str(ex.exception),
-                         "A type alias cannot be subclassed")
+                         "Cannot subclass typing._TypeAlias")
 
 
 class AllTests(BaseTestCase):
diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py
index 4cac66c..1a943ac 100644
--- a/lib-typing/3.2/typing.py
+++ b/lib-typing/3.2/typing.py
@@ -17,6 +17,7 @@ __all__ = [
     # Super-special typing primitives.
     'Any',
     'Callable',
+    'ClassVar',
     'Generic',
     'Optional',
     'Tuple',
@@ -26,9 +27,6 @@ __all__ = [
 
     # ABCs (from collections.abc).
     'AbstractSet',  # collections.abc.Set.
-    'Awaitable',
-    'AsyncIterator',
-    'AsyncIterable',
     'ByteString',
     'Container',
     'Hashable',
@@ -44,6 +42,14 @@ __all__ = [
     'Sequence',
     'Sized',
     'ValuesView',
+    # The following are added depending on presence
+    # of their non-generic counterparts in stdlib:
+    # Awaitable,
+    # AsyncIterator,
+    # AsyncIterable,
+    # Coroutine,
+    # Collection,
+    # ContextManager
 
     # Structural checks, a.k.a. protocols.
     'Reversible',
@@ -57,6 +63,7 @@ __all__ = [
     'DefaultDict',
     'List',
     'Set',
+    'FrozenSet',
     'NamedTuple',  # Not really a type.
     'Generator',
 
@@ -85,17 +92,24 @@ def _qualname(x):
         return x.__name__
 
 
+def _trim_name(nm):
+    if nm.startswith('_') and nm not in ('_TypeAlias',
+                    '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
+        nm = nm[1:]
+    return nm
+
+
 class TypingMeta(type):
-    """Metaclass for every type defined below.
+    """Metaclass for most types defined in typing module
+    (not a part of public API).
 
     This overrides __new__() to require an extra keyword parameter
     '_root', which serves as a guard against naive subclassing of the
     typing classes.  Any legitimate class defined using a metaclass
-    derived from TypingMeta (including internal subclasses created by
-    e.g.  Union[X, Y]) must pass _root=True.
+    derived from TypingMeta must pass _root=True.
 
-    This also defines a dummy constructor (all the work is done in
-    __new__) and a nicer repr().
+    This also defines a dummy constructor (all the work for most typing
+    constructs is done in __new__) and a nicer repr().
     """
 
     _is_protocol = False
@@ -112,8 +126,8 @@ class TypingMeta(type):
     def _eval_type(self, globalns, localns):
         """Override this in subclasses to interpret forward references.
 
-        For example, Union['C'] is internally stored as
-        Union[_ForwardRef('C')], which should evaluate to _Union[C],
+        For example, List['C'] is internally stored as
+        List[_ForwardRef('C')], which should evaluate to List[C],
         where C is an object found in globalns or localns (searching
         localns first, of course).
         """
@@ -123,50 +137,88 @@ class TypingMeta(type):
         pass
 
     def __repr__(self):
-        return '%s.%s' % (self.__module__, _qualname(self))
+        qname = _trim_name(_qualname(self))
+        return '%s.%s' % (self.__module__, qname)
 
 
-class Final:
-    """Mix-in class to prevent instantiation."""
+class _TypingBase(metaclass=TypingMeta, _root=True):
+    """Internal indicator of special typing constructs."""
 
     __slots__ = ()
 
-    def __new__(self, *args, **kwds):
-        raise TypeError("Cannot instantiate %r" % self.__class__)
+    def __init__(self, *args, **kwds):
+        pass
+
+    def __new__(cls, *args, **kwds):
+        """Constructor.
+
+        This only exists to give a better error message in case
+        someone tries to subclass a special typing object (not a good idea).
+        """
+        if (len(args) == 3 and
+                isinstance(args[0], str) and
+                isinstance(args[1], tuple)):
+            # Close enough.
+            raise TypeError("Cannot subclass %r" % cls)
+        return super().__new__(cls)
+
+    # Things that are not classes also need these.
+    def _eval_type(self, globalns, localns):
+        return self
 
+    def _get_type_vars(self, tvars):
+        pass
 
-class _ForwardRef(TypingMeta):
-    """Wrapper to hold a forward reference."""
+    def __repr__(self):
+        cls = type(self)
+        qname = _trim_name(_qualname(cls))
+        return '%s.%s' % (cls.__module__, qname)
 
-    def __new__(cls, arg):
+    def __call__(self, *args, **kwds):
+        raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase, _root=True):
+    """Internal mix-in class to prevent instantiation.
+
+    Prevents instantiation unless _root=True is given in class call.
+    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, _root=False, **kwds):
+        self = super().__new__(cls, *args, **kwds)
+        if _root is True:
+            return self
+        raise TypeError("Cannot instantiate %r" % cls)
+
+    def __reduce__(self):
+        return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase, _root=True):
+    """Internal wrapper to hold a forward reference."""
+
+    __slots__ = ('__forward_arg__', '__forward_code__',
+                 '__forward_evaluated__', '__forward_value__')
+
+    def __init__(self, arg):
+        super().__init__(arg)
         if not isinstance(arg, str):
-            raise TypeError('ForwardRef must be a string -- got %r' % (arg,))
+            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
         try:
             code = compile(arg, '<string>', 'eval')
         except SyntaxError:
-            raise SyntaxError('ForwardRef must be an expression -- got %r' %
+            raise SyntaxError('Forward reference must be an expression -- got %r' %
                               (arg,))
-        self = super().__new__(cls, arg, (), {}, _root=True)
         self.__forward_arg__ = arg
         self.__forward_code__ = code
         self.__forward_evaluated__ = False
         self.__forward_value__ = None
-        typing_globals = globals()
-        frame = sys._getframe(1)
-        while frame is not None and frame.f_globals is typing_globals:
-            frame = frame.f_back
-        assert frame is not None
-        self.__forward_frame__ = frame
-        return self
 
     def _eval_type(self, globalns, localns):
-        if not isinstance(localns, dict):
-            raise TypeError('ForwardRef localns must be a dict -- got %r' %
-                            (localns,))
-        if not isinstance(globalns, dict):
-            raise TypeError('ForwardRef globalns must be a dict -- got %r' %
-                            (globalns,))
-        if not self.__forward_evaluated__:
+        if not self.__forward_evaluated__ or localns is not globalns:
             if globalns is None and localns is None:
                 globalns = localns = {}
             elif globalns is None:
@@ -179,49 +231,36 @@ class _ForwardRef(TypingMeta):
             self.__forward_evaluated__ = True
         return self.__forward_value__
 
+    def __eq__(self, other):
+        if not isinstance(other, _ForwardRef):
+            return NotImplemented
+        return (self.__forward_arg__ == other.__forward_arg__ and
+                self.__forward_value__ == other.__forward_value__)
+
+    def __hash__(self):
+        return hash((self.__forward_arg__, self.__forward_value__))
+
     def __instancecheck__(self, obj):
         raise TypeError("Forward references cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        if not self.__forward_evaluated__:
-            globalns = self.__forward_frame__.f_globals
-            localns = self.__forward_frame__.f_locals
-            try:
-                self._eval_type(globalns, localns)
-            except NameError:
-                return False  # Too early.
-        return issubclass(cls, self.__forward_value__)
+        raise TypeError("Forward references cannot be used with issubclass().")
 
     def __repr__(self):
         return '_ForwardRef(%r)' % (self.__forward_arg__,)
 
 
-class _TypeAlias:
+class _TypeAlias(_TypingBase, _root=True):
     """Internal helper class for defining generic variants of concrete types.
 
-    Note that this is not a type; let's call it a pseudo-type.  It can
-    be used in instance and subclass checks, e.g. isinstance(m, Match)
-    or issubclass(type(m), Match).  However, it cannot be itself the
-    target of an issubclass() call; e.g. issubclass(Match, C) (for
-    some arbitrary class C) raises TypeError rather than returning
-    False.
+    Note that this is not a type; let's call it a pseudo-type.  It cannot
+    be used in instance and subclass checks in parameterized form, i.e.
+    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+    ``False``.
     """
 
     __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
 
-    def __new__(cls, *args, **kwds):
-        """Constructor.
-
-        This only exists to give a better error message in case
-        someone tries to subclass a type alias (not a good idea).
-        """
-        if (len(args) == 3 and
-                isinstance(args[0], str) and
-                isinstance(args[1], tuple)):
-            # Close enough.
-            raise TypeError("A type alias cannot be subclassed")
-        return object.__new__(cls)
-
     def __init__(self, name, type_var, impl_type, type_checker):
         """Initializer.
 
@@ -234,9 +273,9 @@ class _TypeAlias:
                 and returns a value that should be a type_var instance.
         """
         assert isinstance(name, str), repr(name)
-        assert isinstance(type_var, type), repr(type_var)
         assert isinstance(impl_type, type), repr(impl_type)
         assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
         self.name = name
         self.type_var = type_var
         self.impl_type = impl_type
@@ -246,36 +285,41 @@ class _TypeAlias:
         return "%s[%s]" % (self.name, _type_repr(self.type_var))
 
     def __getitem__(self, parameter):
-        assert isinstance(parameter, type), repr(parameter)
         if not isinstance(self.type_var, TypeVar):
             raise TypeError("%s cannot be further parameterized." % self)
-        if self.type_var.__constraints__:
-            if not issubclass(parameter, Union[self.type_var.__constraints__]):
+        if self.type_var.__constraints__ and isinstance(parameter, type):
+            if not issubclass(parameter, self.type_var.__constraints__):
                 raise TypeError("%s is not a valid substitution for %s." %
                                 (parameter, self.type_var))
+        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+            raise TypeError("%s cannot be re-parameterized." % self)
         return self.__class__(self.name, parameter,
                               self.impl_type, self.type_checker)
 
+    def __eq__(self, other):
+        if not isinstance(other, _TypeAlias):
+            return NotImplemented
+        return self.name == other.name and self.type_var == other.type_var
+
+    def __hash__(self):
+        return hash((self.name, self.type_var))
+
     def __instancecheck__(self, obj):
-        raise TypeError("Type aliases cannot be used with isinstance().")
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with isinstance().")
+        return isinstance(obj, self.impl_type)
 
     def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if isinstance(cls, _TypeAlias):
-            # Covariance.  For now, we compare by name.
-            return (cls.name == self.name and
-                    issubclass(cls.type_var, self.type_var))
-        else:
-            # Note that this is too lenient, because the
-            # implementation type doesn't carry information about
-            # whether it is about bytes or str (for example).
-            return issubclass(cls, self.impl_type)
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with issubclass().")
+        return issubclass(cls, self.impl_type)
 
 
 def _get_type_vars(types, tvars):
     for t in types:
-        if isinstance(t, TypingMeta):
+        if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
             t._get_type_vars(tvars)
 
 
@@ -286,14 +330,13 @@ def _type_vars(types):
 
 
 def _eval_type(t, globalns, localns):
-    if isinstance(t, TypingMeta):
+    if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
         return t._eval_type(globalns, localns)
-    else:
-        return t
+    return t
 
 
 def _type_check(arg, msg):
-    """Check that the argument is a type, and return it.
+    """Check that the argument is a type, and return it (internal helper).
 
     As a special case, accept None and return type(None) instead.
     Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
@@ -308,13 +351,19 @@ def _type_check(arg, msg):
         return type(None)
     if isinstance(arg, str):
         arg = _ForwardRef(arg)
-    if not isinstance(arg, (type, _TypeAlias)) and not callable(arg):
+    if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+        not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
         raise TypeError(msg + " Got %.100r." % (arg,))
+    # Bare Union etc. are not valid as type arguments
+    if (type(arg).__name__ in ('_Union', '_Optional')
+        and not getattr(arg, '__origin__', None)
+        or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
+        raise TypeError("Plain %s is not valid as type argument" % arg)
     return arg
 
 
 def _type_repr(obj):
-    """Return the repr() of an object, special-casing types.
+    """Return the repr() of an object, special-casing types (internal helper).
 
     If obj is a type, we return a shorter version than the default
     type.__repr__, based on the module and qualified name, which is
@@ -324,40 +373,39 @@ def _type_repr(obj):
     if isinstance(obj, type) and not isinstance(obj, TypingMeta):
         if obj.__module__ == 'builtins':
             return _qualname(obj)
-        else:
-            return '%s.%s' % (obj.__module__, _qualname(obj))
-    else:
-        return repr(obj)
+        return '%s.%s' % (obj.__module__, _qualname(obj))
+    if obj is ...:
+        return('...')
+    if isinstance(obj, types.FunctionType):
+        return obj.__name__
+    return repr(obj)
 
 
-class AnyMeta(TypingMeta):
-    """Metaclass for Any."""
+class _Any(_FinalTypingBase, _root=True):
+    """Special type indicating an unconstrained type.
 
-    def __new__(cls, name, bases, namespace, _root=False):
-        self = super().__new__(cls, name, bases, namespace, _root=_root)
-        return self
+    - Any is compatible with every type.
+    - Any assumed to have all methods.
+    - All values assumed to be instances of Any.
+
+    Note that all the above statements are true from the point of view of
+    static type checkers. At runtime, Any should not be used with instance
+    or class checks.
+    """
+
+    __slots__ = ()
 
     def __instancecheck__(self, obj):
         raise TypeError("Any cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        if not isinstance(cls, type):
-            return super().__subclasscheck__(cls)  # To TypeError.
-        return True
+        raise TypeError("Any cannot be used with issubclass().")
 
 
-class Any(Final, metaclass=AnyMeta, _root=True):
-    """Special type indicating an unconstrained type.
+Any = _Any(_root=True)
 
-    - Any object is an instance of Any.
-    - Any class is a subclass of Any.
-    - As a special case, Any and object are subclasses of each other.
-    """
 
-    __slots__ = ()
-
-
-class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
+class TypeVar(_TypingBase, _root=True):
     """Type variable.
 
     Usage::
@@ -370,7 +418,7 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
     as for generic function definitions.  See class Generic for more
     information on generic types.  Generic functions work as follows:
 
-      def repeat(x: T, n: int) -> Sequence[T]:
+      def repeat(x: T, n: int) -> List[T]:
           '''Return a list containing n references to x.'''
           return [x]*n
 
@@ -383,14 +431,12 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
     that if the arguments are instances of some subclass of str,
     the return type is still plain str.
 
-    At runtime, isinstance(x, T) will raise TypeError.  However,
-    issubclass(C, T) is true for any class C, and issubclass(str, A)
-    and issubclass(bytes, A) are true, and issubclass(int, A) is
-    false.  (TODO: Why is this needed?  This may change.  See #136.)
+    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
 
-    Type variables may be marked covariant or contravariant by passing
-    covariant=True or contravariant=True.  See PEP 484 for more
-    details.  By default type variables are invariant.
+    Type variables defined with covariant=True or contravariant=True
+    can be used do declare covariant or contravariant generic types.
+    See PEP 484 for more details. By default generic types are invariant
+    in all type variables.
 
     Type variables can be introspected. e.g.:
 
@@ -401,11 +447,16 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
       A.__constraints__ == (str, bytes)
     """
 
-    def __new__(cls, name, *constraints, bound=None,
+    __slots__ = ('__name__', '__bound__', '__constraints__',
+                 '__covariant__', '__contravariant__')
+
+    def __init__(self, name, *constraints, bound=None,
                 covariant=False, contravariant=False):
-        self = super().__new__(cls, name, (Final,), {}, _root=True)
+        super().__init__(name, *constraints, bound=bound,
+                         covariant=covariant, contravariant=contravariant)
+        self.__name__ = name
         if covariant and contravariant:
-            raise ValueError("Bivariant type variables are not supported.")
+            raise ValueError("Bivariant types are not supported.")
         self.__covariant__ = bool(covariant)
         self.__contravariant__ = bool(contravariant)
         if constraints and bound is not None:
@@ -418,7 +469,6 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
             self.__bound__ = _type_check(bound, "Bound must be a type.")
         else:
             self.__bound__ = None
-        return self
 
     def _get_type_vars(self, tvars):
         if self not in tvars:
@@ -437,16 +487,7 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
         raise TypeError("Type variables cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        # TODO: Make this raise TypeError too?
-        if cls is self:
-            return True
-        if cls is Any:
-            return True
-        if self.__bound__ is not None:
-            return issubclass(cls, self.__bound__)
-        if self.__constraints__:
-            return any(issubclass(cls, c) for c in self.__constraints__)
-        return True
+        raise TypeError("Type variables cannot be used with issubclass().")
 
 
 # Some unconstrained type variables.  These are used by the container types.
@@ -464,124 +505,129 @@ T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
 AnyStr = TypeVar('AnyStr', bytes, str)
 
 
-class UnionMeta(TypingMeta):
-    """Metaclass for Union."""
-
-    def __new__(cls, name, bases, namespace, parameters=None, _root=False):
-        if parameters is None:
-            return super().__new__(cls, name, bases, namespace, _root=_root)
-        if not isinstance(parameters, tuple):
-            raise TypeError("Expected parameters=<tuple>")
-        # Flatten out Union[Union[...], ...] and type-check non-Union args.
-        params = []
-        msg = "Union[arg, ...]: each arg must be a type."
-        for p in parameters:
-            if isinstance(p, UnionMeta):
-                params.extend(p.__union_params__)
-            else:
-                params.append(_type_check(p, msg))
-        # Weed out strict duplicates, preserving the first of each occurrence.
-        all_params = set(params)
-        if len(all_params) < len(params):
-            new_params = []
-            for t in params:
-                if t in all_params:
-                    new_params.append(t)
-                    all_params.remove(t)
-            params = new_params
-            assert not all_params, all_params
-        # Weed out subclasses.
-        # E.g. Union[int, Employee, Manager] == Union[int, Employee].
-        # If Any or object is present it will be the sole survivor.
-        # If both Any and object are present, Any wins.
-        # Never discard type variables, except against Any.
-        # (In particular, Union[str, AnyStr] != AnyStr.)
-        all_params = set(params)
-        for t1 in params:
-            if t1 is Any:
-                return Any
-            if isinstance(t1, TypeVar):
-                continue
-            if isinstance(t1, _TypeAlias):
-                # _TypeAlias is not a real class.
-                continue
-            if not isinstance(t1, type):
-                assert callable(t1)  # A callable might sneak through.
-                continue
-            if any(isinstance(t2, type) and issubclass(t1, t2)
-                   for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
-                all_params.remove(t1)
-        # It's not a union if there's only one type left.
-        if len(all_params) == 1:
-            return all_params.pop()
-        # Create a new class with these params.
-        self = super().__new__(cls, name, bases, {}, _root=True)
-        self.__union_params__ = tuple(t for t in params if t in all_params)
-        self.__union_set_params__ = frozenset(self.__union_params__)
-        return self
-
-    def _eval_type(self, globalns, localns):
-        p = tuple(_eval_type(t, globalns, localns)
-                  for t in self.__union_params__)
-        if p == self.__union_params__:
-            return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  p, _root=True)
-
-    def _get_type_vars(self, tvars):
-        if self.__union_params__:
-            _get_type_vars(self.__union_params__, tvars)
+def _replace_arg(arg, tvars, args):
+    """An internal helper function: replace arg if it is a type variable
+    found in tvars with corresponding substitution from args or
+    with corresponding substitution sub-tree if arg is a generic type.
+    """
 
-    def __repr__(self):
-        r = super().__repr__()
-        if self.__union_params__:
-            r += '[%s]' % (', '.join(_type_repr(t)
-                                     for t in self.__union_params__))
-        return r
+    if tvars is None:
+        tvars = []
+    if hasattr(arg, '_subs_tree'):
+        return arg._subs_tree(tvars, args)
+    if isinstance(arg, TypeVar):
+        for i, tvar in enumerate(tvars):
+            if arg == tvar:
+                return args[i]
+    return arg
 
-    def __getitem__(self, parameters):
-        if self.__union_params__ is not None:
-            raise TypeError(
-                "Cannot subscript an existing Union. Use Union[u, t] instead.")
-        if parameters == ():
-            raise TypeError("Cannot take a Union of no types.")
-        if not isinstance(parameters, tuple):
-            parameters = (parameters,)
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__), parameters, _root=True)
 
-    def __eq__(self, other):
-        if not isinstance(other, UnionMeta):
-            return NotImplemented
-        return self.__union_set_params__ == other.__union_set_params__
+def _subs_tree(cls, tvars=None, args=None):
+    """An internal helper function: calculate substitution tree
+    for generic cls after replacing its type parameters with
+    substitutions in tvars -> args (if any).
+    Repeat the same following __origin__'s.
 
-    def __hash__(self):
-        return hash(self.__union_set_params__)
+    Return a list of arguments with all possible substitutions
+    performed. Arguments that are generic classes themselves are represented
+    as tuples (so that no new classes are created by this function).
+    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+    """
 
-    def __instancecheck__(self, obj):
-        raise TypeError("Unions cannot be used with isinstance().")
+    if cls.__origin__ is None:
+        return cls
+    # Make of chain of origins (i.e. cls -> cls.__origin__)
+    current = cls.__origin__
+    orig_chain = []
+    while current.__origin__ is not None:
+        orig_chain.append(current)
+        current = current.__origin__
+    # Replace type variables in __args__ if asked ...
+    tree_args = []
+    for arg in cls.__args__:
+        tree_args.append(_replace_arg(arg, tvars, args))
+    # ... then continue replacing down the origin chain.
+    for ocls in orig_chain:
+        new_tree_args = []
+        for i, arg in enumerate(ocls.__args__):
+            new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+        tree_args = new_tree_args
+    return tree_args
+
+
+def _remove_dups_flatten(parameters):
+    """An internal helper for Union creation and substitution: flatten Union's
+    among parameters, then remove duplicates and strict subclasses.
+    """
 
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if self.__union_params__ is None:
-            return isinstance(cls, UnionMeta)
-        elif isinstance(cls, UnionMeta):
-            if cls.__union_params__ is None:
-                return False
-            return all(issubclass(c, self) for c in (cls.__union_params__))
-        elif isinstance(cls, TypeVar):
-            if cls in self.__union_params__:
-                return True
-            if cls.__constraints__:
-                return issubclass(Union[cls.__constraints__], self)
-            return False
+    # Flatten out Union[Union[...], ...].
+    params = []
+    for p in parameters:
+        if isinstance(p, _Union) and p.__origin__ is Union:
+            params.extend(p.__args__)
+        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+            params.extend(p[1:])
         else:
-            return any(issubclass(cls, t) for t in self.__union_params__)
+            params.append(p)
+    # Weed out strict duplicates, preserving the first of each occurrence.
+    all_params = set(params)
+    if len(all_params) < len(params):
+        new_params = []
+        for t in params:
+            if t in all_params:
+                new_params.append(t)
+                all_params.remove(t)
+        params = new_params
+        assert not all_params, all_params
+    # Weed out subclasses.
+    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+    # If object is present it will be sole survivor among proper classes.
+    # Never discard type variables.
+    # (In particular, Union[str, AnyStr] != AnyStr.)
+    all_params = set(params)
+    for t1 in params:
+        if not isinstance(t1, type):
+            continue
+        if any(isinstance(t2, type) and issubclass(t1, t2)
+               for t2 in all_params - {t1}
+               if not (isinstance(t2, GenericMeta) and
+                       t2.__origin__ is not None)):
+            all_params.remove(t1)
+    return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+    # Check correct count for parameters of a generic cls (internal helper).
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+    """Internal wrapper caching __getitem__ of generic types with a fallback to
+    original function for non-hashable arguments.
+    """
+
+    cached = functools.lru_cache()(func)
+    _cleanups.append(cached.cache_clear)
+    @functools.wraps(func)
+    def inner(*args, **kwds):
+        try:
+            return cached(*args, **kwds)
+        except TypeError:
+            pass  # All real errors (not unhashable args) are raised below.
+        return func(*args, **kwds)
+    return inner
 
 
-class Union(Final, metaclass=UnionMeta, _root=True):
+class _Union(_FinalTypingBase, _root=True):
     """Union type; Union[X, Y] means either X or Y.
 
     To define a union, use e.g. Union[int, str].  Details:
@@ -616,256 +662,136 @@ class Union(Final, metaclass=UnionMeta, _root=True):
         Union[Manager, int, Employee] == Union[int, Employee]
         Union[Employee, Manager] == Employee
 
-    - Corollary: if Any is present it is the sole survivor, e.g.::
-
-        Union[int, Any] == Any
-
     - Similar for object::
 
         Union[int, object] == object
 
-    - To cut a tie: Union[object, Any] == Union[Any, object] == Any.
-
     - You cannot subclass or instantiate a union.
 
-    - You cannot write Union[X][Y] (what would it mean?).
-
     - You can use Optional[X] as a shorthand for Union[X, None].
     """
 
-    # Unsubscripted Union type has params set to None.
-    __union_params__ = None
-    __union_set_params__ = None
-
-
-class OptionalMeta(TypingMeta):
-    """Metaclass for Optional."""
-
-    def __new__(cls, name, bases, namespace, _root=False):
-        return super().__new__(cls, name, bases, namespace, _root=_root)
-
-    def __getitem__(self, arg):
-        arg = _type_check(arg, "Optional[t] requires a single type.")
-        return Union[arg, type(None)]
-
-
-class Optional(Final, metaclass=OptionalMeta, _root=True):
-    """Optional type.
+    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
 
-    Optional[X] is equivalent to Union[X, type(None)].
-    """
-
-    __slots__ = ()
-
-
-class TupleMeta(TypingMeta):
-    """Metaclass for Tuple."""
-
-    def __new__(cls, name, bases, namespace, parameters=None,
-                use_ellipsis=False, _root=False):
-        self = super().__new__(cls, name, bases, namespace, _root=_root)
-        self.__tuple_params__ = parameters
-        self.__tuple_use_ellipsis__ = use_ellipsis
+    def __new__(cls, parameters=None, origin=None, *args, _root=False):
+        self = super().__new__(cls, parameters, origin, *args, _root=_root)
+        if origin is None:
+            self.__parameters__ = None
+            self.__args__ = None
+            self.__origin__ = None
+            self.__tree_hash__ = hash(frozenset(('Union',)))
+            return self
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        if origin is Union:
+            parameters = _remove_dups_flatten(parameters)
+            # It's not a union if there's only one type left.
+            if len(parameters) == 1:
+                return parameters[0]
+        self.__parameters__ = _type_vars(parameters)
+        self.__args__ = parameters
+        self.__origin__ = origin
+        # Pre-calculate the __hash__ on instantiation.
+        # This improves speed for complex substitutions.
+        subs_tree = self._subs_tree()
+        if isinstance(subs_tree, tuple):
+            self.__tree_hash__ = hash(frozenset(subs_tree))
+        else:
+            self.__tree_hash__ = hash(subs_tree)
         return self
 
-    def _get_type_vars(self, tvars):
-        if self.__tuple_params__:
-            _get_type_vars(self.__tuple_params__, tvars)
-
     def _eval_type(self, globalns, localns):
-        tp = self.__tuple_params__
-        if tp is None:
+        if self.__args__ is None:
             return self
-        p = tuple(_eval_type(t, globalns, localns) for t in tp)
-        if p == self.__tuple_params__:
+        ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+        ev_origin = _eval_type(self.__origin__, globalns, localns)
+        if ev_args == self.__args__ and ev_origin == self.__origin__:
+            # Everything is already evaluated.
             return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  p, _root=True)
+        return self.__class__(ev_args, ev_origin, _root=True)
+
+    def _get_type_vars(self, tvars):
+        if self.__origin__ and self.__parameters__:
+            _get_type_vars(self.__parameters__, tvars)
 
     def __repr__(self):
-        r = super().__repr__()
-        if self.__tuple_params__ is not None:
-            params = [_type_repr(p) for p in self.__tuple_params__]
-            if self.__tuple_use_ellipsis__:
-                params.append('...')
-            if not params:
-                params.append('()')
-            r += '[%s]' % (
-                ', '.join(params))
-        return r
+        if self.__origin__ is None:
+            return super().__repr__()
+        tree = self._subs_tree()
+        if not isinstance(tree, tuple):
+            return repr(tree)
+        return tree[0]._tree_repr(tree)
+
+    def _tree_repr(self, tree):
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        return super().__repr__() + '[%s]' % ', '.join(arg_list)
 
+    @_tp_cache
     def __getitem__(self, parameters):
-        if self.__tuple_params__ is not None:
-            raise TypeError("Cannot re-parameterize %r" % (self,))
+        if parameters == ():
+            raise TypeError("Cannot take a Union of no types.")
         if not isinstance(parameters, tuple):
             parameters = (parameters,)
-        if len(parameters) == 2 and parameters[1] == Ellipsis:
-            parameters = parameters[:1]
-            use_ellipsis = True
-            msg = "Tuple[t, ...]: t must be a type."
+        if self.__origin__ is None:
+            msg = "Union[arg, ...]: each arg must be a type."
         else:
-            use_ellipsis = False
-            msg = "Tuple[t0, t1, ...]: each t must be a type."
+            msg = "Parameters to generic types must be types."
         parameters = tuple(_type_check(p, msg) for p in parameters)
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__), parameters,
-                              use_ellipsis=use_ellipsis, _root=True)
+        if self is not Union:
+            _check_generic(self, parameters)
+        return self.__class__(parameters, origin=self, _root=True)
+
+    def _subs_tree(self, tvars=None, args=None):
+        if self is Union:
+            return Union  # Nothing to substitute
+        tree_args = _subs_tree(self, tvars, args)
+        tree_args = _remove_dups_flatten(tree_args)
+        if len(tree_args) == 1:
+            return tree_args[0]  # Union of a single type is that type
+        return (Union,) + tree_args
 
     def __eq__(self, other):
-        if not isinstance(other, TupleMeta):
-            return NotImplemented
-        return (self.__tuple_params__ == other.__tuple_params__ and
-                self.__tuple_use_ellipsis__ == other.__tuple_use_ellipsis__)
+        if not isinstance(other, _Union):
+            return self._subs_tree() == other
+        return self.__tree_hash__ == other.__tree_hash__
 
     def __hash__(self):
-        return hash(self.__tuple_params__)
+        return self.__tree_hash__
 
     def __instancecheck__(self, obj):
-        raise TypeError("Tuples cannot be used with isinstance().")
+        raise TypeError("Unions cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if not isinstance(cls, type):
-            return super().__subclasscheck__(cls)  # To TypeError.
-        if issubclass(cls, tuple):
-            return True  # Special case.
-        if not isinstance(cls, TupleMeta):
-            return super().__subclasscheck__(cls)  # False.
-        if self.__tuple_params__ is None:
-            return True
-        if cls.__tuple_params__ is None:
-            return False  # ???
-        if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__:
-            return False
-        # Covariance.
-        return (len(self.__tuple_params__) == len(cls.__tuple_params__) and
-                all(issubclass(x, p)
-                    for x, p in zip(cls.__tuple_params__,
-                                    self.__tuple_params__)))
-
-
-class Tuple(Final, metaclass=TupleMeta, _root=True):
-    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
-
-    Example: Tuple[T1, T2] is a tuple of two elements corresponding
-    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
-    of an int, a float and a string.
-
-    To specify a variable-length tuple of homogeneous type, use Sequence[T].
-    """
-
-    __slots__ = ()
-
+        raise TypeError("Unions cannot be used with issubclass().")
 
-class CallableMeta(TypingMeta):
-    """Metaclass for Callable."""
 
-    def __new__(cls, name, bases, namespace, _root=False,
-                args=None, result=None):
-        if args is None and result is None:
-            pass  # Must be 'class Callable'.
-        else:
-            if args is not Ellipsis:
-                if not isinstance(args, list):
-                    raise TypeError("Callable[args, result]: "
-                                    "args must be a list."
-                                    " Got %.100r." % (args,))
-                msg = "Callable[[arg, ...], result]: each arg must be a type."
-                args = tuple(_type_check(arg, msg) for arg in args)
-            msg = "Callable[args, result]: result must be a type."
-            result = _type_check(result, msg)
-        self = super().__new__(cls, name, bases, namespace, _root=_root)
-        self.__args__ = args
-        self.__result__ = result
-        return self
+Union = _Union(_root=True)
 
-    def _get_type_vars(self, tvars):
-        if self.__args__:
-            _get_type_vars(self.__args__, tvars)
 
-    def _eval_type(self, globalns, localns):
-        if self.__args__ is None and self.__result__ is None:
-            return self
-        if self.__args__ is Ellipsis:
-            args = self.__args__
-        else:
-            args = [_eval_type(t, globalns, localns) for t in self.__args__]
-        result = _eval_type(self.__result__, globalns, localns)
-        if args == self.__args__ and result == self.__result__:
-            return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  args=args, result=result, _root=True)
-
-    def __repr__(self):
-        r = super().__repr__()
-        if self.__args__ is not None or self.__result__ is not None:
-            if self.__args__ is Ellipsis:
-                args_r = '...'
-            else:
-                args_r = '[%s]' % ', '.join(_type_repr(t)
-                                            for t in self.__args__)
-            r += '[%s, %s]' % (args_r, _type_repr(self.__result__))
-        return r
-
-    def __getitem__(self, parameters):
-        if self.__args__ is not None or self.__result__ is not None:
-            raise TypeError("This Callable type is already parameterized.")
-        if not isinstance(parameters, tuple) or len(parameters) != 2:
-            raise TypeError(
-                "Callable must be used as Callable[[arg, ...], result].")
-        args, result = parameters
-        return self.__class__(self.__name__, self.__bases__,
-                              dict(self.__dict__), _root=True,
-                              args=args, result=result)
-
-    def __eq__(self, other):
-        if not isinstance(other, CallableMeta):
-            return NotImplemented
-        return (self.__args__ == other.__args__ and
-                self.__result__ == other.__result__)
-
-    def __hash__(self):
-        return hash(self.__args__) ^ hash(self.__result__)
-
-    def __instancecheck__(self, obj):
-        # For unparametrized Callable we allow this, because
-        # typing.Callable should be equivalent to
-        # collections.abc.Callable.
-        if self.__args__ is None and self.__result__ is None:
-            return isinstance(obj, collections_abc.Callable)
-        else:
-            raise TypeError("Callable[] cannot be used with isinstance().")
-
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if not isinstance(cls, CallableMeta):
-            return super().__subclasscheck__(cls)
-        if self.__args__ is None and self.__result__ is None:
-            return True
-        # We're not doing covariance or contravariance -- this is *invariance*.
-        return self == cls
+class _Optional(_FinalTypingBase, _root=True):
+    """Optional type.
 
+    Optional[X] is equivalent to Union[X, None].
+    """
 
-class Callable(Final, metaclass=CallableMeta, _root=True):
-    """Callable type; Callable[[int], str] is a function of (int) -> str.
+    __slots__ = ()
 
-    The subscription syntax must always be used with exactly two
-    values: the argument list and the return type.  The argument list
-    must be a list of types; the return type must be a single type.
+    @_tp_cache
+    def __getitem__(self, arg):
+        arg = _type_check(arg, "Optional[t] requires a single type.")
+        return Union[arg, type(None)]
 
-    There is no syntax to indicate optional or keyword arguments,
-    such function types are rarely used as callback types.
-    """
 
-    __slots__ = ()
+Optional = _Optional(_root=True)
 
 
 def _gorg(a):
-    """Return the farthest origin of a generic class."""
+    """Return the farthest origin of a generic class (internal helper)."""
     assert isinstance(a, GenericMeta)
     while a.__origin__ is not None:
         a = a.__origin__
@@ -873,10 +799,10 @@ def _gorg(a):
 
 
 def _geqv(a, b):
-    """Return whether two generic classes are equivalent.
+    """Return whether two generic classes are equivalent (internal helper).
 
     The intention is to consider generic class X and any of its
-    parameterized forms (X[T], X[int], etc.)  as equivalent.
+    parameterized forms (X[T], X[int], etc.) as equivalent.
 
     However, X is not equivalent to a subclass of X.
 
@@ -901,13 +827,54 @@ def _next_in_mro(cls):
     return next_in_mro
 
 
+def _valid_for_check(cls):
+    """An internal helper to prohibit isinstance([1], List[str]) etc."""
+    if cls is Generic:
+        raise TypeError("Class %r cannot be used with class "
+                        "or instance checks" % cls)
+    if (cls.__origin__ is not None and
+        sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
+        raise TypeError("Parameterized generics cannot be used with class "
+                        "or instance checks")
+
+
+def _make_subclasshook(cls):
+    """Construct a __subclasshook__ callable that incorporates
+    the associated __extra__ class in subclass checks performed
+    against cls.
+    """
+    if isinstance(cls.__extra__, abc.ABCMeta):
+        # The logic mirrors that of ABCMeta.__subclasscheck__.
+        # Registered classes need not be checked here because
+        # cls and its extra share the same _abc_registry.
+        def __extrahook__(subclass):
+            _valid_for_check(cls)
+            res = cls.__extra__.__subclasshook__(subclass)
+            if res is not NotImplemented:
+                return res
+            if cls.__extra__ in subclass.__mro__:
+                return True
+            for scls in cls.__extra__.__subclasses__():
+                if isinstance(scls, GenericMeta):
+                    continue
+                if issubclass(subclass, scls):
+                    return True
+            return NotImplemented
+    else:
+        # For non-ABC extras we'll just call issubclass().
+        def __extrahook__(subclass):
+            _valid_for_check(cls)
+            if cls.__extra__ and issubclass(subclass, cls.__extra__):
+                return True
+            return NotImplemented
+    return __extrahook__
+
+
 class GenericMeta(TypingMeta, abc.ABCMeta):
     """Metaclass for generic types."""
 
     def __new__(cls, name, bases, namespace,
-                tvars=None, args=None, origin=None, extra=None):
-        self = super().__new__(cls, name, bases, namespace, _root=True)
-
+                tvars=None, args=None, origin=None, extra=None, orig_bases=None):
         if tvars is not None:
             # Called from __getitem__() below.
             assert origin is not None
@@ -948,48 +915,102 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
                          ", ".join(str(g) for g in gvars)))
                 tvars = gvars
 
+        initial_bases = bases
+        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
+            bases = (extra,) + bases
+        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)
+
+        # remove bare Generic from bases if there are other generic bases
+        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+            bases = tuple(b for b in bases if b is not Generic)
+        self = super().__new__(cls, name, bases, namespace, _root=True)
+
         self.__parameters__ = tvars
-        self.__args__ = args
+        # Be prepared that GenericMeta will be subclassed by TupleMeta
+        # and CallableMeta, those two allow ..., (), or [] in __args___.
+        self.__args__ = tuple(... if a is _TypingEllipsis else
+                              () if a is _TypingEmpty else
+                              a for a in args) if args else None
         self.__origin__ = origin
         self.__extra__ = extra
         # Speed hack (https://github.com/python/typing/issues/196).
         self.__next_in_mro__ = _next_in_mro(self)
+        # Preserve base classes on subclassing (__bases__ are type erased now).
+        if orig_bases is None:
+            self.__orig_bases__ = initial_bases
+
+        # This allows unparameterized generic collections to be used
+        # with issubclass() and isinstance() in the same way as their
+        # collections.abc counterparts (e.g., isinstance([], Iterable)).
+        if ('__subclasshook__' not in namespace and extra  # allow overriding
+            or hasattr(self.__subclasshook__, '__name__') and
+            self.__subclasshook__.__name__ == '__extrahook__'):
+            self.__subclasshook__ = _make_subclasshook(self)
+        if isinstance(extra, abc.ABCMeta):
+            self._abc_registry = extra._abc_registry
+
+        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
+            self.__qualname__ = origin.__qualname__
+        self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
         return self
 
     def _get_type_vars(self, tvars):
         if self.__origin__ and self.__parameters__:
             _get_type_vars(self.__parameters__, tvars)
 
+    def _eval_type(self, globalns, localns):
+        ev_origin = (self.__origin__._eval_type(globalns, localns)
+                     if self.__origin__ else None)
+        ev_args = tuple(_eval_type(a, globalns, localns) for a
+                        in self.__args__) if self.__args__ else None
+        if ev_origin == self.__origin__ and ev_args == self.__args__:
+            return self
+        return self.__class__(self.__name__,
+                              self.__bases__,
+                              dict(self.__dict__),
+                              tvars=_type_vars(ev_args) if ev_args else None,
+                              args=ev_args,
+                              origin=ev_origin,
+                              extra=self.__extra__,
+                              orig_bases=self.__orig_bases__)
+
     def __repr__(self):
-        if self.__origin__ is not None:
-            r = repr(self.__origin__)
-        else:
-            r = super().__repr__()
-        if self.__args__:
-            r += '[%s]' % (
-                ', '.join(_type_repr(p) for p in self.__args__))
-        if self.__parameters__:
-            r += '<%s>' % (
-                ', '.join(_type_repr(p) for p in self.__parameters__))
-        return r
+        if self.__origin__ is None:
+            return super().__repr__()
+        return self._tree_repr(self._subs_tree())
+
+    def _tree_repr(self, tree):
+        arg_list = []
+        for arg in tree[1:]:
+            if arg == ():
+                arg_list.append('()')
+            elif not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        return super().__repr__() + '[%s]' % ', '.join(arg_list)
+
+    def _subs_tree(self, tvars=None, args=None):
+        if self.__origin__ is None:
+            return self
+        tree_args = _subs_tree(self, tvars, args)
+        return (_gorg(self),) + tuple(tree_args)
 
     def __eq__(self, other):
         if not isinstance(other, GenericMeta):
             return NotImplemented
-        if self.__origin__ is not None:
-            return (self.__origin__ is other.__origin__ and
-                    self.__args__ == other.__args__ and
-                    self.__parameters__ == other.__parameters__)
-        else:
+        if self.__origin__ is None or other.__origin__ is None:
             return self is other
+        return self.__tree_hash__ == other.__tree_hash__
 
     def __hash__(self):
-        return hash((self.__name__, self.__parameters__))
+        return self.__tree_hash__
 
+    @_tp_cache
     def __getitem__(self, params):
         if not isinstance(params, tuple):
             params = (params,)
-        if not params:
+        if not params and not _gorg(self) is Tuple:
             raise TypeError(
                 "Parameter list to %s[...] cannot be empty" % _qualname(self))
         msg = "Parameters to generic types must be types."
@@ -1003,34 +1024,31 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
                 raise TypeError(
                     "Parameters to Generic[...] must all be unique")
             tvars = params
-            args = None
+            args = params
+        elif self in (Tuple, Callable):
+            tvars = _type_vars(params)
+            args = params
         elif self is _Protocol:
             # _Protocol is internal, don't check anything.
             tvars = params
-            args = None
+            args = params
         elif self.__origin__ in (Generic, _Protocol):
             # Can't subscript Generic[...] or _Protocol[...].
             raise TypeError("Cannot subscript already-subscripted %s" %
                             repr(self))
         else:
             # Subscripting a regular Generic subclass.
-            if not self.__parameters__:
-                raise TypeError("%s is not a generic class" % repr(self))
-            alen = len(params)
-            elen = len(self.__parameters__)
-            if alen != elen:
-                raise TypeError(
-                    "Too %s parameters for %s; actual %s, expected %s" %
-                    ("many" if alen > elen else "few", repr(self), alen, elen))
+            _check_generic(self, params)
             tvars = _type_vars(params)
             args = params
         return self.__class__(self.__name__,
-                              (self,) + self.__bases__,
+                              self.__bases__,
                               dict(self.__dict__),
                               tvars=tvars,
                               args=args,
                               origin=self,
-                              extra=self.__extra__)
+                              extra=self.__extra__,
+                              orig_bases=self.__orig_bases__)
 
     def __instancecheck__(self, instance):
         # Since we extend ABC.__subclasscheck__ and
@@ -1038,58 +1056,39 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
         # latter, we must extend __instancecheck__ too. For simplicity
         # we just skip the cache check -- instance checks for generic
         # classes are supposed to be rare anyways.
-        return self.__subclasscheck__(instance.__class__)
+        return issubclass(instance.__class__, self)
 
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if isinstance(cls, GenericMeta):
-            # For a class C(Generic[T]) where T is co-variant,
-            # C[X] is a subclass of C[Y] iff X is a subclass of Y.
-            origin = self.__origin__
-            if origin is not None and origin is cls.__origin__:
-                assert len(self.__args__) == len(origin.__parameters__)
-                assert len(cls.__args__) == len(origin.__parameters__)
-                for p_self, p_cls, p_origin in zip(self.__args__,
-                                                   cls.__args__,
-                                                   origin.__parameters__):
-                    if isinstance(p_origin, TypeVar):
-                        if p_origin.__covariant__:
-                            # Covariant -- p_cls must be a subclass of p_self.
-                            if not issubclass(p_cls, p_self):
-                                break
-                        elif p_origin.__contravariant__:
-                            # Contravariant.  I think it's the opposite. :-)
-                            if not issubclass(p_self, p_cls):
-                                break
-                        else:
-                            # Invariant -- p_cls and p_self must equal.
-                            if p_self != p_cls:
-                                break
-                    else:
-                        # If the origin's parameter is not a typevar,
-                        # insist on invariance.
-                        if p_self != p_cls:
-                            break
-                else:
-                    return True
-                # If we break out of the loop, the superclass gets a chance.
-        if super().__subclasscheck__(cls):
-            return True
-        if self.__extra__ is None or isinstance(cls, GenericMeta):
-            return False
-        return issubclass(cls, self.__extra__)
+    def __copy__(self):
+        return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
+                              self.__parameters__, self.__args__, self.__origin__,
+                              self.__extra__, self.__orig_bases__)
 
 
 # Prevent checks for Generic to crash when defining Generic.
 Generic = None
 
 
+def _generic_new(base_cls, cls, *args, **kwds):
+    # Assure type is erased on instantiation,
+    # but attempt to store it in __orig_class__
+    if cls.__origin__ is None:
+        return base_cls.__new__(cls)
+    else:
+        origin = _gorg(cls)
+        obj = base_cls.__new__(origin)
+        try:
+            obj.__orig_class__ = cls
+        except AttributeError:
+            pass
+        obj.__init__(*args, **kwds)
+        return obj
+
+
 class Generic(metaclass=GenericMeta):
     """Abstract base class for generic types.
 
-    A generic type is typically declared by inheriting from an
-    instantiation of this class with one or more type variables.
+    A generic type is typically declared by inheriting from
+    this class parameterized with one or more type variables.
     For example, a generic mapping type might be defined as::
 
       class Mapping(Generic[KT, VT]):
@@ -1109,13 +1108,208 @@ class Generic(metaclass=GenericMeta):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
-        if cls.__origin__ is None:
-            return cls.__next_in_mro__.__new__(cls)
+        if _geqv(cls, Generic):
+            raise TypeError("Type Generic cannot be instantiated; "
+                            "it can be used only as a base class")
+        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty:
+    """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+    to allow empty list/tuple in specific places, without allowing them
+    to sneak in where prohibited.
+    """
+
+
+class _TypingEllipsis:
+    """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+    """Metaclass for Tuple (internal)."""
+
+    @_tp_cache
+    def __getitem__(self, parameters):
+        if self.__origin__ is not None or not _geqv(self, Tuple):
+            # Normal generic rules apply if this is not the first subscription
+            # or a subscription of a subclass.
+            return super().__getitem__(parameters)
+        if parameters == ():
+            return super().__getitem__((_TypingEmpty,))
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        if len(parameters) == 2 and parameters[1] is ...:
+            msg = "Tuple[t, ...]: t must be a type."
+            p = _type_check(parameters[0], msg)
+            return super().__getitem__((p, _TypingEllipsis))
+        msg = "Tuple[t0, t1, ...]: each t must be a type."
+        parameters = tuple(_type_check(p, msg) for p in parameters)
+        return super().__getitem__(parameters)
+
+    def __instancecheck__(self, obj):
+        if self.__args__ == None:
+            return isinstance(obj, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        if self.__args__ == None:
+            return issubclass(cls, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with issubclass().")
+
+
+class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
+    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+    Example: Tuple[T1, T2] is a tuple of two elements corresponding
+    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
+    of an int, a float and a string.
+
+    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Tuple):
+            raise TypeError("Type Tuple cannot be instantiated; "
+                            "use tuple() instead")
+        return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+    """Metaclass for Callable (internal)."""
+
+    def __repr__(self):
+        if self.__origin__ is None:
+            return super().__repr__()
+        return self._tree_repr(self._subs_tree())
+
+    def _tree_repr(self, tree):
+        if _gorg(self) is not Callable:
+            return super()._tree_repr(tree)
+        # For actual Callable (not its subclass) we override
+        # super()._tree_repr() for nice formatting.
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        if arg_list[0] == '...':
+            return repr(tree[0]) + '[..., %s]' % arg_list[1]
+        return (repr(tree[0]) +
+                '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+    def __getitem__(self, parameters):
+        """A thin wrapper around __getitem_inner__ to provide the latter
+        with hashable arguments to improve speed.
+        """
+
+        if  self.__origin__ is not None or not _geqv(self, Callable):
+            return super().__getitem__(parameters)
+        if not isinstance(parameters, tuple) or len(parameters) != 2:
+            raise TypeError("Callable must be used as "
+                            "Callable[[arg, ...], result].")
+        args, result = parameters
+        if args is Ellipsis:
+            parameters = (Ellipsis, result)
         else:
-            origin = _gorg(cls)
-            obj = cls.__next_in_mro__.__new__(origin)
-            obj.__init__(*args, **kwds)
-            return obj
+            if not isinstance(args, list):
+                raise TypeError("Callable[args, result]: args must be a list."
+                                " Got %.100r." % (args,))
+            parameters = (tuple(args), result)
+        return self.__getitem_inner__(parameters)
+
+    @_tp_cache
+    def __getitem_inner__(self, parameters):
+        args, result = parameters
+        msg = "Callable[args, result]: result must be a type."
+        result = _type_check(result, msg)
+        if args is Ellipsis:
+            return super().__getitem__((_TypingEllipsis, result))
+        msg = "Callable[[arg, ...], result]: each arg must be a type."
+        args = tuple(_type_check(arg, msg) for arg in args)
+        parameters = args + (result,)
+        return super().__getitem__(parameters)
+
+
+class Callable(extra=collections_abc.Callable, metaclass = CallableMeta):
+    """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+    The subscription syntax must always be used with exactly two
+    values: the argument list and the return type.  The argument list
+    must be a list of types or ellipsis; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments,
+    such function types are rarely used as callback types.
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Callable):
+            raise TypeError("Type Callable cannot be instantiated; "
+                            "use a non-abstract subclass instead")
+        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _ClassVar(_FinalTypingBase, _root=True):
+    """Special type construct to mark class variables.
+
+    An annotation wrapped in ClassVar indicates that a given
+    attribute is intended to be used as a class variable and
+    should not be set on instances of that class. Usage::
+
+      class Starship:
+          stats: ClassVar[Dict[str, int]] = {} # class variable
+          damage: int = 10                     # instance variable
+
+    ClassVar accepts only types and cannot be further subscribed.
+
+    Note that ClassVar is not a class itself, and should not
+    be used with isinstance() or issubclass().
+    """
+
+    __slots__ = ('__type__',)
+
+    def __init__(self, tp=None, **kwds):
+        self.__type__ = tp
+
+    def __getitem__(self, item):
+        cls = type(self)
+        if self.__type__ is None:
+            return cls(_type_check(item,
+                       '{} accepts only single type.'.format(cls.__name__[1:])),
+                       _root=True)
+        raise TypeError('{} cannot be further subscripted'
+                        .format(cls.__name__[1:]))
+
+    def _eval_type(self, globalns, localns):
+        new_tp = _eval_type(self.__type__, globalns, localns)
+        if new_tp == self.__type__:
+            return self
+        return type(self)(new_tp, _root=True)
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__type__ is not None:
+            r += '[{}]'.format(_type_repr(self.__type__))
+        return r
+
+    def __hash__(self):
+        return hash((type(self).__name__, self.__type__))
+
+    def __eq__(self, other):
+        if not isinstance(other, _ClassVar):
+            return NotImplemented
+        if self.__type__ is not None:
+            return self.__type__ == other.__type__
+        return self is other
+
+
+ClassVar = _ClassVar(_root=True)
 
 
 def cast(typ, val):
@@ -1131,7 +1325,11 @@ def cast(typ, val):
 
 def _get_defaults(func):
     """Internal helper to extract the default arguments, by name."""
-    code = func.__code__
+    try:
+        code = func.__code__
+    except AttributeError:
+        # Some built-in functions don't have __code__, __defaults__, etc.
+        return {}
     pos_count = code.co_argcount
     arg_names = code.co_varnames
     arg_names = arg_names[:pos_count]
@@ -1146,12 +1344,20 @@ def _get_defaults(func):
 
 
 def get_type_hints(obj, globalns=None, localns=None):
-    """Return type hints for a function or method object.
+    """Return type hints for an object.
 
     This is often the same as obj.__annotations__, but it handles
     forward references encoded as string literals, and if necessary
     adds Optional[t] if a default value equal to None is set.
 
+    The argument may be a module, class, method, or function. The annotations
+    are returned as a dictionary. For classes, annotations include also
+    inherited members.
+
+    TypeError is raised if the argument is not of a type that can contain
+    annotations, and an empty dictionary is returned if no annotations are
+    present.
+
     BEWARE -- the behavior of globalns and localns is counterintuitive
     (unless you are familiar with how eval() and exec() work).  The
     search order is locals first, then globals.
@@ -1166,6 +1372,7 @@ def get_type_hints(obj, globalns=None, localns=None):
     - If two dict arguments are passed, they specify globals and
       locals, respectively.
     """
+
     if getattr(obj, '__no_type_check__', None):
         return {}
     if globalns is None:
@@ -1174,9 +1381,35 @@ def get_type_hints(obj, globalns=None, localns=None):
             localns = globalns
     elif localns is None:
         localns = globalns
+    # Classes require a special treatment.
+    if isinstance(obj, type):
+        hints = {}
+        for base in reversed(obj.__mro__):
+            ann = base.__dict__.get('__annotations__', {})
+            for name, value in ann.items():
+                if value is None:
+                    value = type(None)
+                if isinstance(value, str):
+                    value = _ForwardRef(value)
+                value = _eval_type(value, globalns, localns)
+                hints[name] = value
+        return hints
+    hints = getattr(obj, '__annotations__', None)
+    if hints is None:
+        # Return empty annotations for something that _could_ have them.
+        if (isinstance(obj, types.FunctionType) or
+            isinstance(obj, types.BuiltinFunctionType) or
+            isinstance(obj, types.MethodType) or
+            isinstance(obj, types.ModuleType)):
+            return {}
+        else:
+            raise TypeError('{!r} is not a module, class, method, '
+                            'or function.'.format(obj))
     defaults = _get_defaults(obj)
-    hints = dict(obj.__annotations__)
+    hints = dict(hints)
     for name, value in hints.items():
+        if value is None:
+            value = type(None)
         if isinstance(value, str):
             value = _ForwardRef(value)
         value = _eval_type(value, globalns, localns)
@@ -1190,17 +1423,25 @@ def no_type_check(arg):
     """Decorator to indicate that annotations are not type hints.
 
     The argument must be a class or function; if it is a class, it
-    applies recursively to all methods defined in that class (but not
-    to methods defined in its superclasses or subclasses).
+    applies recursively to all methods and classes defined in that class
+    (but not to methods defined in its superclasses or subclasses).
 
-    This mutates the function(s) in place.
+    This mutates the function(s) or class(es) in place.
     """
     if isinstance(arg, type):
-        for obj in arg.__dict__.values():
+        arg_attrs = arg.__dict__.copy()
+        for attr, val in arg.__dict__.items():
+            if val in arg.__bases__:
+                arg_attrs.pop(attr)
+        for obj in arg_attrs.values():
             if isinstance(obj, types.FunctionType):
                 obj.__no_type_check__ = True
-    else:
+            if isinstance(obj, type):
+                no_type_check(obj)
+    try:
         arg.__no_type_check__ = True
+    except TypeError: # built-in classes
+        pass
     return arg
 
 
@@ -1266,6 +1507,8 @@ class _ProtocolMeta(GenericMeta):
     """
 
     def __instancecheck__(self, obj):
+        if _Protocol not in self.__bases__:
+            return super().__instancecheck__(obj)
         raise TypeError("Protocols cannot be used with isinstance().")
 
     def __subclasscheck__(self, cls):
@@ -1304,6 +1547,8 @@ class _ProtocolMeta(GenericMeta):
                 else:
                     if (not attr.startswith('_abc_') and
                             attr != '__abstractmethods__' and
+                            attr != '__annotations__' and
+                            attr != '__weakref__' and
                             attr != '_is_protocol' and
                             attr != '__dict__' and
                             attr != '__args__' and
@@ -1312,7 +1557,9 @@ class _ProtocolMeta(GenericMeta):
                             attr != '__next_in_mro__' and
                             attr != '__parameters__' and
                             attr != '__origin__' and
+                            attr != '__orig_bases__' and
                             attr != '__extra__' and
+                            attr != '__tree_hash__' and
                             attr != '__module__'):
                         attrs.add(attr)
 
@@ -1322,7 +1569,7 @@ class _ProtocolMeta(GenericMeta):
 class _Protocol(metaclass=_ProtocolMeta):
     """Internal base class for protocol classes.
 
-    This implements a simple-minded structural isinstance check
+    This implements a simple-minded structural issubclass check
     (similar but more general than the one-offs in collections.abc
     such as Hashable).
     """
@@ -1341,8 +1588,16 @@ Hashable = collections_abc.Hashable  # Not generic.
 if hasattr(collections_abc, 'Awaitable'):
     class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
         __slots__ = ()
-else:
-    Awaitable = None
+
+    __all__.append('Awaitable')
+
+
+if hasattr(collections_abc, 'Coroutine'):
+    class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co],
+                    extra=collections_abc.Coroutine):
+        __slots__ = ()
+
+    __all__.append('Coroutine')
 
 
 if hasattr(collections_abc, 'AsyncIterable'):
@@ -1354,9 +1609,8 @@ if hasattr(collections_abc, 'AsyncIterable'):
                         extra=collections_abc.AsyncIterator):
         __slots__ = ()
 
-else:
-    AsyncIterable = None
-    AsyncIterator = None
+    __all__.append('AsyncIterable')
+    __all__.append('AsyncIterator')
 
 
 class Iterable(Generic[T_co], extra=collections_abc.Iterable):
@@ -1434,109 +1688,117 @@ class Container(Generic[T_co], extra=collections_abc.Container):
     __slots__ = ()
 
 
-# Callable was defined earlier.
+if hasattr(collections_abc, 'Collection'):
+    class Collection(Sized, Iterable[T_co], Container[T_co],
+                     extra=collections_abc.Collection):
+        __slots__ = ()
+
+    __all__.append('Collection')
 
 
-class AbstractSet(Sized, Iterable[T_co], Container[T_co],
-                  extra=collections_abc.Set):
-    pass
+# Callable was defined earlier.
+
+if hasattr(collections_abc, 'Collection'):
+    class AbstractSet(Collection[T_co],
+                      extra=collections_abc.Set):
+        __slots__ = ()
+else:
+    class AbstractSet(Sized, Iterable[T_co], Container[T_co],
+                      extra=collections_abc.Set):
+        __slots__ = ()
 
 
 class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
-    pass
+    __slots__ = ()
 
 
-# NOTE: Only the value type is covariant.
-class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
-              extra=collections_abc.Mapping):
-    pass
+# NOTE: It is only covariant in the value type.
+if hasattr(collections_abc, 'Collection'):
+    class Mapping(Collection[KT], Generic[KT, VT_co],
+                  extra=collections_abc.Mapping):
+        __slots__ = ()
+else:
+    class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
+                  extra=collections_abc.Mapping):
+        __slots__ = ()
 
 
 class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
-    pass
+    __slots__ = ()
 
 if hasattr(collections_abc, 'Reversible'):
-    class Sequence(Sized, Reversible[T_co], Container[T_co],
-               extra=collections_abc.Sequence):
-        pass
+    if hasattr(collections_abc, 'Collection'):
+        class Sequence(Reversible[T_co], Collection[T_co],
+                   extra=collections_abc.Sequence):
+            __slots__ = ()
+    else:
+        class Sequence(Sized, Reversible[T_co], Container[T_co],
+                   extra=collections_abc.Sequence):
+            __slots__ = ()
 else:
     class Sequence(Sized, Iterable[T_co], Container[T_co],
                    extra=collections_abc.Sequence):
-        pass
+        __slots__ = ()
 
 
 class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
-    pass
+    __slots__ = ()
 
 
 class ByteString(Sequence[int], extra=collections_abc.ByteString):
-    pass
-
-
-ByteString.register(type(memoryview(b'')))
+    __slots__ = ()
 
 
 class List(list, MutableSequence[T], extra=list):
 
+    __slots__ = ()
+
     def __new__(cls, *args, **kwds):
         if _geqv(cls, List):
             raise TypeError("Type List cannot be instantiated; "
                             "use list() instead")
-        return list.__new__(cls, *args, **kwds)
+        return _generic_new(list, cls, *args, **kwds)
 
 
 class Set(set, MutableSet[T], extra=set):
 
+    __slots__ = ()
+
     def __new__(cls, *args, **kwds):
         if _geqv(cls, Set):
             raise TypeError("Type Set cannot be instantiated; "
                             "use set() instead")
-        return set.__new__(cls, *args, **kwds)
-
+        return _generic_new(set, cls, *args, **kwds)
 
-class _FrozenSetMeta(GenericMeta):
-    """This metaclass ensures set is not a subclass of FrozenSet.
 
-    Without this metaclass, set would be considered a subclass of
-    FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and
-    set is a subclass of that.
-    """
-
-    def __subclasscheck__(self, cls):
-        if issubclass(cls, Set):
-            return False
-        return super().__subclasscheck__(cls)
-
-
-class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta,
-                extra=frozenset):
+class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
     __slots__ = ()
 
     def __new__(cls, *args, **kwds):
         if _geqv(cls, FrozenSet):
             raise TypeError("Type FrozenSet cannot be instantiated; "
                             "use frozenset() instead")
-        return frozenset.__new__(cls, *args, **kwds)
+        return _generic_new(frozenset, cls, *args, **kwds)
 
 
 class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
-    pass
+    __slots__ = ()
 
 
 class KeysView(MappingView[KT], AbstractSet[KT],
                extra=collections_abc.KeysView):
-    pass
+    __slots__ = ()
 
 
 class ItemsView(MappingView[Tuple[KT, VT_co]],
                 AbstractSet[Tuple[KT, VT_co]],
                 Generic[KT, VT_co],
                 extra=collections_abc.ItemsView):
-    pass
+    __slots__ = ()
 
 
 class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
-    pass
+    __slots__ = ()
 
 
 if hasattr(contextlib, 'AbstractContextManager'):
@@ -1547,20 +1809,24 @@ if hasattr(contextlib, 'AbstractContextManager'):
 
 class Dict(dict, MutableMapping[KT, VT], extra=dict):
 
+    __slots__ = ()
+
     def __new__(cls, *args, **kwds):
         if _geqv(cls, Dict):
             raise TypeError("Type Dict cannot be instantiated; "
                             "use dict() instead")
-        return dict.__new__(cls, *args, **kwds)
+        return _generic_new(dict, cls, *args, **kwds)
 
 class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
                   extra=collections.defaultdict):
 
+    __slots__ = ()
+
     def __new__(cls, *args, **kwds):
         if _geqv(cls, DefaultDict):
             raise TypeError("Type DefaultDict cannot be instantiated; "
                             "use collections.defaultdict() instead")
-        return collections.defaultdict.__new__(cls, *args, **kwds)
+        return _generic_new(collections.defaultdict, cls, *args, **kwds)
 
 # Determine what base class to use for Generator.
 if hasattr(collections_abc, 'Generator'):
@@ -1579,15 +1845,15 @@ class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
         if _geqv(cls, Generator):
             raise TypeError("Type Generator cannot be instantiated; "
                             "create a subclass instead")
-        return super().__new__(cls, *args, **kwds)
+        return _generic_new(_G_base, cls, *args, **kwds)
 
 
 # Internal type variable used for Type[].
-CT = TypeVar('CT', covariant=True, bound=type)
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
 
 
 # This is not a real generic class.  Don't use outside annotations.
-class Type(type, Generic[CT], extra=type):
+class Type(Generic[CT_co], extra=type):
     """A special construct usable to annotate class objects.
 
     For example, suppose we have the following classes::
@@ -1611,13 +1877,43 @@ class Type(type, Generic[CT], extra=type):
     At this point the type checker knows that joe has type BasicUser.
     """
 
+    __slots__ = ()
+
+
+def _make_nmtuple(name, types):
+    msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
+    types = [(n, _type_check(t, msg)) for n, t in types]
+    nm_tpl = collections.namedtuple(name, [n for n, t in types])
+    nm_tpl._field_types = dict(types)
+    try:
+        nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):
+        pass
+    return nm_tpl
+
 
-def NamedTuple(typename, fields):
+_PY36 = sys.version_info[:2] >= (3, 6)
+
+
+class NamedTupleMeta(type):
+
+    def __new__(cls, typename, bases, ns):
+        if ns.get('_root', False):
+            return super().__new__(cls, typename, bases, ns)
+        if not _PY36:
+            raise TypeError("Class syntax for NamedTuple is only supported"
+                            " in Python 3.6+")
+        types = ns.get('__annotations__', {})
+        return _make_nmtuple(typename, types.items())
+
+class NamedTuple(metaclass=NamedTupleMeta):
     """Typed version of namedtuple.
 
-    Usage::
+    Usage in Python versions >= 3.6::
 
-        Employee = typing.NamedTuple('Employee', [('name', str), 'id', int)])
+        class Employee(NamedTuple):
+            name: str
+            id: int
 
     This is equivalent to::
 
@@ -1626,17 +1922,26 @@ def NamedTuple(typename, fields):
     The resulting class has one extra attribute: _field_types,
     giving a dict mapping field names to types.  (The field names
     are in the _fields attribute, which is part of the namedtuple
-    API.)
+    API.) Alternative equivalent keyword syntax is also accepted::
+
+        Employee = NamedTuple('Employee', name=str, id=int)
+
+    In Python versions <= 3.5 use::
+
+        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
     """
-    fields = [(n, t) for n, t in fields]
-    cls = collections.namedtuple(typename, [n for n, t in fields])
-    cls._field_types = dict(fields)
-    # Set the module to the caller's module (otherwise it'd be 'typing').
-    try:
-        cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
-    except (AttributeError, ValueError):
-        pass
-    return cls
+    _root = True
+
+    def __new__(self, typename, fields=None, **kwargs):
+        if kwargs and not _PY36:
+            raise TypeError("Keyword syntax for NamedTuple is only supported"
+                            " in Python 3.6+")
+        if fields is None:
+            fields = kwargs.items()
+        elif kwargs:
+            raise TypeError("Either list of fields or keywords"
+                            " can be provided to NamedTuple, not both")
+        return _make_nmtuple(typename, fields)
 
 
 def NewType(name, tp):
@@ -1798,7 +2103,7 @@ class TextIO(IO[str]):
         pass
 
     @abstractproperty
-    def errors(self) -> str:
+    def errors(self) -> Optional[str]:
         pass
 
     @abstractproperty
diff --git a/mypy/applytype.py b/mypy/applytype.py
index 5f066e5..d976700 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,13 +3,13 @@ from typing import List, Dict
 import mypy.subtypes
 from mypy.sametypes import is_same_type
 from mypy.expandtype import expand_type
-from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, Void
+from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType
 from mypy.messages import MessageBuilder
 from mypy.nodes import Context
 
 
 def apply_generic_arguments(callable: CallableType, types: List[Type],
-                            msg: MessageBuilder, context: Context) -> Type:
+                            msg: MessageBuilder, context: Context) -> CallableType:
     """Apply generic type arguments to a callable type.
 
     For example, applying [int] to 'def [T] (T) -> T' results in
@@ -18,10 +18,7 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
     Note that each type can be None; in this case, it will not be applied.
     """
     tvars = callable.variables
-    if len(tvars) != len(types):
-        msg.incompatible_type_application(len(tvars), len(types), context)
-        return AnyType()
-
+    assert len(tvars) == len(types)
     # Check that inferred type variable values are compatible with allowed
     # values and bounds.  Also, promote subtype values to allowed values.
     types = types[:]
diff --git a/mypy/binder.py b/mypy/binder.py
index bc633e5..23be259 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -1,20 +1,28 @@
-from typing import (Any, Dict, List, Set, Iterator, Union)
+from typing import (Dict, List, Set, Iterator, Union)
 from contextlib import contextmanager
 
 from mypy.types import Type, AnyType, PartialType
-from mypy.nodes import (Node, Expression, Var, RefExpr, SymbolTableNode)
+from mypy.nodes import (Key, Node, Expression, Var, RefExpr, SymbolTableNode)
 
 from mypy.subtypes import is_subtype
 from mypy.join import join_simple
 from mypy.sametypes import is_same_type
 
 
-class Frame(Dict[Any, Type]):
-    pass
+class Frame(Dict[Key, Type]):
+    """A Frame represents a specific point in the execution of a program.
+    It carries information about the current types of expressions at
+    that point, arising either from assignments to those expressions
+    or the result of isinstance checks. It also records whether it is
+    possible to reach that point at all.
 
+    This information is not copied into a new Frame when it is pushed
+    onto the stack, so a given Frame only has information about types
+    that were assigned in that frame.
+    """
 
-class Key(AnyType):
-    pass
+    def __init__(self) -> None:
+        self.unreachable = False
 
 
 class ConditionalTypeBinder:
@@ -39,13 +47,19 @@ class ConditionalTypeBinder:
     """
 
     def __init__(self) -> None:
-        # The set of frames currently used.  These map
+        # The stack of frames currently used.  These map
         # expr.literal_hash -- literals like 'foo.bar' --
-        # to types.
+        # to types. The last element of this list is the
+        # top-most, current frame. Each earlier element
+        # records the state as of when that frame was last
+        # on top of the stack.
         self.frames = [Frame()]
 
         # For frames higher in the stack, we record the set of
-        # Frames that can escape there
+        # Frames that can escape there, either by falling off
+        # the end of the frame or by a loop control construct
+        # or raised exception. The last element of self.frames
+        # has no corresponding element in this list.
         self.options_on_return = []  # type: List[List[Frame]]
 
         # Maps expr.literal_hash] to get_declaration(expr)
@@ -55,27 +69,20 @@ class ConditionalTypeBinder:
         # Whenever a new key (e.g. x.a.b) is added, we update this
         self.dependencies = {}  # type: Dict[Key, Set[Key]]
 
-        # breaking_out is set to True on return/break/continue/raise
-        # It is cleared on pop_frame() and placed in last_pop_breaking_out
-        # Lines of code after breaking_out = True are unreachable and not
-        # typechecked.
-        self.breaking_out = False
-
         # Whether the last pop changed the newly top frame on exit
         self.last_pop_changed = False
-        # Whether the last pop was necessarily breaking out, and couldn't fall through
-        self.last_pop_breaking_out = False
 
         self.try_frames = set()  # type: Set[int]
-        self.loop_frames = []  # type: List[int]
+        self.break_frames = []  # type: List[int]
+        self.continue_frames = []  # type: List[int]
 
     def _add_dependencies(self, key: Key, value: Key = None) -> None:
         if value is None:
             value = key
         else:
             self.dependencies.setdefault(key, set()).add(value)
-        if isinstance(key, tuple):
-            for elt in key:
+        for elt in key:
+            if isinstance(elt, Key):
                 self._add_dependencies(elt, value)
 
     def push_frame(self) -> Frame:
@@ -105,9 +112,17 @@ class ConditionalTypeBinder:
             self._add_dependencies(key)
         self._push(key, typ)
 
+    def unreachable(self) -> None:
+        self.frames[-1].unreachable = True
+
     def get(self, expr: Union[Expression, Var]) -> Type:
         return self._get(expr.literal_hash)
 
+    def is_unreachable(self) -> bool:
+        # TODO: Copy the value of unreachable into new frames to avoid
+        # this traversal on every statement?
+        return any(f.unreachable for f in self.frames)
+
     def cleanse(self, expr: Expression) -> None:
         """Remove all references to a Node from the binder."""
         self._cleanse_key(expr.literal_hash)
@@ -126,6 +141,7 @@ class ConditionalTypeBinder:
         options are the same.
         """
 
+        frames = [f for f in frames if not f.unreachable]
         changed = False
         keys = set(key for f in frames for key in f)
 
@@ -133,6 +149,9 @@ class ConditionalTypeBinder:
             current_value = self._get(key)
             resulting_values = [f.get(key, current_value) for f in frames]
             if any(x is None for x in resulting_values):
+                # We didn't know anything about key before
+                # (current_value must be None), and we still don't
+                # know anything about key in at least one possible frame.
                 continue
 
             if isinstance(self.declarations.get(key), AnyType):
@@ -147,27 +166,32 @@ class ConditionalTypeBinder:
                 self._push(key, type)
                 changed = True
 
+        self.frames[-1].unreachable = not frames
+
         return changed
 
-    def pop_frame(self, fall_through: int = 0) -> Frame:
+    def pop_frame(self, can_skip: bool, fall_through: int) -> Frame:
         """Pop a frame and return it.
 
         See frame_context() for documentation of fall_through.
         """
-        if fall_through and not self.breaking_out:
+
+        if fall_through > 0:
             self.allow_jump(-fall_through)
 
         result = self.frames.pop()
         options = self.options_on_return.pop()
 
+        if can_skip:
+            options.insert(0, self.frames[-1])
+
         self.last_pop_changed = self.update_from_options(options)
-        self.last_pop_breaking_out = self.breaking_out
 
         return result
 
-    def get_declaration(self, node: Node) -> Type:
-        if isinstance(node, (RefExpr, SymbolTableNode)) and isinstance(node.node, Var):
-            type = node.node.type
+    def get_declaration(self, expr: Node) -> Type:
+        if isinstance(expr, RefExpr) and isinstance(expr.node, Var):
+            type = expr.node.type
             if isinstance(type, PartialType):
                 return None
             return type
@@ -239,25 +263,74 @@ class ConditionalTypeBinder:
         frame = Frame()
         for f in self.frames[index + 1:]:
             frame.update(f)
+            if f.unreachable:
+                frame.unreachable = True
         self.options_on_return[index].append(frame)
 
-    def push_loop_frame(self) -> None:
-        self.loop_frames.append(len(self.frames) - 1)
+    def handle_break(self) -> None:
+        self.allow_jump(self.break_frames[-1])
+        self.unreachable()
 
-    def pop_loop_frame(self) -> None:
-        self.loop_frames.pop()
+    def handle_continue(self) -> None:
+        self.allow_jump(self.continue_frames[-1])
+        self.unreachable()
 
     @contextmanager
-    def frame_context(self, fall_through: int = 0) -> Iterator[Frame]:
+    def frame_context(self, *, can_skip: bool, fall_through: int = 1,
+                      break_frame: int = 0, continue_frame: int = 0,
+                      try_frame: bool = False) -> Iterator[Frame]:
         """Return a context manager that pushes/pops frames on enter/exit.
 
-        If fall_through > 0, then it will allow the frame to escape to
-        its ancestor `fall_through` levels higher.
+        If can_skip is True, control flow is allowed to bypass the
+        newly-created frame.
+
+        If fall_through > 0, then it will allow control flow that
+        falls off the end of the frame to escape to its ancestor
+        `fall_through` levels higher. Otherwise control flow ends
+        at the end of the frame.
+
+        If break_frame > 0, then 'break' statements within this frame
+        will jump out to the frame break_frame levels higher than the
+        frame created by this call to frame_context. Similarly for
+        continue_frame and 'continue' statements.
+
+        If try_frame is true, then execution is allowed to jump at any
+        point within the newly created frame (or its descendents) to
+        its parent (i.e., to the frame that was on top before this
+        call to frame_context).
 
-        A simple 'with binder.frame_context(): pass' will change the
-        last_pop_* flags but nothing else.
+        After the context manager exits, self.last_pop_changed indicates
+        whether any types changed in the newly-topmost frame as a result
+        of popping this frame.
+        """
+        assert len(self.frames) > 1
+
+        if break_frame:
+            self.break_frames.append(len(self.frames) - break_frame)
+        if continue_frame:
+            self.continue_frames.append(len(self.frames) - continue_frame)
+        if try_frame:
+            self.try_frames.add(len(self.frames) - 1)
+
+        new_frame = self.push_frame()
+        if try_frame:
+            # An exception may occur immediately
+            self.allow_jump(-1)
+        yield new_frame
+        self.pop_frame(can_skip, fall_through)
+
+        if break_frame:
+            self.break_frames.pop()
+        if continue_frame:
+            self.continue_frames.pop()
+        if try_frame:
+            self.try_frames.remove(len(self.frames) - 1)
+
+    @contextmanager
+    def top_frame_context(self) -> Iterator[Frame]:
+        """A variant of frame_context for use at the top level of
+        a namespace (module, function, or class).
         """
-        was_breaking_out = self.breaking_out
+        assert len(self.frames) == 1
         yield self.push_frame()
-        self.pop_frame(fall_through)
-        self.breaking_out = was_breaking_out
+        self.pop_frame(True, 0)
diff --git a/mypy/build.py b/mypy/build.py
index b7ee2d0..cbc088f 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -15,7 +15,6 @@ import collections
 import contextlib
 import hashlib
 import json
-import os
 import os.path
 import sys
 import time
@@ -24,7 +23,7 @@ from os.path import dirname, basename
 from typing import (AbstractSet, Dict, Iterable, Iterator, List,
                     NamedTuple, Optional, Set, Tuple, Union)
 
-from mypy.nodes import (MypyFile, Import, ImportFrom, ImportAll)
+from mypy.nodes import (MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
 from mypy.semanal import FirstPass, SemanticAnalyzer, ThirdPass
 from mypy.checker import TypeChecker
 from mypy.indirection import TypeIndirectionVisitor
@@ -33,9 +32,11 @@ from mypy.report import Reports
 from mypy import moduleinfo
 from mypy import util
 from mypy.fixup import fixup_module_pass_one, fixup_module_pass_two
+from mypy.nodes import Expression
 from mypy.options import Options
 from mypy.parse import parse
 from mypy.stats import dump_type_stats
+from mypy.types import Type
 from mypy.version import __version__
 
 
@@ -49,6 +50,7 @@ PYTHON_EXTENSIONS = ['.pyi', '.py']
 Graph = Dict[str, 'State']
 
 
+# TODO: Get rid of BuildResult.  We might as well return a BuildManager.
 class BuildResult:
     """The result of a successful build.
 
@@ -62,7 +64,7 @@ class BuildResult:
     def __init__(self, manager: 'BuildManager') -> None:
         self.manager = manager
         self.files = manager.modules
-        self.types = manager.type_checker.type_map
+        self.types = manager.all_types
         self.errors = manager.errors.messages()
 
 
@@ -73,11 +75,6 @@ class BuildSource:
         self.module = module or '__main__'
         self.text = text
 
-    @property
-    def effective_path(self) -> str:
-        """Return the effective path (ie, <string> if its from in memory)"""
-        return self.path or '<string>'
-
 
 class BuildSourceSet:
     """Efficiently test a file's membership in the set of build sources."""
@@ -132,7 +129,9 @@ def build(sources: List[BuildSource],
     find_module_clear_caches()
 
     # Determine the default module search path.
-    lib_path = default_lib_path(data_dir, options.python_version)
+    lib_path = default_lib_path(data_dir,
+                                options.python_version,
+                                custom_typeshed_dir=options.custom_typeshed_dir)
 
     if options.use_builtins_fixtures:
         # Use stub builtins (to speed up test cases and to make them easier to
@@ -154,6 +153,9 @@ def build(sources: List[BuildSource],
         # to the lib_path
         lib_path.insert(0, os.getcwd())
 
+    # Prepend a config-defined mypy path.
+    lib_path[:0] = options.mypy_path
+
     # Add MYPYPATH environment variable to front of library path, if defined.
     lib_path[:0] = mypy_path()
 
@@ -184,7 +186,7 @@ def build(sources: List[BuildSource],
         manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
                     (time.time() - manager.start_time,
                      len(manager.modules),
-                     len(manager.type_checker.type_map),
+                     len(manager.all_types),
                      manager.errors.num_messages()))
         # Finish the HTML or XML reports even if CompileError was raised.
         reports.finish()
@@ -249,15 +251,20 @@ def mypy_path() -> List[str]:
     return path_env.split(os.pathsep)
 
 
-def default_lib_path(data_dir: str, pyversion: Tuple[int, int]) -> List[str]:
+def default_lib_path(data_dir: str,
+                     pyversion: Tuple[int, int],
+                     custom_typeshed_dir: Optional[str]) -> List[str]:
     """Return default standard library search paths."""
     # IDEA: Make this more portable.
     path = []  # type: List[str]
 
-    auto = os.path.join(data_dir, 'stubs-auto')
-    if os.path.isdir(auto):
-        data_dir = auto
-
+    if custom_typeshed_dir:
+        typeshed_dir = custom_typeshed_dir
+    else:
+        auto = os.path.join(data_dir, 'stubs-auto')
+        if os.path.isdir(auto):
+            data_dir = auto
+        typeshed_dir = os.path.join(data_dir, "typeshed")
     # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
     # is that a module added with 3.4 will still be present in Python 3.5.
     versions = ["%d.%d" % (pyversion[0], minor)
@@ -266,7 +273,7 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int]) -> List[str]:
     # (Note that 3.1 and 3.0 aren't really supported, but we don't care.)
     for v in versions + [str(pyversion[0]), '2and3']:
         for lib_type in ['stdlib', 'third_party']:
-            stubdir = os.path.join(data_dir, 'typeshed', lib_type, v)
+            stubdir = os.path.join(typeshed_dir, lib_type, v)
             if os.path.isdir(stubdir):
                 path.append(stubdir)
 
@@ -303,10 +310,25 @@ CacheMeta = NamedTuple('CacheMeta',
 PRI_HIGH = 5  # top-level "from X import blah"
 PRI_MED = 10  # top-level "import X"
 PRI_LOW = 20  # either form inside a function
+PRI_MYPY = 25  # inside "if MYPY" or "if TYPE_CHECKING"
 PRI_INDIRECT = 30  # an indirect dependency
 PRI_ALL = 99  # include all priorities
 
 
+def import_priority(imp: ImportBase, toplevel_priority: int) -> int:
+    """Compute import priority from an import node."""
+    if not imp.is_top_level:
+        # Inside a function
+        return PRI_LOW
+    if imp.is_mypy_only:
+        # Inside "if MYPY" or "if typing.TYPE_CHECKING"
+        return max(PRI_MYPY, toplevel_priority)
+    # A regular import; priority determined by argument.
+    return toplevel_priority
+
+
+# TODO: Get rid of all_types.  It's not used except for one log message.
+#       Maybe we could instead publish a map from module ID to its type_map.
 class BuildManager:
     """This class holds shared state for building a mypy program.
 
@@ -322,7 +344,7 @@ class BuildManager:
                        Semantic analyzer, pass 2
       semantic_analyzer_pass3:
                        Semantic analyzer, pass 3
-      type_checker:    Type checker
+      all_types:       Map {Expression: Type} collected from all modules
       errors:          Used for reporting all errors
       options:         Build options
       missing_modules: Set of modules that could not be imported encountered so far
@@ -349,7 +371,7 @@ class BuildManager:
         self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
         self.modules = self.semantic_analyzer.modules
         self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors)
-        self.type_checker = TypeChecker(self.errors, self.modules)
+        self.all_types = {}  # type: Dict[Expression, Type]
         self.indirection_detector = TypeIndirectionVisitor()
         self.missing_modules = set()  # type: Set[str]
         self.stale_modules = set()  # type: Set[str]
@@ -390,20 +412,21 @@ class BuildManager:
         for imp in file.imports:
             if not imp.is_unreachable:
                 if isinstance(imp, Import):
-                    pri = PRI_MED if imp.is_top_level else PRI_LOW
+                    pri = import_priority(imp, PRI_MED)
+                    ancestor_pri = import_priority(imp, PRI_LOW)
                     for id, _ in imp.ids:
                         ancestor_parts = id.split(".")[:-1]
                         ancestors = []
                         for part in ancestor_parts:
                             ancestors.append(part)
-                            res.append((PRI_LOW, ".".join(ancestors), imp.line))
+                            res.append((ancestor_pri, ".".join(ancestors), imp.line))
                         res.append((pri, id, imp.line))
                 elif isinstance(imp, ImportFrom):
                     cur_id = correct_rel_imp(imp)
                     pos = len(res)
                     all_are_submodules = True
                     # Also add any imported names that are submodules.
-                    pri = PRI_MED if imp.is_top_level else PRI_LOW
+                    pri = import_priority(imp, PRI_MED)
                     for name, __ in imp.names:
                         sub_id = cur_id + '.' + name
                         if self.is_module(sub_id):
@@ -416,10 +439,10 @@ class BuildManager:
                     # cur_id is also a dependency, and we should
                     # insert it *before* any submodules.
                     if not all_are_submodules:
-                        pri = PRI_HIGH if imp.is_top_level else PRI_LOW
+                        pri = import_priority(imp, PRI_HIGH)
                         res.insert(pos, ((pri, cur_id, imp.line)))
                 elif isinstance(imp, ImportAll):
-                    pri = PRI_HIGH if imp.is_top_level else PRI_LOW
+                    pri = import_priority(imp, PRI_HIGH)
                     res.append((pri, correct_rel_imp(imp), imp.line))
 
         return res
@@ -461,9 +484,9 @@ class BuildManager:
                                'or using the "--silent-imports" flag would help)',
                                severity='note', only_once=True)
 
-    def report_file(self, file: MypyFile) -> None:
+    def report_file(self, file: MypyFile, type_map: Dict[Expression, Type]) -> None:
         if self.source_set.is_source(file):
-            self.reports.file(file, type_map=self.type_checker.type_map)
+            self.reports.file(file, type_map)
 
     def log(self, *message: str) -> None:
         if self.options.verbosity >= 1:
@@ -774,7 +797,7 @@ def is_meta_fresh(meta: Optional[CacheMeta], id: str, path: str, manager: BuildM
     st = manager.get_stat(path)  # TODO: Errors
     if st.st_mtime != meta.mtime or st.st_size != meta.size:
         manager.log('Metadata abandoned for {}: file {} is modified'.format(id, path))
-        return None
+        return False
 
     # It's a match on (id, path, mtime, size).
     # Check data_json; assume if its mtime matches it's good.
@@ -859,7 +882,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
     st = manager.get_stat(path)  # TODO: Handle errors
     mtime = st.st_mtime
     size = st.st_size
-    options = manager.options.clone_for_file(path)
+    options = manager.options.clone_for_module(id)
     meta = {'id': id,
             'path': path,
             'mtime': mtime,
@@ -1099,7 +1122,7 @@ class State:
         else:
             self.import_context = []
         self.id = id or '__main__'
-        self.options = manager.options.clone_for_file(path or '')
+        self.options = manager.options.clone_for_module(self.id)
         if not path and source is None:
             file_id = id
             if id == 'builtins' and self.options.python_version[0] == 2:
@@ -1407,23 +1430,42 @@ class State:
             if self.options.dump_type_stats:
                 dump_type_stats(self.tree, self.xpath)
 
-    def type_check(self) -> None:
+    def type_check_first_pass(self) -> None:
         manager = self.manager
         if self.options.semantic_analysis_only:
             return
         with self.wrap_context():
-            manager.type_checker.visit_file(self.tree, self.xpath, self.options)
+            self.type_checker = TypeChecker(manager.errors, manager.modules, self.options,
+                                            self.tree, self.xpath)
+            self.type_checker.check_first_pass()
+
+    def type_check_second_pass(self) -> bool:
+        if self.options.semantic_analysis_only:
+            return False
+        with self.wrap_context():
+            return self.type_checker.check_second_pass()
+
+    def finish_passes(self) -> None:
+        manager = self.manager
+        if self.options.semantic_analysis_only:
+            return
+        with self.wrap_context():
+            manager.all_types.update(self.type_checker.type_map)
 
             if self.options.incremental:
-                self._patch_indirect_dependencies(manager.type_checker.module_refs)
+                self._patch_indirect_dependencies(self.type_checker.module_refs,
+                                                  self.type_checker.type_map)
 
             if self.options.dump_inference_stats:
                 dump_type_stats(self.tree, self.xpath, inferred=True,
-                                typemap=manager.type_checker.type_map)
-            manager.report_file(self.tree)
-
-    def _patch_indirect_dependencies(self, module_refs: Set[str]) -> None:
-        types = self.manager.type_checker.module_type_map.values()
+                                typemap=self.type_checker.type_map)
+            manager.report_file(self.tree, self.type_checker.type_map)
+
+    def _patch_indirect_dependencies(self,
+                                     module_refs: Set[str],
+                                     type_map: Dict[Expression, Type]) -> None:
+        types = set(type_map.values())
+        types.discard(None)
         valid = self.valid_references()
 
         encountered = self.manager.indirection_detector.find_modules(types) | module_refs
@@ -1469,7 +1511,7 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
     manager.log("Loaded graph with %d nodes" % len(graph))
     process_graph(graph, manager)
     if manager.options.warn_unused_ignores:
-        # TODO: This could also be a per-file option.
+        # TODO: This could also be a per-module option.
         manager.errors.generate_unused_ignore_notes()
 
 
@@ -1650,7 +1692,11 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
             process_stale_scc(graph, scc)
 
     sccs_left = len(fresh_scc_queue)
-    manager.log("{} fresh SCCs left in queue (and will remain unprocessed)".format(sccs_left))
+    if sccs_left:
+        manager.log("{} fresh SCCs left in queue (and will remain unprocessed)".format(sccs_left))
+        manager.trace(str(fresh_scc_queue))
+    else:
+        manager.log("No fresh SCCs left in queue")
 
 
 def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) -> List[str]:
@@ -1675,8 +1721,8 @@ def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) ->
     each SCC thus found.  The recursion is bounded because at each
     recursion the spread in priorities is (at least) one less.
 
-    In practice there are only a few priority levels (currently
-    N=3) and in the worst case we just carry out the same algorithm
+    In practice there are only a few priority levels (less than a
+    dozen) and in the worst case we just carry out the same algorithm
     for finding SCCs N times.  Thus the complexity is no worse than
     the complexity of the original SCC-finding algorithm -- see
     strongly_connected_components() below for a reference.
@@ -1722,7 +1768,15 @@ def process_stale_scc(graph: Graph, scc: List[str]) -> None:
     for id in scc:
         graph[id].semantic_analysis_pass_three()
     for id in scc:
-        graph[id].type_check()
+        graph[id].type_check_first_pass()
+    more = True
+    while more:
+        more = False
+        for id in scc:
+            if graph[id].type_check_second_pass():
+                more = True
+    for id in scc:
+        graph[id].finish_passes()
         graph[id].write_cache()
         graph[id].mark_as_rechecked()
 
diff --git a/mypy/checker.py b/mypy/checker.py
index b5669f4..2661452 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -2,14 +2,15 @@
 
 import itertools
 import fnmatch
+from contextlib import contextmanager
 
 from typing import (
-    Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple
+    Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator
 )
 
 from mypy.errors import Errors, report_internal_error
 from mypy.nodes import (
-    SymbolTable, Node, MypyFile, Var, Expression, Lvalue,
+    SymbolTable, Statement, MypyFile, Var, Expression, Lvalue,
     OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo,
     ClassDef, GDEF, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr,
     TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt,
@@ -19,38 +20,36 @@ from mypy.nodes import (
     TypeApplication, DictExpr, SliceExpr, FuncExpr, TempNode, SymbolTableNode,
     Context, ListComprehension, ConditionalExpr, GeneratorExpr,
     Decorator, SetExpr, TypeVarExpr, NewTypeExpr, PrintStmt,
-    LITERAL_TYPE, BreakStmt, ContinueStmt, ComparisonExpr, StarExpr,
-    YieldFromExpr, NamedTupleExpr, SetComprehension,
+    LITERAL_TYPE, BreakStmt, PassStmt, ContinueStmt, ComparisonExpr, StarExpr,
+    YieldFromExpr, NamedTupleExpr, TypedDictExpr, SetComprehension,
     DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr,
     RefExpr, YieldExpr, BackquoteExpr, ImportFrom, ImportAll, ImportBase,
     AwaitExpr,
-    CONTRAVARIANT, COVARIANT
-)
-from mypy.nodes import function_type, method_type, method_type_with_fallback
+    CONTRAVARIANT, COVARIANT)
 from mypy import nodes
 from mypy.types import (
     Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType,
-    Instance, NoneTyp, ErrorType, strip_type,
+    Instance, NoneTyp, ErrorType, strip_type, TypeType,
     UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType,
-    true_only, false_only
+    true_only, false_only, function_type
 )
 from mypy.sametypes import is_same_type
 from mypy.messages import MessageBuilder
 import mypy.checkexpr
-from mypy.checkmember import map_type_from_supertype
+from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound
 from mypy import messages
 from mypy.subtypes import (
-    is_subtype, is_equivalent, is_proper_subtype,
-    is_more_precise, restrict_subtype_away
+    is_subtype, is_equivalent, is_proper_subtype, is_more_precise, restrict_subtype_away,
+    is_subtype_ignoring_tvars
 )
 from mypy.maptype import map_instance_to_supertype
-from mypy.semanal import self_type, set_callable_name, refers_to_fullname
+from mypy.semanal import fill_typevars, set_callable_name, refers_to_fullname
 from mypy.erasetype import erase_typevars
 from mypy.expandtype import expand_type
 from mypy.visitor import NodeVisitor
 from mypy.join import join_types
 from mypy.treetransform import TransformVisitor
-from mypy.meet import meet_simple, nearest_builtin_ancestor, is_overlapping_types
+from mypy.meet import meet_simple, is_overlapping_types
 from mypy.binder import ConditionalTypeBinder
 from mypy.options import Options
 
@@ -59,13 +58,16 @@ from mypy import experiments
 
 T = TypeVar('T')
 
+LAST_PASS = 1  # Pass numbers start at 0
+
 
 # A node which is postponed to be type checked during the next pass.
 DeferredNode = NamedTuple(
     'DeferredNode',
     [
-        ('node', Node),
+        ('node', FuncItem),
         ('context_type_name', Optional[str]),  # Name of the surrounding class (for error messages)
+        ('active_class', Optional[Type]),  # And its type (for self-type handling)
     ])
 
 
@@ -73,6 +75,8 @@ class TypeChecker(NodeVisitor[Type]):
     """Mypy type checker.
 
     Type check mypy source files that have been semantically analyzed.
+
+    You must create a separate instance for each source file.
     """
 
     # Are we type checking a stub?
@@ -82,23 +86,20 @@ class TypeChecker(NodeVisitor[Type]):
     # Utility for generating messages
     msg = None  # type: MessageBuilder
     # Types of type checked nodes
-    type_map = None  # type: Dict[Node, Type]
-    # Types of type checked nodes within this specific module
-    module_type_map = None  # type: Dict[Node, Type]
+    type_map = None  # type: Dict[Expression, Type]
 
     # Helper for managing conditional types
     binder = None  # type: ConditionalTypeBinder
     # Helper for type checking expressions
     expr_checker = None  # type: mypy.checkexpr.ExpressionChecker
 
+    scope = None  # type: Scope
     # Stack of function return types
     return_types = None  # type: List[Type]
     # Type context for type inference
     type_context = None  # type: List[Type]
     # Flags; true for dynamically typed functions
     dynamic_funcs = None  # type: List[bool]
-    # Stack of functions being type checked
-    function_stack = None  # type: List[FuncItem]
     # Stack of collections of variables with partial types
     partial_types = None  # type: List[Dict[Var, Context]]
     globals = None  # type: SymbolTable
@@ -121,55 +122,60 @@ class TypeChecker(NodeVisitor[Type]):
     # directly or indirectly.
     module_refs = None  # type: Set[str]
 
-    def __init__(self, errors: Errors, modules: Dict[str, MypyFile]) -> None:
+    def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options,
+                 tree: MypyFile, path: str) -> None:
         """Construct a type checker.
 
         Use errors to report type check errors.
         """
         self.errors = errors
         self.modules = modules
+        self.options = options
+        self.tree = tree
+        self.path = path
         self.msg = MessageBuilder(errors, modules)
-        self.type_map = {}
-        self.module_type_map = {}
-        self.binder = ConditionalTypeBinder()
         self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg)
+        self.scope = Scope(tree)
+        self.binder = ConditionalTypeBinder()
+        self.globals = tree.names
         self.return_types = []
         self.type_context = []
         self.dynamic_funcs = []
-        self.function_stack = []
         self.partial_types = []
         self.deferred_nodes = []
-        self.pass_num = 0
-        self.current_node_deferred = False
+        self.type_map = {}
         self.module_refs = set()
-
-    def visit_file(self, file_node: MypyFile, path: str, options: Options) -> None:
-        """Type check a mypy file with the given path."""
-        self.options = options
         self.pass_num = 0
-        self.is_stub = file_node.is_stub
-        self.errors.set_file(path)
-        self.globals = file_node.names
-        self.enter_partial_types()
-        self.is_typeshed_stub = self.errors.is_typeshed_file(path)
-        self.module_type_map = {}
-        self.module_refs = set()
-        if self.options.strict_optional_whitelist is None:
-            self.suppress_none_errors = not self.options.show_none_errors
+        self.current_node_deferred = False
+        self.is_stub = tree.is_stub
+        self.is_typeshed_stub = errors.is_typeshed_file(path)
+        if options.strict_optional_whitelist is None:
+            self.suppress_none_errors = not options.show_none_errors
         else:
             self.suppress_none_errors = not any(fnmatch.fnmatch(path, pattern)
                                                 for pattern
-                                                in self.options.strict_optional_whitelist)
+                                                in options.strict_optional_whitelist)
 
-        for d in file_node.defs:
-            self.accept(d)
+    def check_first_pass(self) -> None:
+        """Type check the entire file, but defer functions with unresolved references.
 
-        self.leave_partial_types()
+        Unresolved references are forward references to variables
+        whose types haven't been inferred yet.  They may occur later
+        in the same file or in a different file that's being processed
+        later (usually due to an import cycle).
 
-        if self.deferred_nodes:
-            self.check_second_pass()
+        Deferred functions will be processed by check_second_pass().
+        """
+        self.errors.set_file(self.path)
+        self.enter_partial_types()
 
-        self.current_node_deferred = False
+        with self.binder.top_frame_context():
+            for d in self.tree.defs:
+                self.accept(d)
+
+        self.leave_partial_types()
+
+        assert not self.current_node_deferred
 
         all_ = self.globals.get('__all__')
         if all_ is not None and all_.type is not None:
@@ -180,28 +186,48 @@ class TypeChecker(NodeVisitor[Type]):
                 self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
                           all_.node)
 
-        del self.options
+    def check_second_pass(self) -> bool:
+        """Run second or following pass of type checking.
 
-    def check_second_pass(self) -> None:
-        """Run second pass of type checking which goes through deferred nodes."""
-        self.pass_num = 1
-        for node, type_name in self.deferred_nodes:
+        This goes through deferred nodes, returning True if there were any.
+        """
+        if not self.deferred_nodes:
+            return False
+        self.errors.set_file(self.path)
+        self.pass_num += 1
+        todo = self.deferred_nodes
+        self.deferred_nodes = []
+        done = set()  # type: Set[FuncItem]
+        for node, type_name, active_class in todo:
+            if node in done:
+                continue
+            # This is useful for debugging:
+            # print("XXX in pass %d, class %s, function %s" %
+            #       (self.pass_num, type_name, node.fullname() or node.name()))
+            done.add(node)
             if type_name:
                 self.errors.push_type(type_name)
-            self.accept(node)
+
+            if active_class:
+                with self.scope.push_class(active_class):
+                    self.accept(node)
+            else:
+                self.accept(node)
             if type_name:
                 self.errors.pop_type()
-        self.deferred_nodes = []
+        return True
 
     def handle_cannot_determine_type(self, name: str, context: Context) -> None:
-        if self.pass_num == 0 and self.function_stack:
+        node = self.scope.top_function()
+        if self.pass_num < LAST_PASS and node is not None:
             # Don't report an error yet. Just defer.
-            node = self.function_stack[-1]
             if self.errors.type_name:
                 type_name = self.errors.type_name[-1]
             else:
                 type_name = None
-            self.deferred_nodes.append(DeferredNode(node, type_name))
+            # Shouldn't we freeze the entire scope?
+            active_class = self.scope.active_class()
+            self.deferred_nodes.append(DeferredNode(node, type_name, active_class))
             # Set a marker so that we won't infer additional types in this
             # function. Any inferred types could be bogus, because there's at
             # least one type that we don't know.
@@ -209,7 +235,8 @@ class TypeChecker(NodeVisitor[Type]):
         else:
             self.msg.cannot_determine_type(name, context)
 
-    def accept(self, node: Node, type_context: Type = None) -> Type:
+    def accept(self, node: Union[Expression, Statement, FuncItem],
+               type_context: Type = None) -> Type:
         """Type check a node in the given type context."""
         self.type_context.append(type_context)
         try:
@@ -217,28 +244,32 @@ class TypeChecker(NodeVisitor[Type]):
         except Exception as err:
             report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
         self.type_context.pop()
-        self.store_type(node, typ)
+        if typ is not None:
+            assert isinstance(node, Expression)
+            self.store_type(node, typ)
         if not self.in_checked_function():
             return AnyType()
         else:
             return typ
 
-    def accept_loop(self, body: Union[IfStmt, Block], else_body: Block = None) -> Type:
+    def accept_loop(self, body: Statement, else_body: Statement = None, *,
+                    exit_condition: Expression = None) -> None:
         """Repeatedly type check a loop body until the frame doesn't change.
+        If exit_condition is set, assume it must be False on exit from the loop.
 
         Then check the else_body.
         """
         # The outer frame accumulates the results of all iterations
-        with self.binder.frame_context(1) as outer_frame:
-            self.binder.push_loop_frame()
+        with self.binder.frame_context(can_skip=False):
             while True:
-                with self.binder.frame_context(1):
-                    # We may skip each iteration
-                    self.binder.options_on_return[-1].append(outer_frame)
+                with self.binder.frame_context(can_skip=True,
+                                               break_frame=2, continue_frame=1):
                     self.accept(body)
                 if not self.binder.last_pop_changed:
                     break
-            self.binder.pop_loop_frame()
+            if exit_condition:
+                _, else_map = self.find_isinstance_check(exit_condition)
+                self.push_type_map(else_map)
             if else_body:
                 self.accept(else_body)
 
@@ -261,6 +292,7 @@ class TypeChecker(NodeVisitor[Type]):
             self.check_method_override(defn)
             self.check_inplace_operator_method(defn)
         self.check_overlapping_overloads(defn)
+        return None
 
     def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
         for i, item in enumerate(defn.items):
@@ -457,10 +489,11 @@ class TypeChecker(NodeVisitor[Type]):
                                        messages.INCOMPATIBLE_REDEFINITION,
                                        'redefinition with type',
                                        'original type')
+        return None
 
     def check_func_item(self, defn: FuncItem,
                         type_override: CallableType = None,
-                        name: str = None) -> Type:
+                        name: str = None) -> None:
         """Type check a function.
 
         If type_override is provided, use it as the function type.
@@ -471,7 +504,6 @@ class TypeChecker(NodeVisitor[Type]):
         if isinstance(defn, FuncDef):
             fdef = defn
 
-        self.function_stack.append(defn)
         self.dynamic_funcs.append(defn.is_dynamic() and not type_override)
 
         if fdef:
@@ -493,7 +525,6 @@ class TypeChecker(NodeVisitor[Type]):
             self.errors.pop_function()
 
         self.dynamic_funcs.pop()
-        self.function_stack.pop()
         self.current_node_deferred = False
 
     def check_func_def(self, defn: FuncItem, typ: CallableType, name: str) -> None:
@@ -502,7 +533,7 @@ class TypeChecker(NodeVisitor[Type]):
         for item, typ in self.expand_typevars(defn, typ):
             old_binder = self.binder
             self.binder = ConditionalTypeBinder()
-            with self.binder.frame_context():
+            with self.binder.top_frame_context():
                 defn.expanded.append(item)
 
                 # We may be checking a function definition or an anonymous
@@ -579,12 +610,22 @@ class TypeChecker(NodeVisitor[Type]):
                 for i in range(len(typ.arg_types)):
                     arg_type = typ.arg_types[i]
 
-                    # Refuse covariant parameter type variables
-                    if isinstance(arg_type, TypeVarType):
+                    ref_type = self.scope.active_class()
+                    if (isinstance(defn, FuncDef) and ref_type is not None and i == 0
+                            and not defn.is_static
+                            and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]):
+                        if defn.is_class or defn.name() == '__new__':
+                            ref_type = mypy.types.TypeType(ref_type)
+                        erased = erase_to_bound(arg_type)
+                        if not is_subtype_ignoring_tvars(ref_type, erased):
+                            self.fail("The erased type of self '{}' "
+                                      "is not a supertype of its class '{}'"
+                                      .format(erased, ref_type), defn)
+                    elif isinstance(arg_type, TypeVarType):
+                        # Refuse covariant parameter type variables
+                        # TODO: check recursively for inner type variables
                         if arg_type.variance == COVARIANT:
-                            self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT,
-                                      arg_type)
-
+                            self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, arg_type)
                     if typ.arg_kinds[i] == nodes.ARG_STAR:
                         # builtins.tuple[T] is typing.Tuple[T, ...]
                         arg_type = self.named_generic_type('builtins.tuple',
@@ -602,13 +643,44 @@ class TypeChecker(NodeVisitor[Type]):
                         self.accept(init)
 
             # Type check body in a new scope.
-            with self.binder.frame_context():
-                self.accept(item.body)
+            with self.binder.top_frame_context():
+                with self.scope.push_function(defn):
+                    self.accept(item.body)
+                unreachable = self.binder.is_unreachable()
+
+            if (self.options.warn_no_return and not unreachable
+                    and not isinstance(self.return_types[-1], (Void, NoneTyp, AnyType))
+                    and not defn.is_generator):
+                # Control flow fell off the end of a function that was
+                # declared to return a non-None type.
+                # Allow functions that are entirely pass/Ellipsis.
+                if self.is_trivial_body(defn.body):
+                    pass
+                else:
+                    self.msg.note(messages.MISSING_RETURN_STATEMENT, defn)
 
             self.return_types.pop()
 
             self.binder = old_binder
 
+    def is_trivial_body(self, block: Block) -> bool:
+        body = block.body
+
+        # Skip a docstring
+        if (isinstance(body[0], ExpressionStmt) and
+                isinstance(body[0].expr, (StrExpr, UnicodeExpr))):
+            body = block.body[1:]
+
+        if len(body) == 0:
+            # There's only a docstring.
+            return True
+        elif len(body) > 1:
+            return False
+        stmt = body[0]
+        return (isinstance(stmt, PassStmt) or
+                (isinstance(stmt, ExpressionStmt) and
+                 isinstance(stmt.expr, EllipsisExpr)))
+
     def check_reverse_op_method(self, defn: FuncItem, typ: CallableType,
                                 method: str) -> None:
         """Check a reverse operator method such as __radd__."""
@@ -739,11 +811,11 @@ class TypeChecker(NodeVisitor[Type]):
         method = defn.name()
         if method not in nodes.inplace_operator_methods:
             return
-        typ = self.method_type(defn)
+        typ = bind_self(self.function_type(defn))
         cls = defn.info
         other_method = '__' + method[3:]
         if cls.has_readable_member(other_method):
-            instance = self_type(cls)
+            instance = fill_typevars(cls)
             typ2 = self.expr_checker.analyze_external_member_access(
                 other_method, instance, defn)
             fail = False
@@ -819,7 +891,7 @@ class TypeChecker(NodeVisitor[Type]):
             # The name of the method is defined in the base class.
 
             # Construct the type of the overriding method.
-            typ = self.method_type(defn)
+            typ = bind_self(self.function_type(defn), self.scope.active_class())
             # Map the overridden method type to subtype context so that
             # it can be checked for compatibility.
             original_type = base_attr.type
@@ -832,7 +904,7 @@ class TypeChecker(NodeVisitor[Type]):
                     assert False, str(base_attr.node)
             if isinstance(original_type, FunctionLike):
                 original = map_type_from_supertype(
-                    method_type(original_type),
+                    bind_self(original_type, self.scope.active_class()),
                     defn.info, base)
                 # Check that the types are compatible.
                 # TODO overloaded signatures
@@ -915,18 +987,19 @@ class TypeChecker(NodeVisitor[Type]):
         self.enter_partial_types()
         old_binder = self.binder
         self.binder = ConditionalTypeBinder()
-        with self.binder.frame_context():
-            self.accept(defn.defs)
+        with self.binder.top_frame_context():
+            with self.scope.push_class(fill_typevars(defn.info)):
+                self.accept(defn.defs)
         self.binder = old_binder
         if not defn.has_incompatible_baseclass:
             # Otherwise we've already found errors; more errors are not useful
             self.check_multiple_inheritance(typ)
         self.leave_partial_types()
         self.errors.pop_type()
+        return None
 
     def check_multiple_inheritance(self, typ: TypeInfo) -> None:
         """Check for multiple inheritance related errors."""
-
         if len(typ.bases) <= 1:
             # No multiple inheritance.
             return
@@ -940,13 +1013,6 @@ class TypeChecker(NodeVisitor[Type]):
                     # checks suffice (these are implemented elsewhere).
                     if name in base2.names and base2 not in base.mro:
                         self.check_compatibility(name, base, base2, typ)
-        # Verify that base class layouts are compatible.
-        builtin_bases = [nearest_builtin_ancestor(base.type)
-                         for base in typ.bases]
-        for base1 in builtin_bases:
-            for base2 in builtin_bases:
-                if not (base1 in base2.mro or base2 in base1.mro):
-                    self.fail(messages.INSTANCE_LAYOUT_CONFLICT, typ)
 
     def check_compatibility(self, name: str, base1: TypeInfo,
                             base2: TypeInfo, ctx: Context) -> None:
@@ -971,8 +1037,8 @@ class TypeChecker(NodeVisitor[Type]):
         if (isinstance(first_type, FunctionLike) and
                 isinstance(second_type, FunctionLike)):
             # Method override
-            first_sig = method_type(first_type)
-            second_sig = method_type(second_type)
+            first_sig = bind_self(first_type)
+            second_sig = bind_self(second_type)
             ok = is_subtype(first_sig, second_sig)
         elif first_type and second_type:
             ok = is_equivalent(first_type, second_type)
@@ -988,11 +1054,13 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_import_from(self, node: ImportFrom) -> Type:
         self.check_import(node)
+        return None
 
     def visit_import_all(self, node: ImportAll) -> Type:
         self.check_import(node)
+        return None
 
-    def check_import(self, node: ImportBase) -> Type:
+    def check_import(self, node: ImportBase) -> None:
         for assign in node.assignments:
             lvalue = assign.lvalues[0]
             lvalue_type, _, __ = self.check_lvalue(lvalue)
@@ -1011,11 +1079,13 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_block(self, b: Block) -> Type:
         if b.is_unreachable:
+            self.binder.unreachable()
             return None
         for s in b.body:
-            self.accept(s)
-            if self.binder.breaking_out:
+            if self.binder.is_unreachable():
                 break
+            self.accept(s)
+        return None
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> Type:
         """Type check an assignment statement.
@@ -1032,6 +1102,7 @@ class TypeChecker(NodeVisitor[Type]):
             rvalue = self.temp_node(self.type_map[s.rvalue], s)
             for lv in s.lvalues[:-1]:
                 self.check_assignment(lv, rvalue, s.type is None)
+        return None
 
     def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type: bool = True,
                          new_syntax: bool = False) -> None:
@@ -1272,9 +1343,9 @@ class TypeChecker(NodeVisitor[Type]):
                 inferred = cast(Var, lvalue.node)
                 assert isinstance(inferred, Var)
             else:
-                m = cast(MemberExpr, lvalue)
-                self.accept(m.expr)
-                inferred = m.def_var
+                assert isinstance(lvalue, MemberExpr)
+                self.accept(lvalue.expr)
+                inferred = lvalue.def_var
         elif isinstance(lvalue, IndexExpr):
             index_lvalue = lvalue
         elif isinstance(lvalue, MemberExpr):
@@ -1449,12 +1520,17 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_expression_stmt(self, s: ExpressionStmt) -> Type:
         self.accept(s.expr)
+        return None
 
     def visit_return_stmt(self, s: ReturnStmt) -> Type:
         """Type check a return statement."""
-        self.binder.breaking_out = True
-        if self.is_within_function():
-            defn = self.function_stack[-1]
+        self.check_return_stmt(s)
+        self.binder.unreachable()
+        return None
+
+    def check_return_stmt(self, s: ReturnStmt) -> None:
+        defn = self.scope.top_function()
+        if defn is not None:
             if defn.is_generator:
                 return_type = self.get_generator_return_type(self.return_types[-1],
                                                              defn.is_coroutine)
@@ -1466,13 +1542,13 @@ class TypeChecker(NodeVisitor[Type]):
                 typ = self.accept(s.expr, return_type)
                 # Returning a value of type Any is always fine.
                 if isinstance(typ, AnyType):
-                    return None
+                    return
 
                 if self.is_unusable_type(return_type):
                     # Lambdas are allowed to have a unusable returns.
                     # Functions returning a value of type None are allowed to have a Void return.
-                    if isinstance(self.function_stack[-1], FuncExpr) or isinstance(typ, NoneTyp):
-                        return None
+                    if isinstance(self.scope.top_function(), FuncExpr) or isinstance(typ, NoneTyp):
+                        return
                     self.fail(messages.NO_RETURN_VALUE_EXPECTED, s)
                 else:
                     self.check_subtype(
@@ -1484,84 +1560,43 @@ class TypeChecker(NodeVisitor[Type]):
                         msg=messages.INCOMPATIBLE_RETURN_VALUE_TYPE)
             else:
                 # Empty returns are valid in Generators with Any typed returns.
-                if (self.function_stack[-1].is_generator and isinstance(return_type, AnyType)):
-                    return None
+                if (defn.is_generator and isinstance(return_type, AnyType)):
+                    return
 
                 if isinstance(return_type, (Void, NoneTyp, AnyType)):
-                    return None
+                    return
 
                 if self.in_checked_function():
                     self.fail(messages.RETURN_VALUE_EXPECTED, s)
 
-    def wrap_generic_type(self, typ: Instance, rtyp: Instance, check_type:
-                          str, context: Context) -> Type:
-        n_diff = self.count_nested_types(rtyp, check_type) - self.count_nested_types(typ,
-                                                                                     check_type)
-        if n_diff == 1:
-            return self.named_generic_type(check_type, [typ])
-        elif n_diff == 0 or n_diff > 1:
-            self.fail(messages.INCOMPATIBLE_RETURN_VALUE_TYPE
-                + ": expected {}, got {}".format(rtyp, typ), context)
-            return typ
-        return typ
-
-    def count_nested_types(self, typ: Instance, check_type: str) -> int:
-        c = 0
-        while is_subtype(typ, self.named_type(check_type)):
-            c += 1
-            typ = map_instance_to_supertype(self.named_generic_type(check_type, typ.args),
-                                            self.lookup_typeinfo(check_type))
-            if typ.args:
-                typ = cast(Instance, typ.args[0])
-            else:
-                return c
-        return c
-
     def visit_if_stmt(self, s: IfStmt) -> Type:
         """Type check an if statement."""
-        breaking_out = True
         # This frame records the knowledge from previous if/elif clauses not being taken.
-        with self.binder.frame_context():
+        # Fall-through to the original frame is handled explicitly in each block.
+        with self.binder.frame_context(can_skip=False, fall_through=0):
             for e, b in zip(s.expr, s.body):
                 t = self.accept(e)
                 self.check_usable_type(t, e)
-                if_map, else_map = find_isinstance_check(e, self.type_map)
-                if if_map is None:
-                    # The condition is always false
-                    # XXX should issue a warning?
-                    pass
-                else:
-                    # Only type check body if the if condition can be true.
-                    with self.binder.frame_context(2):
-                        if if_map:
-                            for var, type in if_map.items():
-                                self.binder.push(var, type)
-
-                        self.accept(b)
-                    breaking_out = breaking_out and self.binder.last_pop_breaking_out
-
-                    if else_map:
-                        for var, type in else_map.items():
-                            self.binder.push(var, type)
-                if else_map is None:
-                    # The condition is always true => remaining elif/else blocks
-                    # can never be reached.
-
-                    # Might also want to issue a warning
-                    # print("Warning: isinstance always true")
-                    break
-            else:  # Didn't break => can't prove one of the conditions is always true
-                with self.binder.frame_context(2):
-                    if s.else_body:
-                        self.accept(s.else_body)
-                breaking_out = breaking_out and self.binder.last_pop_breaking_out
-        if breaking_out:
-            self.binder.breaking_out = True
+                if_map, else_map = self.find_isinstance_check(e)
+
+                # XXX Issue a warning if condition is always False?
+                with self.binder.frame_context(can_skip=True, fall_through=2):
+                    self.push_type_map(if_map)
+                    self.accept(b)
+
+                # XXX Issue a warning if condition is always True?
+                self.push_type_map(else_map)
+
+            with self.binder.frame_context(can_skip=False, fall_through=2):
+                if s.else_body:
+                    self.accept(s.else_body)
         return None
 
     def visit_while_stmt(self, s: WhileStmt) -> Type:
         """Type check a while statement."""
-        self.accept_loop(IfStmt([s.expr], [s.body], None), s.else_body)
+        self.accept_loop(IfStmt([s.expr], [s.body], None), s.else_body,
+                         exit_condition=s.expr)
+        return None
 
     def visit_operator_assignment_stmt(self,
                                        s: OperatorAssignmentStmt) -> Type:
@@ -1576,26 +1611,28 @@ class TypeChecker(NodeVisitor[Type]):
         else:
             if not is_subtype(rvalue_type, lvalue_type):
                 self.msg.incompatible_operator_assignment(s.op, s)
+        return None
 
     def visit_assert_stmt(self, s: AssertStmt) -> Type:
         self.accept(s.expr)
 
         # If this is asserting some isinstance check, bind that type in the following code
-        true_map, _ = find_isinstance_check(s.expr, self.type_map)
+        true_map, _ = self.find_isinstance_check(s.expr)
 
-        if true_map:
-            for var, type in true_map.items():
-                self.binder.push(var, type)
+        self.push_type_map(true_map)
+        return None
 
     def visit_raise_stmt(self, s: RaiseStmt) -> Type:
         """Type check a raise statement."""
-        self.binder.breaking_out = True
         if s.expr:
             self.type_check_raise(s.expr, s)
         if s.from_expr:
-            self.type_check_raise(s.from_expr, s)
+            self.type_check_raise(s.from_expr, s, True)
+        self.binder.unreachable()
+        return None
 
-    def type_check_raise(self, e: Expression, s: RaiseStmt) -> None:
+    def type_check_raise(self, e: Expression, s: RaiseStmt,
+                         optional: bool = False) -> None:
         typ = self.accept(e)
         if isinstance(typ, FunctionLike):
             if typ.is_type_obj():
@@ -1615,101 +1652,127 @@ class TypeChecker(NodeVisitor[Type]):
         if isinstance(typ, Instance) and typ.type.fallback_to_any:
             # OK!
             return
-        self.check_subtype(typ,
-                           self.named_type('builtins.BaseException'), s,
-                           messages.INVALID_EXCEPTION)
+        expected_type = self.named_type('builtins.BaseException')  # type: Type
+        if optional:
+            expected_type = UnionType([expected_type, NoneTyp()])
+        self.check_subtype(typ, expected_type, s, messages.INVALID_EXCEPTION)
 
     def visit_try_stmt(self, s: TryStmt) -> Type:
         """Type check a try statement."""
         # Our enclosing frame will get the result if the try/except falls through.
-        # This one gets all possible intermediate states
-        with self.binder.frame_context():
+        # This one gets all possible states after the try block exited abnormally
+        # (by exception, return, break, etc.)
+        with self.binder.frame_context(can_skip=False, fall_through=0):
+            # Not only might the body of the try statement exit
+            # abnormally, but so might an exception handler or else
+            # clause. The finally clause runs in *all* cases, so we
+            # need an outer try frame to catch all intermediate states
+            # in case an exception is raised during an except or else
+            # clause. As an optimization, only create the outer try
+            # frame when there actually is a finally clause.
+            self.visit_try_without_finally(s, try_frame=bool(s.finally_body))
             if s.finally_body:
-                self.binder.try_frames.add(len(self.binder.frames) - 1)
-                breaking_out = self.visit_try_without_finally(s)
-                self.binder.try_frames.remove(len(self.binder.frames) - 1)
-                # First we check finally_body is type safe for all intermediate frames
+                # First we check finally_body is type safe on all abnormal exit paths
                 self.accept(s.finally_body)
-                breaking_out = breaking_out or self.binder.breaking_out
-            else:
-                breaking_out = self.visit_try_without_finally(s)
 
-        if not breaking_out and s.finally_body:
-            # Then we try again for the more restricted set of options that can fall through
+        if s.finally_body:
+            # Then we try again for the more restricted set of options
+            # that can fall through. (Why do we need to check the
+            # finally clause twice? Depending on whether the finally
+            # clause was reached by the try clause falling off the end
+            # or exiting abnormally, after completing the finally clause
+            # either flow will continue to after the entire try statement
+            # or the exception/return/etc. will be processed and control
+            # flow will escape. We need to check that the finally clause
+            # type checks in both contexts, but only the resulting types
+            # from the latter context affect the type state in the code
+            # that follows the try statement.)
             self.accept(s.finally_body)
-        self.binder.breaking_out = breaking_out
+
         return None
 
-    def visit_try_without_finally(self, s: TryStmt) -> bool:
+    def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None:
         """Type check a try statement, ignoring the finally block.
 
-        Return whether we are guaranteed to be breaking out.
-        Otherwise, it will place the results possible frames of
-        that don't break out into self.binder.frames[-2].
+        On entry, the top frame should receive all flow that exits the
+        try block abnormally (i.e., such that the else block does not
+        execute), and its parent should receive all flow that exits
+        the try block normally.
         """
-        breaking_out = True
-        # This frame records the possible states that exceptions can leave variables in
-        # during the try: block
-        with self.binder.frame_context():
-            with self.binder.frame_context(3):
-                self.binder.try_frames.add(len(self.binder.frames) - 2)
-                self.accept(s.body)
-                self.binder.try_frames.remove(len(self.binder.frames) - 2)
-                if s.else_body:
-                    self.accept(s.else_body)
-            breaking_out = breaking_out and self.binder.last_pop_breaking_out
-            for i in range(len(s.handlers)):
-                with self.binder.frame_context(3):
-                    if s.types[i]:
-                        t = self.visit_except_handler_test(s.types[i])
+        # This frame will run the else block if the try fell through.
+        # In that case, control flow continues to the parent of what
+        # was the top frame on entry.
+        with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=try_frame):
+            # This frame receives exit via exception, and runs exception handlers
+            with self.binder.frame_context(can_skip=False, fall_through=2):
+                # Finally, the body of the try statement
+                with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=True):
+                    self.accept(s.body)
+                for i in range(len(s.handlers)):
+                    with self.binder.frame_context(can_skip=True, fall_through=4):
+                        if s.types[i]:
+                            t = self.visit_except_handler_test(s.types[i])
+                            if s.vars[i]:
+                                # To support local variables, we make this a definition line,
+                                # causing assignment to set the variable's type.
+                                s.vars[i].is_def = True
+                                # We also temporarily set current_node_deferred to False to
+                                # make sure the inference happens.
+                                # TODO: Use a better solution, e.g. a
+                                # separate Var for each except block.
+                                am_deferring = self.current_node_deferred
+                                self.current_node_deferred = False
+                                self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
+                                self.current_node_deferred = am_deferring
+                        self.accept(s.handlers[i])
                         if s.vars[i]:
-                            # To support local variables, we make this a definition line,
-                            # causing assignment to set the variable's type.
-                            s.vars[i].is_def = True
-                            self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
-                    self.accept(s.handlers[i])
-                    if s.vars[i]:
-                        # Exception variables are deleted in python 3 but not python 2.
-                        # But, since it's bad form in python 2 and the type checking
-                        # wouldn't work very well, we delete it anyway.
-
-                        # Unfortunately, this doesn't let us detect usage before the
-                        # try/except block.
-                        if self.options.python_version[0] >= 3:
-                            source = s.vars[i].name
-                        else:
-                            source = ('(exception variable "{}", which we do not accept outside'
-                                      'except: blocks even in python 2)'.format(s.vars[i].name))
-                        var = cast(Var, s.vars[i].node)
-                        var.type = DeletedType(source=source)
-                        self.binder.cleanse(s.vars[i])
-                breaking_out = breaking_out and self.binder.last_pop_breaking_out
-        return breaking_out
+                            # Exception variables are deleted in python 3 but not python 2.
+                            # But, since it's bad form in python 2 and the type checking
+                            # wouldn't work very well, we delete it anyway.
+
+                            # Unfortunately, this doesn't let us detect usage before the
+                            # try/except block.
+                            if self.options.python_version[0] >= 3:
+                                source = s.vars[i].name
+                            else:
+                                source = ('(exception variable "{}", which we do not '
+                                          'accept outside except: blocks even in '
+                                          'python 2)'.format(s.vars[i].name))
+                            var = cast(Var, s.vars[i].node)
+                            var.type = DeletedType(source=source)
+                            self.binder.cleanse(s.vars[i])
+            if s.else_body:
+                self.accept(s.else_body)
 
     def visit_except_handler_test(self, n: Expression) -> Type:
         """Type check an exception handler test clause."""
-        type = self.accept(n)
+        typ = self.accept(n)
 
         all_types = []  # type: List[Type]
-        test_types = type.items if isinstance(type, TupleType) else [type]
+        test_types = typ.items if isinstance(typ, TupleType) else [typ]
 
         for ttype in test_types:
             if isinstance(ttype, AnyType):
                 all_types.append(ttype)
                 continue
 
-            if not isinstance(ttype, FunctionLike):
+            if isinstance(ttype, FunctionLike):
+                item = ttype.items()[0]
+                if not item.is_type_obj():
+                    self.fail(messages.INVALID_EXCEPTION_TYPE, n)
+                    return AnyType()
+                exc_type = item.ret_type
+            elif isinstance(ttype, TypeType):
+                exc_type = ttype.item
+            else:
                 self.fail(messages.INVALID_EXCEPTION_TYPE, n)
                 return AnyType()
 
-            item = ttype.items()[0]
-            ret_type = item.ret_type
-            if not (is_subtype(ret_type, self.named_type('builtins.BaseException'))
-                    and item.is_type_obj()):
+            if not is_subtype(exc_type, self.named_type('builtins.BaseException')):
                 self.fail(messages.INVALID_EXCEPTION_TYPE, n)
                 return AnyType()
 
-            all_types.append(ret_type)
+            all_types.append(exc_type)
 
         return UnionType.make_simplified_union(all_types)
 
@@ -1721,6 +1784,7 @@ class TypeChecker(NodeVisitor[Type]):
             item_type = self.analyze_iterable_item_type(s.expr)
         self.analyze_index_variables(s.index, item_type, s)
         self.accept_loop(s.body, s.else_body)
+        return None
 
     def analyze_async_iterable_item_type(self, expr: Expression) -> Type:
         """Analyse async iterable expression and return iterator item type."""
@@ -1788,7 +1852,8 @@ class TypeChecker(NodeVisitor[Type]):
             m.line = s.line
             c = CallExpr(m, [e.index], [nodes.ARG_POS], [None])
             c.line = s.line
-            return c.accept(self)
+            c.accept(self)
+            return None
         else:
             def flatten(t: Expression) -> List[Expression]:
                 """Flatten a nested sequence of tuples/lists into one list of nodes."""
@@ -1812,7 +1877,7 @@ class TypeChecker(NodeVisitor[Type]):
                 if d.fullname == 'typing.no_type_check':
                     e.var.type = AnyType()
                     e.var.is_ready = True
-                    return NoneTyp()
+                    return None
 
         e.func.accept(self)
         sig = self.function_type(e.func)  # type: Type
@@ -1833,6 +1898,7 @@ class TypeChecker(NodeVisitor[Type]):
         e.var.is_ready = True
         if e.func.is_property:
             self.check_incompatible_property_override(e)
+        return None
 
     def check_incompatible_property_override(self, e: Decorator) -> None:
         if not e.var.is_settable_property and e.func.info is not None:
@@ -1853,6 +1919,7 @@ class TypeChecker(NodeVisitor[Type]):
             else:
                 self.check_with_item(expr, target)
         self.accept(s.body)
+        return None
 
     def check_async_with_item(self, expr: Expression, target: Expression) -> None:
         echk = self.expr_checker
@@ -1888,6 +1955,7 @@ class TypeChecker(NodeVisitor[Type]):
             if not isinstance(target_type, NoneTyp):
                 # TODO: Also verify the type of 'write'.
                 self.expr_checker.analyze_external_member_access('write', target_type, s.target)
+        return None
 
     #
     # Expressions
@@ -1986,13 +2054,11 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_member_expr(e)
 
     def visit_break_stmt(self, s: BreakStmt) -> Type:
-        self.binder.breaking_out = True
-        self.binder.allow_jump(self.binder.loop_frames[-1] - 1)
+        self.binder.handle_break()
         return None
 
     def visit_continue_stmt(self, s: ContinueStmt) -> Type:
-        self.binder.breaking_out = True
-        self.binder.allow_jump(self.binder.loop_frames[-1])
+        self.binder.handle_continue()
         return None
 
     def visit_int_expr(self, e: IntExpr) -> Type:
@@ -2054,6 +2120,10 @@ class TypeChecker(NodeVisitor[Type]):
         # TODO: Perhaps return a type object type?
         return AnyType()
 
+    def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
+        # TODO: Perhaps return a type object type?
+        return AnyType()
+
     def visit_list_expr(self, e: ListExpr) -> Type:
         return self.expr_checker.visit_list_expr(e)
 
@@ -2195,19 +2265,9 @@ class TypeChecker(NodeVisitor[Type]):
         """Return instance type 'str'."""
         return self.named_type('builtins.str')
 
-    def check_type_equivalency(self, t1: Type, t2: Type, node: Context,
-                               msg: str = messages.INCOMPATIBLE_TYPES) -> None:
-        """Generate an error if the types are not equivalent. The
-        dynamic type is equivalent with all types.
-        """
-        if not is_equivalent(t1, t2):
-            self.fail(msg, node)
-
-    def store_type(self, node: Node, typ: Type) -> None:
+    def store_type(self, node: Expression, typ: Type) -> None:
         """Store the type of a node in the type map."""
         self.type_map[node] = typ
-        if typ is not None:
-            self.module_type_map[node] = typ
 
     def in_checked_function(self) -> bool:
         """Should we type-check the current function?
@@ -2281,13 +2341,6 @@ class TypeChecker(NodeVisitor[Type]):
                 return partial_types
         return None
 
-    def is_within_function(self) -> bool:
-        """Are we currently type checking within a function?
-
-        I.e. not at class body or at the top level.
-        """
-        return self.return_types != []
-
     def is_unusable_type(self, typ: Type):
         """Is this type an unusable type?
 
@@ -2320,9 +2373,17 @@ class TypeChecker(NodeVisitor[Type]):
     def function_type(self, func: FuncBase) -> FunctionLike:
         return function_type(func, self.named_type('builtins.function'))
 
-    def method_type(self, func: FuncBase) -> FunctionLike:
-        return method_type_with_fallback(func, self.named_type('builtins.function'))
+    # TODO: These next two functions should refer to TypeMap below
+    def find_isinstance_check(self, n: Expression) -> Tuple[Optional[Dict[Expression, Type]],
+                                                            Optional[Dict[Expression, Type]]]:
+        return find_isinstance_check(n, self.type_map)
 
+    def push_type_map(self, type_map: Optional[Dict[Expression, Type]]) -> None:
+        if type_map is None:
+            self.binder.unreachable()
+        else:
+            for expr, type in type_map.items():
+                self.binder.push(expr, type)
 
 # Data structure returned by find_isinstance_check representing
 # information learned from the truth or falsehood of a condition.  The
@@ -2338,7 +2399,8 @@ class TypeChecker(NodeVisitor[Type]):
 # probably be better to have the dict keyed by the nodes' literal_hash
 # field instead.
 
-TypeMap = Optional[Dict[Node, Type]]
+
+TypeMap = Optional[Dict[Expression, Type]]
 
 
 def conditional_type_map(expr: Expression,
@@ -2355,9 +2417,11 @@ def conditional_type_map(expr: Expression,
     if proposed_type:
         if current_type:
             if is_proper_subtype(current_type, proposed_type):
-                return {expr: proposed_type}, None
+                # Expression is always of type proposed_type
+                return {}, None
             elif not is_overlapping_types(current_type, proposed_type):
-                return None, {expr: current_type}
+                # Expression is never of type proposed_type
+                return None, {}
             else:
                 remaining_type = restrict_subtype_away(current_type, proposed_type)
                 return {expr: proposed_type}, {expr: remaining_type}
@@ -2368,10 +2432,31 @@ def conditional_type_map(expr: Expression,
         return {}, {}
 
 
+def is_true_literal(n: Expression) -> bool:
+    return (refers_to_fullname(n, 'builtins.True')
+            or isinstance(n, IntExpr) and n.value == 1)
+
+
+def is_false_literal(n: Expression) -> bool:
+    return (refers_to_fullname(n, 'builtins.False')
+            or isinstance(n, IntExpr) and n.value == 0)
+
+
 def is_literal_none(n: Expression) -> bool:
     return isinstance(n, NameExpr) and n.fullname == 'builtins.None'
 
 
+def is_optional(t: Type) -> bool:
+    return isinstance(t, UnionType) and any(isinstance(e, NoneTyp) for e in t.items)
+
+
+def remove_optional(typ: Type) -> Type:
+    if isinstance(typ, UnionType):
+        return UnionType.make_union([t for t in typ.items if not isinstance(t, NoneTyp)])
+    else:
+        return typ
+
+
 def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
     """Calculate what information we can learn from the truth of (e1 and e2)
     in terms of the information that we can learn from the truth of e1 and
@@ -2417,7 +2502,7 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
 
 
 def find_isinstance_check(node: Expression,
-                          type_map: Dict[Node, Type],
+                          type_map: Dict[Expression, Type],
                           ) -> Tuple[TypeMap, TypeMap]:
     """Find any isinstance checks (within a chain of ands).  Includes
     implicit and explicit checks for None.
@@ -2430,20 +2515,23 @@ def find_isinstance_check(node: Expression,
 
     Guaranteed to not return None, None. (But may return {}, {})
     """
-    if isinstance(node, CallExpr):
+    if is_true_literal(node):
+        return {}, None
+    elif is_false_literal(node):
+        return None, {}
+    elif isinstance(node, CallExpr):
         if refers_to_fullname(node.callee, 'builtins.isinstance'):
             expr = node.args[0]
             if expr.literal == LITERAL_TYPE:
                 vartype = type_map[expr]
                 type = get_isinstance_type(node.args[1], type_map)
                 return conditional_type_map(expr, vartype, type)
-    elif (isinstance(node, ComparisonExpr) and any(is_literal_none(n) for n in node.operands) and
-          experiments.STRICT_OPTIONAL):
+    elif (isinstance(node, ComparisonExpr) and experiments.STRICT_OPTIONAL):
         # Check for `x is None` and `x is not None`.
         is_not = node.operators == ['is not']
-        if is_not or node.operators == ['is']:
-            if_vars = {}  # type: Dict[Node, Type]
-            else_vars = {}  # type: Dict[Node, Type]
+        if any(is_literal_none(n) for n in node.operands) and (is_not or node.operators == ['is']):
+            if_vars = {}  # type: Dict[Expression, Type]
+            else_vars = {}  # type: Dict[Expression, Type]
             for expr in node.operands:
                 if expr.literal == LITERAL_TYPE and not is_literal_none(expr) and expr in type_map:
                     # This should only be true at most once: there should be
@@ -2456,13 +2544,27 @@ def find_isinstance_check(node: Expression,
             if is_not:
                 if_vars, else_vars = else_vars, if_vars
             return if_vars, else_vars
+        # Check for `x == y` where x is of type Optional[T] and y is of type T
+        # or a type that overlaps with T (or vice versa).
+        elif node.operators == ['==']:
+            first_type = type_map[node.operands[0]]
+            second_type = type_map[node.operands[1]]
+            if is_optional(first_type) != is_optional(second_type):
+                if is_optional(first_type):
+                    optional_type, comp_type = first_type, second_type
+                    optional_expr = node.operands[0]
+                else:
+                    optional_type, comp_type = second_type, first_type
+                    optional_expr = node.operands[1]
+                if is_overlapping_types(optional_type, comp_type):
+                    return {optional_expr: remove_optional(optional_type)}, {}
     elif isinstance(node, RefExpr):
         # Restrict the type of the variable to True-ish/False-ish in the if and else branches
         # respectively
         vartype = type_map[node]
         if_type = true_only(vartype)
         else_type = false_only(vartype)
-        ref = node  # type: Node
+        ref = node  # type: Expression
         if_map = {ref: if_type} if not isinstance(if_type, UninhabitedType) else None
         else_map = {ref: else_type} if not isinstance(else_type, UninhabitedType) else None
         return if_map, else_map
@@ -2490,7 +2592,7 @@ def find_isinstance_check(node: Expression,
     return {}, {}
 
 
-def get_isinstance_type(expr: Expression, type_map: Dict[Node, Type]) -> Type:
+def get_isinstance_type(expr: Expression, type_map: Dict[Expression, Type]) -> Type:
     type = type_map[expr]
 
     if isinstance(type, TupleType):
@@ -2517,13 +2619,11 @@ def get_isinstance_type(expr: Expression, type_map: Dict[Node, Type]) -> Type:
         return UnionType(types)
 
 
-def expand_node(defn: FuncItem, map: Dict[TypeVarId, Type]) -> Node:
-    visitor = TypeTransformVisitor(map)
-    return defn.accept(visitor)
-
-
 def expand_func(defn: FuncItem, map: Dict[TypeVarId, Type]) -> FuncItem:
-    return cast(FuncItem, expand_node(defn, map))
+    visitor = TypeTransformVisitor(map)
+    ret = defn.accept(visitor)
+    assert isinstance(ret, FuncItem)
+    return ret
 
 
 class TypeTransformVisitor(TransformVisitor):
@@ -2638,7 +2738,11 @@ def infer_operator_assignment_method(type: Type, operator: str) -> Tuple[bool, s
 def is_valid_inferred_type(typ: Type) -> bool:
     """Is an inferred type valid?
 
-    Examples of invalid types include the None type or a type with a None component.
+    Examples of invalid types include the None type or List[<uninhabited>].
+
+    When not doing strict Optional checking, all types containing None are
+    invalid.  When doing strict Optional checking, only None and types that are
+    incompletely defined (i.e. contain UninhabitedType) are invalid.
     """
     if is_same_type(typ, NoneTyp()):
         # With strict Optional checking, we *may* eventually infer NoneTyp, but
@@ -2646,14 +2750,57 @@ def is_valid_inferred_type(typ: Type) -> bool:
         # resolution happens in leave_partial_types when we pop a partial types
         # scope.
         return False
+    return is_valid_inferred_type_component(typ)
+
+
+def is_valid_inferred_type_component(typ: Type) -> bool:
+    """Is this part of a type a valid inferred type?
+
+    In strict Optional mode this excludes bare None types, as otherwise every
+    type containing None would be invalid.
+    """
+    if not experiments.STRICT_OPTIONAL:
+        if is_same_type(typ, NoneTyp()):
+            return False
     if is_same_type(typ, UninhabitedType()):
         return False
     elif isinstance(typ, Instance):
         for arg in typ.args:
-            if not is_valid_inferred_type(arg):
+            if not is_valid_inferred_type_component(arg):
                 return False
     elif isinstance(typ, TupleType):
         for item in typ.items:
-            if not is_valid_inferred_type(item):
+            if not is_valid_inferred_type_component(item):
                 return False
     return True
+
+
+class Scope:
+    # We keep two stacks combined, to maintain the relative order
+    stack = None  # type: List[Union[Type, FuncItem, MypyFile]]
+
+    def __init__(self, module: MypyFile) -> None:
+        self.stack = [module]
+
+    def top_function(self) -> Optional[FuncItem]:
+        for e in reversed(self.stack):
+            if isinstance(e, FuncItem):
+                return e
+        return None
+
+    def active_class(self) -> Optional[Type]:
+        if isinstance(self.stack[-1], Type):
+            return self.stack[-1]
+        return None
+
+    @contextmanager
+    def push_function(self, item: FuncItem) -> Iterator[None]:
+        self.stack.append(item)
+        yield
+        self.stack.pop()
+
+    @contextmanager
+    def push_class(self, t: Type) -> Iterator[None]:
+        self.stack.append(t)
+        yield
+        self.stack.pop()
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index cc31585..32cbbcb 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -6,19 +6,20 @@ from mypy.types import (
     Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
     TupleType, Instance, TypeVarId, TypeVarType, ErasedType, UnionType,
     PartialType, DeletedType, UnboundType, UninhabitedType, TypeType,
-    true_only, false_only, is_named_instance
+    true_only, false_only, is_named_instance, function_type,
+    get_typ_args, set_typ_args,
 )
 from mypy.nodes import (
     NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
-    Node, MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
+    MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
     OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealTypeExpr, TypeApplication, ListExpr,
     TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context, Expression,
     ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
     ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
     DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr,
     TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR2, MODULE_REF,
+    UNBOUND_TVAR, BOUND_TVAR,
 )
-from mypy.nodes import function_type
 from mypy import nodes
 import mypy.checker
 from mypy import types
@@ -32,11 +33,11 @@ from mypy.subtypes import is_subtype, is_equivalent
 from mypy import applytype
 from mypy import erasetype
 from mypy.checkmember import analyze_member_access, type_object_type
-from mypy.semanal import self_type
 from mypy.constraints import get_actual_type
 from mypy.checkstrformat import StringFormatterChecker
 from mypy.expandtype import expand_type
 from mypy.util import split_module_names
+from mypy.semanal import fill_typevars
 
 from mypy import experiments
 
@@ -322,8 +323,9 @@ class ExpressionChecker:
                     callee)
         elif isinstance(callee, Instance):
             call_function = analyze_member_access('__call__', callee, context,
-                                         False, False, False, self.named_type,
-                                         self.not_ready_callback, self.msg, chk=self.chk)
+                                                  False, False, False, self.named_type,
+                                                  self.not_ready_callback, self.msg,
+                                                  original_type=callee, chk=self.chk)
             return self.check_call(call_function, args, arg_kinds, context, arg_names,
                                    callable_node, arg_messages)
         elif isinstance(callee, TypeVarType):
@@ -460,15 +462,17 @@ class ExpressionChecker:
                 # See also github issues #462 and #360.
                 ret_type = NoneTyp()
         args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
-        # Only substitute non-None and non-erased types.
+        # Only substitute non-Uninhabited and non-erased types.
         new_args = []  # type: List[Type]
         for arg in args:
-            if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
+            if isinstance(arg, UninhabitedType) or has_erased_component(arg):
+                new_args.append(None)
+            elif not experiments.STRICT_OPTIONAL and isinstance(arg, NoneTyp):
+                # Don't substitute None types in non-strict-Optional mode.
                 new_args.append(None)
             else:
                 new_args.append(arg)
-        return cast(CallableType, self.apply_generic_arguments(callable, new_args,
-                                                           error_context))
+        return self.apply_generic_arguments(callable, new_args, error_context)
 
     def infer_function_type_arguments(self, callee_type: CallableType,
                                       args: List[Expression],
@@ -558,9 +562,8 @@ class ExpressionChecker:
         for i, arg in enumerate(inferred_args):
             if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
                 inferred_args[i] = None
+        callee_type = self.apply_generic_arguments(callee_type, inferred_args, context)
 
-        callee_type = cast(CallableType, self.apply_generic_arguments(
-            callee_type, inferred_args, context))
         arg_types = self.infer_arg_types_in_context2(
             callee_type, args, arg_kinds, formal_to_actual)
 
@@ -606,8 +609,7 @@ class ExpressionChecker:
         # Apply the inferred types to the function type. In this case the
         # return type must be CallableType, since we give the right number of type
         # arguments.
-        return cast(CallableType, self.apply_generic_arguments(callee_type,
-                                                           inferred_args, context))
+        return self.apply_generic_arguments(callee_type, inferred_args, context)
 
     def check_argument_count(self, callee: CallableType, actual_types: List[Type],
                              actual_kinds: List[int], actual_names: List[str],
@@ -721,10 +723,10 @@ class ExpressionChecker:
 
                 # There may be some remaining tuple varargs items that haven't
                 # been checked yet. Handle them.
+                tuplet = arg_types[actual]
                 if (callee.arg_kinds[i] == nodes.ARG_STAR and
                         arg_kinds[actual] == nodes.ARG_STAR and
-                        isinstance(arg_types[actual], TupleType)):
-                    tuplet = cast(TupleType, arg_types[actual])
+                        isinstance(tuplet, TupleType)):
                     while tuple_counter[0] < len(tuplet.items):
                         actual_type = get_actual_type(arg_type,
                                                       arg_kinds[actual],
@@ -877,22 +879,10 @@ class ExpressionChecker:
         return ok
 
     def apply_generic_arguments(self, callable: CallableType, types: List[Type],
-                                context: Context) -> Type:
+                                context: Context) -> CallableType:
         """Simple wrapper around mypy.applytype.apply_generic_arguments."""
         return applytype.apply_generic_arguments(callable, types, self.msg, context)
 
-    def apply_generic_arguments2(self, overload: Overloaded, types: List[Type],
-                                 context: Context) -> Type:
-        items = []  # type: List[CallableType]
-        for item in overload.items():
-            applied = self.apply_generic_arguments(item, types, context)
-            if isinstance(applied, CallableType):
-                items.append(applied)
-            else:
-                # There was an error.
-                return AnyType()
-        return Overloaded(items)
-
     def visit_member_expr(self, e: MemberExpr) -> Type:
         """Visit member expression (of form e.id)."""
         self.chk.module_refs.update(extract_refexpr_names(e))
@@ -907,10 +897,11 @@ class ExpressionChecker:
             return self.analyze_ref_expr(e)
         else:
             # This is a reference to a non-module attribute.
-            return analyze_member_access(e.name, self.accept(e.expr), e,
+            original_type = self.accept(e.expr)
+            return analyze_member_access(e.name, original_type, e,
                                          is_lvalue, False, False,
                                          self.named_type, self.not_ready_callback, self.msg,
-                                         chk=self.chk)
+                                         original_type=original_type, chk=self.chk)
 
     def analyze_external_member_access(self, member: str, base_type: Type,
                                        context: Context) -> Type:
@@ -920,7 +911,7 @@ class ExpressionChecker:
         # TODO remove; no private definitions in mypy
         return analyze_member_access(member, base_type, context, False, False, False,
                                      self.named_type, self.not_ready_callback, self.msg,
-                                     chk=self.chk)
+                                     original_type=base_type, chk=self.chk)
 
     def visit_int_expr(self, e: IntExpr) -> Type:
         """Type check an integer literal (trivial)."""
@@ -1081,7 +1072,7 @@ class ExpressionChecker:
         """
         method_type = analyze_member_access(method, base_type, context, False, False, True,
                                             self.named_type, self.not_ready_callback, local_errors,
-                                            chk=self.chk)
+                                            original_type=base_type, chk=self.chk)
         return self.check_call(method_type, [arg], [nodes.ARG_POS],
                                context, arg_messages=local_errors)
 
@@ -1188,22 +1179,15 @@ class ExpressionChecker:
         assert e.op in ('and', 'or')  # Checked by visit_op_expr
 
         if e.op == 'and':
-            right_map, left_map = \
-                mypy.checker.find_isinstance_check(e.left, self.chk.type_map)
+            right_map, left_map = self.chk.find_isinstance_check(e.left)
             restricted_left_type = false_only(left_type)
             result_is_left = not left_type.can_be_true
         elif e.op == 'or':
-            left_map, right_map = \
-                mypy.checker.find_isinstance_check(e.left, self.chk.type_map)
+            left_map, right_map = self.chk.find_isinstance_check(e.left)
             restricted_left_type = true_only(left_type)
             result_is_left = not left_type.can_be_false
 
-        with self.chk.binder.frame_context():
-            if right_map:
-                for var, type in right_map.items():
-                    self.chk.binder.push(var, type)
-
-            right_type = self.accept(e.right, left_type)
+        right_type = self.analyze_cond_branch(right_map, e.right, left_type)
 
         self.check_usable_type(left_type, context)
         self.check_usable_type(right_type, context)
@@ -1371,17 +1355,86 @@ class ExpressionChecker:
     def visit_reveal_type_expr(self, expr: RevealTypeExpr) -> Type:
         """Type check a reveal_type expression."""
         revealed_type = self.accept(expr.expr)
-        self.msg.reveal_type(revealed_type, expr)
+        if not self.chk.current_node_deferred:
+            self.msg.reveal_type(revealed_type, expr)
         return revealed_type
 
     def visit_type_application(self, tapp: TypeApplication) -> Type:
         """Type check a type application (expr[type, ...])."""
-        self.chk.fail(messages.GENERIC_TYPE_NOT_VALID_AS_EXPRESSION, tapp)
+        tp = self.accept(tapp.expr)
+        if isinstance(tp, CallableType):
+            if not tp.is_type_obj():
+                self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp)
+            if len(tp.variables) != len(tapp.types):
+                self.msg.incompatible_type_application(len(tp.variables),
+                                                       len(tapp.types), tapp)
+                return AnyType()
+            return self.apply_generic_arguments(tp, tapp.types, tapp)
+        elif isinstance(tp, Overloaded):
+            if not tp.is_type_obj():
+                self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp)
+            for item in tp.items():
+                if len(item.variables) != len(tapp.types):
+                    self.msg.incompatible_type_application(len(item.variables),
+                                                           len(tapp.types), tapp)
+                    return AnyType()
+            return Overloaded([self.apply_generic_arguments(item, tapp.types, tapp)
+                               for item in tp.items()])
         return AnyType()
 
     def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type:
+        """Get type of a type alias (could be generic) in a runtime expression."""
+        if isinstance(alias.type, Instance) and alias.type.invalid:
+            # An invalid alias, error already has been reported
+            return AnyType()
+        item = alias.type
+        if not alias.in_runtime:
+            # We don't replace TypeVars with Any for an alias used as Alias[T](42).
+            item = self.replace_tvars_any(item)
+        if isinstance(item, Instance):
+            # Normally we get a callable type (or overloaded) with .is_type_obj() true
+            # representing the class's constructor
+            tp = type_object_type(item.type, self.named_type)
+        else:
+            # This type is invalid in most runtime contexts
+            # and a corresponding error will be reported.
+            return alias.fallback
+        if isinstance(tp, CallableType):
+            if len(tp.variables) != len(item.args):
+                self.msg.incompatible_type_application(len(tp.variables),
+                                                       len(item.args), item)
+                return AnyType()
+            return self.apply_generic_arguments(tp, item.args, item)
+        elif isinstance(tp, Overloaded):
+            for it in tp.items():
+                if len(it.variables) != len(item.args):
+                    self.msg.incompatible_type_application(len(it.variables),
+                                                           len(item.args), item)
+                    return AnyType()
+            return Overloaded([self.apply_generic_arguments(it, item.args, item)
+                               for it in tp.items()])
         return AnyType()
 
+    def replace_tvars_any(self, tp: Type) -> Type:
+        """Replace all type variables of a type alias tp with Any. Basically, this function
+        finishes what could not be done in method TypeAnalyser.visit_unbound_type()
+        from typeanal.py.
+        """
+        typ_args = get_typ_args(tp)
+        new_args = typ_args[:]
+        for i, arg in enumerate(typ_args):
+            if isinstance(arg, UnboundType):
+                sym = None
+                try:
+                    sym = self.chk.lookup_qualified(arg.name)
+                except KeyError:
+                    pass
+                if sym and (sym.kind == UNBOUND_TVAR or sym.kind == BOUND_TVAR):
+                    new_args[i] = AnyType()
+            else:
+                new_args[i] = self.replace_tvars_any(arg)
+        return set_typ_args(tp, new_args, tp.line, tp.column)
+
     def visit_list_expr(self, e: ListExpr) -> Type:
         """Type check a list expression [...]."""
         return self.check_lst_expr(e.items, 'builtins.list', '<list>', e)
@@ -1568,9 +1621,8 @@ class ExpressionChecker:
         # they must be considered as indeterminate. We use ErasedType since it
         # does not affect type inference results (it is for purposes like this
         # only).
-        ctx = replace_meta_vars(ctx, ErasedType())
-
-        callable_ctx = cast(CallableType, ctx)
+        callable_ctx = replace_meta_vars(ctx, ErasedType())
+        assert isinstance(callable_ctx, CallableType)
 
         arg_kinds = [arg.kind for arg in e.arguments]
 
@@ -1610,10 +1662,19 @@ class ExpressionChecker:
                         return AnyType()
                     if not self.chk.in_checked_function():
                         return AnyType()
-                    return analyze_member_access(e.name, self_type(e.info), e,
-                                                 is_lvalue, True, False,
-                                                 self.named_type, self.not_ready_callback,
-                                                 self.msg, base, chk=self.chk)
+                    args = self.chk.scope.top_function().arguments
+                    # An empty args with super() is an error; we need something in declared_self
+                    if not args:
+                        self.chk.fail('super() requires at least one positional argument', e)
+                        return AnyType()
+                    declared_self = args[0].variable.type
+                    return analyze_member_access(name=e.name, typ=fill_typevars(e.info), node=e,
+                                                 is_lvalue=False, is_super=True, is_operator=False,
+                                                 builtin_type=self.named_type,
+                                                 not_ready_callback=self.not_ready_callback,
+                                                 msg=self.msg, override_info=base,
+                                                 original_type=declared_self, chk=self.chk)
+            assert False, 'unreachable'
         else:
             # Invalid super. This has been reported by the semantic analyzer.
             return AnyType()
@@ -1642,7 +1703,7 @@ class ExpressionChecker:
                                          type_name: str,
                                          id_for_messages: str) -> Type:
         """Type check a generator expression or a list comprehension."""
-        with self.chk.binder.frame_context():
+        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
             self.check_for_comp(gen)
 
             # Infer the type of the list comprehension by using a synthetic generic
@@ -1662,7 +1723,7 @@ class ExpressionChecker:
 
     def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
         """Type check a dictionary comprehension."""
-        with self.chk.binder.frame_context():
+        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
             self.check_for_comp(e)
 
             # Infer the type of the list comprehension by using a synthetic generic
@@ -1709,7 +1770,7 @@ class ExpressionChecker:
 
         # Gain type information from isinstance if it is there
         # but only for the current expression
-        if_map, else_map = mypy.checker.find_isinstance_check(e.cond, self.chk.type_map)
+        if_map, else_map = self.chk.find_isinstance_check(e.cond)
 
         if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx)
 
@@ -1734,12 +1795,15 @@ class ExpressionChecker:
 
         return res
 
-    def analyze_cond_branch(self, map: Optional[Dict[Node, Type]],
-                            node: Node, context: Optional[Type]) -> Type:
-        with self.chk.binder.frame_context():
-            if map:
-                for var, type in map.items():
-                    self.chk.binder.push(var, type)
+    def analyze_cond_branch(self, map: Optional[Dict[Expression, Type]],
+                            node: Expression, context: Optional[Type]) -> Type:
+        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
+            if map is None:
+                # We still need to type check node, in case we want to
+                # process it for isinstance checks later
+                self.accept(node, context=context)
+                return UninhabitedType()
+            self.chk.push_type_map(map)
             return self.accept(node, context=context)
 
     def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
@@ -1750,7 +1814,7 @@ class ExpressionChecker:
     # Helpers
     #
 
-    def accept(self, node: Node, context: Type = None) -> Type:
+    def accept(self, node: Expression, context: Type = None) -> Type:
         """Type check a node. Alias for TypeChecker.accept."""
         return self.chk.accept(node, context)
 
@@ -1758,10 +1822,6 @@ class ExpressionChecker:
         """Generate an error if type is Void."""
         self.chk.check_usable_type(typ, context)
 
-    def is_boolean(self, typ: Type) -> bool:
-        """Is type compatible with bool?"""
-        return is_subtype(typ, self.chk.bool_type())
-
     def named_type(self, name: str) -> Instance:
         """Return an instance type with type given by the name and no type
         arguments. Alias for TypeChecker.named_type.
@@ -1793,14 +1853,6 @@ class ExpressionChecker:
                     [self.named_type('builtins.unicode'),
                      AnyType()])))
 
-    def has_non_method(self, typ: Type, member: str) -> bool:
-        """Does type have a member variable / property with the given name?"""
-        if isinstance(typ, Instance):
-            return (not typ.type.has_method(member) and
-                    typ.type.has_readable_member(member))
-        else:
-            return False
-
     def has_member(self, typ: Type, member: str) -> bool:
         """Does type have member with the given name?"""
         # TODO TupleType => also consider tuple attributes
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 7241b72..9a55103 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -1,25 +1,28 @@
 """Type checking of attribute access"""
 
-from typing import cast, Callable, List, Dict, Optional
+from typing import cast, Callable, List, Optional, TypeVar
 
 from mypy.types import (
-    Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarId, TypeVarDef,
-    Overloaded, TypeVarType, TypeTranslator, UnionType, PartialType,
-    DeletedType, NoneTyp, TypeType
+    Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarDef,
+    Overloaded, TypeVarType, UnionType, PartialType,
+    DeletedType, NoneTyp, TypeType, function_type
 )
 from mypy.nodes import TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context, MypyFile
-from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, OpExpr, ComparisonExpr
-from mypy.nodes import function_type, Decorator, OverloadedFuncDef
+from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2
+from mypy.nodes import Decorator, OverloadedFuncDef
 from mypy.messages import MessageBuilder
 from mypy.maptype import map_instance_to_supertype
-from mypy.expandtype import expand_type_by_instance
-from mypy.nodes import method_type, method_type_with_fallback
-from mypy.semanal import self_type
+from mypy.expandtype import expand_type_by_instance, expand_type
+from mypy.infer import infer_type_arguments
+from mypy.semanal import fill_typevars
 from mypy import messages
 from mypy import subtypes
-if False:  # import for forward declaration only
+MYPY = False
+if MYPY:  # import for forward declaration only
     import mypy.checker
 
+from mypy import experiments
+
 
 def analyze_member_access(name: str,
                           typ: Type,
@@ -29,19 +32,24 @@ def analyze_member_access(name: str,
                           is_operator: bool,
                           builtin_type: Callable[[str], Instance],
                           not_ready_callback: Callable[[str, Context], None],
-                          msg: MessageBuilder,
+                          msg: MessageBuilder, *,
+                          original_type: Type,
                           override_info: TypeInfo = None,
-                          report_type: Type = None,
                           chk: 'mypy.checker.TypeChecker' = None) -> Type:
-    """Analyse attribute access.
+    """Return the type of attribute `name` of typ.
 
     This is a general operation that supports various different variations:
 
       1. lvalue or non-lvalue access (i.e. setter or getter access)
       2. supertype access (when using super(); is_super == True and
          override_info should refer to the supertype)
+
+    original_type is the most precise inferred or declared type of the base object
+    that we have available. typ is generally a supertype of original_type.
+    When looking for an attribute of typ, we may perform recursive calls targeting
+    the fallback type, for example.
+    original_type is always the type used in the initial call.
     """
-    report_type = report_type or typ
     if isinstance(typ, Instance):
         if name == '__init__' and not is_super:
             # Accessing __init__ in statically typed code would compromise
@@ -55,29 +63,35 @@ def analyze_member_access(name: str,
         if override_info:
             info = override_info
 
+        if (experiments.find_occurrences and
+                info.name() == experiments.find_occurrences[0] and
+                name == experiments.find_occurrences[1]):
+            msg.note("Occurrence of '{}.{}'".format(*experiments.find_occurrences), node)
+
         # Look up the member. First look up the method dictionary.
         method = info.get_method(name)
         if method:
             if method.is_property:
                 assert isinstance(method, OverloadedFuncDef)
                 return analyze_var(name, method.items[0].var, typ, info, node, is_lvalue, msg,
-                                   not_ready_callback)
+                                   original_type, not_ready_callback)
             if is_lvalue:
                 msg.cant_assign_to_method(node)
-            typ = map_instance_to_supertype(typ, method.info)
+            signature = function_type(method, builtin_type('builtins.function'))
             if name == '__new__':
                 # __new__ is special and behaves like a static method -- don't strip
                 # the first argument.
-                signature = function_type(method, builtin_type('builtins.function'))
+                pass
             else:
-                signature = method_type_with_fallback(method, builtin_type('builtins.function'))
+                signature = bind_self(signature, original_type)
+            typ = map_instance_to_supertype(typ, method.info)
             return expand_type_by_instance(signature, typ)
         else:
             # Not a method.
             return analyze_member_var_access(name, typ, info, node,
                                              is_lvalue, is_super, builtin_type,
                                              not_ready_callback, msg,
-                                             report_type=report_type, chk=chk)
+                                             original_type=original_type, chk=chk)
     elif isinstance(typ, AnyType):
         # The base object has dynamic type.
         return AnyType()
@@ -87,20 +101,21 @@ def analyze_member_access(name: str,
         # The only attribute NoneType has are those it inherits from object
         return analyze_member_access(name, builtin_type('builtins.object'), node, is_lvalue,
                                      is_super, is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type, chk=chk)
+                                     original_type=original_type, chk=chk)
     elif isinstance(typ, UnionType):
         # The base object has dynamic type.
         msg.disable_type_names += 1
         results = [analyze_member_access(name, subtype, node, is_lvalue, is_super,
                                          is_operator, builtin_type, not_ready_callback, msg,
-                                         chk=chk)
+                                         original_type=original_type, chk=chk)
                    for subtype in typ.items]
         msg.disable_type_names -= 1
         return UnionType.make_simplified_union(results)
     elif isinstance(typ, TupleType):
         # Actually look up from the fallback instance type.
         return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
-                                     is_operator, builtin_type, not_ready_callback, msg, chk=chk)
+                                     is_operator, builtin_type, not_ready_callback, msg,
+                                     original_type=original_type, chk=chk)
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         # Class attribute.
         # TODO super?
@@ -123,24 +138,25 @@ def analyze_member_access(name: str,
                 # the corresponding method in the current instance to avoid this edge case.
                 # See https://github.com/python/mypy/pull/1787 for more info.
                 result = analyze_class_attribute_access(ret_type, name, node, is_lvalue,
-                                                        builtin_type, not_ready_callback, msg)
+                                                        builtin_type, not_ready_callback, msg,
+                                                        original_type=original_type)
                 if result:
                     return result
             # Look up from the 'type' type.
             return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
                                          is_operator, builtin_type, not_ready_callback, msg,
-                                         report_type=report_type, chk=chk)
+                                         original_type=original_type, chk=chk)
         else:
             assert False, 'Unexpected type {}'.format(repr(ret_type))
     elif isinstance(typ, FunctionLike):
         # Look up from the 'function' type.
         return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type, chk=chk)
+                                     original_type=original_type, chk=chk)
     elif isinstance(typ, TypeVarType):
         return analyze_member_access(name, typ.upper_bound, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type, chk=chk)
+                                     original_type=original_type, chk=chk)
     elif isinstance(typ, DeletedType):
         msg.deleted_as_rvalue(typ, node)
         return AnyType()
@@ -155,17 +171,18 @@ def analyze_member_access(name: str,
         if item and not is_operator:
             # See comment above for why operators are skipped
             result = analyze_class_attribute_access(item, name, node, is_lvalue,
-                                                    builtin_type, not_ready_callback, msg)
+                                                    builtin_type, not_ready_callback, msg,
+                                                    original_type=original_type)
             if result:
                 return result
         fallback = builtin_type('builtins.type')
         return analyze_member_access(name, fallback, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type, chk=chk)
+                                     original_type=original_type, chk=chk)
 
     if chk and chk.should_suppress_optional_error([typ]):
         return AnyType()
-    return msg.has_no_attr(report_type, name, node)
+    return msg.has_no_attr(original_type, name, node)
 
 
 def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
@@ -173,12 +190,13 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                               builtin_type: Callable[[str], Instance],
                               not_ready_callback: Callable[[str, Context], None],
                               msg: MessageBuilder,
-                              report_type: Type = None,
+                              original_type: Type,
                               chk: 'mypy.checker.TypeChecker' = None) -> Type:
     """Analyse attribute access that does not target a method.
 
-    This is logically part of analyze_member_access and the arguments are
-    similar.
+    This is logically part of analyze_member_access and the arguments are similar.
+
+    original_type is the type of E in the expression E.var
     """
     # It was not a method. Try looking up a variable.
     v = lookup_member_var_or_accessor(info, name, is_lvalue)
@@ -188,16 +206,18 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
         # The associated Var node of a decorator contains the type.
         v = vv.var
     if isinstance(v, Var):
-        return analyze_var(name, v, itype, info, node, is_lvalue, msg, not_ready_callback)
+        return analyze_var(name, v, itype, info, node, is_lvalue, msg,
+                           original_type, not_ready_callback)
     elif isinstance(v, FuncDef):
         assert False, "Did not expect a function"
     elif not v and name not in ['__getattr__', '__setattr__']:
         if not is_lvalue:
             method = info.get_method('__getattr__')
             if method:
+                function = function_type(method, builtin_type('builtins.function'))
+                bound_method = bind_self(function, original_type)
                 typ = map_instance_to_supertype(itype, method.info)
-                getattr_type = expand_type_by_instance(
-                    method_type_with_fallback(method, builtin_type('builtins.function')), typ)
+                getattr_type = expand_type_by_instance(bound_method, typ)
                 if isinstance(getattr_type, CallableType):
                     return getattr_type.ret_type
 
@@ -211,15 +231,17 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
     else:
         if chk and chk.should_suppress_optional_error([itype]):
             return AnyType()
-        return msg.has_no_attr(report_type or itype, name, node)
+        return msg.has_no_attr(original_type, name, node)
 
 
 def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Context,
-               is_lvalue: bool, msg: MessageBuilder,
-               not_ready_callback: Callable[[str, Context], None]) -> Type:
+                is_lvalue: bool, msg: MessageBuilder, original_type: Type,
+                not_ready_callback: Callable[[str, Context], None]) -> Type:
     """Analyze access to an attribute via a Var node.
 
     This is conceptually part of analyze_member_access and the arguments are similar.
+
+    original_type is the type of E in the expression E.var
     """
     # Found a member variable.
     itype = map_instance_to_supertype(itype, var.info)
@@ -231,7 +253,7 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
         if is_lvalue and var.is_property and not var.is_settable_property:
             # TODO allow setting attributes in subclass (although it is probably an error)
             msg.read_only_property(name, info, node)
-        if var.is_initialized_in_class and isinstance(t, FunctionLike):
+        if var.is_initialized_in_class and isinstance(t, FunctionLike) and not t.is_type_obj():
             if is_lvalue:
                 if var.is_property:
                     if not var.is_settable_property:
@@ -245,11 +267,12 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
                 # class.
                 functype = t
                 check_method_type(functype, itype, var.is_classmethod, node, msg)
-                signature = method_type(functype)
+                signature = bind_self(functype, original_type)
                 if var.is_property:
                     # A property cannot have an overloaded type => the cast
                     # is fine.
-                    return cast(CallableType, signature).ret_type
+                    assert isinstance(signature, CallableType)
+                    return signature.ret_type
                 else:
                     return signature
         return t
@@ -275,7 +298,7 @@ def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: Messag
 
 
 def lookup_member_var_or_accessor(info: TypeInfo, name: str,
-                                  is_lvalue: bool) -> SymbolNode:
+                                  is_lvalue: bool) -> Optional[SymbolNode]:
     """Find the attribute/accessor node that refers to a member of a type."""
     # TODO handle lvalues
     node = info.get(name)
@@ -298,7 +321,7 @@ def check_method_type(functype: FunctionLike, itype: Instance, is_classmethod: b
             # we passed to typ.fallback in analyze_member_access. See #1432.
             if isinstance(selfarg, TupleType):
                 selfarg = selfarg.fallback
-            if not subtypes.is_equivalent(selfarg, itype):
+            if not subtypes.is_subtype(selfarg, itype):
                 msg.invalid_method_type(item, context)
         else:
             # Check that cls argument has type 'Any' or valid class type.
@@ -320,7 +343,9 @@ def analyze_class_attribute_access(itype: Instance,
                                    is_lvalue: bool,
                                    builtin_type: Callable[[str], Instance],
                                    not_ready_callback: Callable[[str, Context], None],
-                                   msg: MessageBuilder) -> Type:
+                                   msg: MessageBuilder,
+                                   original_type: Type) -> Type:
+    """original_type is the type of E in the expression E.var"""
     node = itype.type.get(name)
     if not node:
         if itype.type.fallback_to_any:
@@ -343,7 +368,7 @@ def analyze_class_attribute_access(itype: Instance,
         if isinstance(t, PartialType):
             return handle_partial_attribute_type(t, is_lvalue, msg, node.node)
         is_classmethod = is_decorated and cast(Decorator, node.node).func.is_class
-        return add_class_tvars(t, itype.type, is_classmethod, builtin_type)
+        return add_class_tvars(t, itype, is_classmethod, builtin_type, original_type)
     elif isinstance(node.node, Var):
         not_ready_callback(name, context)
         return AnyType()
@@ -362,25 +387,35 @@ def analyze_class_attribute_access(itype: Instance,
         return function_type(cast(FuncBase, node.node), builtin_type('builtins.function'))
 
 
-def add_class_tvars(t: Type, info: TypeInfo, is_classmethod: bool,
-                    builtin_type: Callable[[str], Instance]) -> Type:
+def add_class_tvars(t: Type, itype: Instance, is_classmethod: bool,
+                    builtin_type: Callable[[str], Instance],
+                    original_type: Type) -> Type:
+    """Instantiate type variables during analyze_class_attribute_access,
+    e.g T and Q in the following:
+
+    def A(Generic(T)):
+        @classmethod
+        def foo(cls: Type[Q]) -> Tuple[T, Q]: ...
+
+    class B(A): pass
+
+    B.foo()
+
+    original_type is the value of the type B in the expression B.foo()
+    """
+    # TODO: verify consistency between Q and T
+    info = itype.type  # type: TypeInfo
     if isinstance(t, CallableType):
         # TODO: Should we propagate type variable values?
-        vars = [TypeVarDef(n, i + 1, None, builtin_type('builtins.object'), tv.variance)
-                for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)]
-        arg_types = t.arg_types
-        arg_kinds = t.arg_kinds
-        arg_names = t.arg_names
+        tvars = [TypeVarDef(n, i + 1, None, builtin_type('builtins.object'), tv.variance)
+                 for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)]
         if is_classmethod:
-            arg_types = arg_types[1:]
-            arg_kinds = arg_kinds[1:]
-            arg_names = arg_names[1:]
-        return t.copy_modified(arg_types=arg_types, arg_kinds=arg_kinds, arg_names=arg_names,
-                               variables=vars + t.variables)
+            t = bind_self(t, original_type)
+        return t.copy_modified(variables=tvars + t.variables)
     elif isinstance(t, Overloaded):
-        return Overloaded([cast(CallableType, add_class_tvars(i, info, is_classmethod,
-                                                              builtin_type))
-                           for i in t.items()])
+        return Overloaded([cast(CallableType, add_class_tvars(item, itype, is_classmethod,
+                                                              builtin_type, original_type))
+                           for item in t.items()])
     return t
 
 
@@ -423,7 +458,7 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
 
 def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
                                    fallback: Instance) -> FunctionLike:
-    signature = method_type_with_fallback(init_or_new, fallback)
+    signature = bind_self(function_type(init_or_new, fallback))
 
     # The __init__ method might come from a generic superclass
     # (init_or_new.info) with type variables that do not map
@@ -447,8 +482,9 @@ def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
         return class_callable(signature, info, fallback, special_sig)
     else:
         # Overloaded __init__/__new__.
+        assert isinstance(signature, Overloaded)
         items = []  # type: List[CallableType]
-        for item in cast(Overloaded, signature).items():
+        for item in signature.items():
             items.append(class_callable(item, info, fallback, special_sig))
         return Overloaded(items)
 
@@ -461,7 +497,7 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
     variables.extend(init_type.variables)
 
     callable_type = init_type.copy_modified(
-        ret_type=self_type(info), fallback=type_type, name=None, variables=variables,
+        ret_type=fill_typevars(info), fallback=type_type, name=None, variables=variables,
         special_sig=special_sig)
     c = callable_type.with_name('"{}"'.format(info.name()))
     c.is_classmethod_class = True
@@ -482,7 +518,7 @@ def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
     Now S in the context of D would be mapped to E[T] in the context of C.
     """
     # Create the type of self in subtype, of form t[a1, ...].
-    inst_type = self_type(sub_info)
+    inst_type = fill_typevars(sub_info)
     if isinstance(inst_type, TupleType):
         inst_type = inst_type.fallback
     # Map the type of self to supertype. This gets us a description of the
@@ -496,3 +532,82 @@ def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
     # in inst_type they are interpreted in subtype context. This works even if
     # the names of type variables in supertype and subtype overlap.
     return expand_type_by_instance(typ, inst_type)
+
+
+F = TypeVar('F', bound=FunctionLike)
+
+
+def bind_self(method: F, original_type: Type = None) -> F:
+    """Return a copy of `method`, with the type of its first parameter (usually
+    self or cls) bound to original_type.
+
+    If the type of `self` is a generic type (T, or Type[T] for classmethods),
+    instantiate every occurrence of type with original_type in the rest of the
+    signature and in the return type.
+
+    original_type is the type of E in the expression E.copy(). It is None in
+    compatibility checks. In this case we treat it as the erasure of the
+    declared type of self.
+
+    This way we can express "the type of self". For example:
+
+    T = TypeVar('T', bound='A')
+    class A:
+        def copy(self: T) -> T: ...
+
+    class B(A): pass
+
+    b = B().copy()  # type: B
+
+    """
+    if isinstance(method, Overloaded):
+        return cast(F, Overloaded([bind_self(c, method) for c in method.items()]))
+    assert isinstance(method, CallableType)
+    func = method
+    if not func.arg_types:
+        # invalid method. return something
+        return cast(F, func)
+    if func.arg_kinds[0] == ARG_STAR:
+        # The signature is of the form 'def foo(*args, ...)'.
+        # In this case we shouldn't drop the first arg,
+        # since func will be absorbed by the *args.
+
+        # TODO: infer bounds on the type of *args?
+        return cast(F, func)
+    self_param_type = func.arg_types[0]
+    if func.variables and (isinstance(self_param_type, TypeVarType) or
+                           (isinstance(self_param_type, TypeType) and
+                            isinstance(self_param_type.item, TypeVarType))):
+        if original_type is None:
+            # Type check method override
+            # XXX value restriction as union?
+            original_type = erase_to_bound(self_param_type)
+
+        typearg = infer_type_arguments([x.id for x in func.variables],
+                                       self_param_type, original_type)[0]
+
+        def expand(target: Type) -> Type:
+            return expand_type(target, {func.variables[0].id: typearg})
+
+        arg_types = [expand(x) for x in func.arg_types[1:]]
+        ret_type = expand(func.ret_type)
+        variables = func.variables[1:]
+    else:
+        arg_types = func.arg_types[1:]
+        ret_type = func.ret_type
+        variables = func.variables
+    res = func.copy_modified(arg_types=arg_types,
+                             arg_kinds=func.arg_kinds[1:],
+                             arg_names=func.arg_names[1:],
+                             variables=variables,
+                             ret_type=ret_type)
+    return cast(F, res)
+
+
+def erase_to_bound(t: Type):
+    if isinstance(t, TypeVarType):
+        return t.upper_bound
+    if isinstance(t, TypeType):
+        if isinstance(t.item, TypeVarType):
+            return TypeType(t.item.upper_bound)
+    return t
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 5af63e9..ddf69c2 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -8,7 +8,7 @@ from mypy.types import (
     Type, AnyType, TupleType, Instance, UnionType
 )
 from mypy.nodes import (
-    Node, StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context
+    StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression
 )
 if False:
     # break import cycle only needed for mypy
@@ -57,9 +57,10 @@ class StringFormatterChecker:
 
     # TODO: In Python 3, the bytes formatting has a more restricted set of options
     # compared to string formatting.
+    # TODO: Bytes formatting in Python 3 is only supported in 3.5 and up.
     def check_str_interpolation(self,
                                 str: Union[StrExpr, BytesExpr, UnicodeExpr],
-                                replacements: Node) -> Type:
+                                replacements: Expression) -> Type:
         """Check the types of the 'replacements' in a string interpolation
         expression: str % replacements
         """
@@ -114,7 +115,7 @@ class StringFormatterChecker:
         return has_key
 
     def check_simple_str_interpolation(self, specifiers: List[ConversionSpecifier],
-                                       replacements: Node) -> None:
+                                       replacements: Expression) -> None:
         checkers = self.build_replacement_checkers(specifiers, replacements)
         if checkers is None:
             return
@@ -149,13 +150,12 @@ class StringFormatterChecker:
                     check_type(rep_type)
 
     def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
-                                       replacements: Node) -> None:
-        dict_with_only_str_literal_keys = (isinstance(replacements, DictExpr) and
-                                          all(isinstance(k, (StrExpr, BytesExpr))
-                                              for k, v in replacements.items))
-        if dict_with_only_str_literal_keys:
+                                       replacements: Expression) -> None:
+        if (isinstance(replacements, DictExpr) and
+                all(isinstance(k, (StrExpr, BytesExpr))
+                    for k, v in replacements.items)):
             mapping = {}  # type: Dict[str, Type]
-            for k, v in cast(DictExpr, replacements).items:
+            for k, v in replacements.items:
                 key_str = cast(StrExpr, k).value
                 mapping[key_str] = self.accept(v)
 
@@ -183,9 +183,9 @@ class StringFormatterChecker:
                                    'expression has type', 'expected type for mapping is')
 
     def build_replacement_checkers(self, specifiers: List[ConversionSpecifier],
-                                   context: Context) -> List[Tuple[Callable[[Node], None],
+                                   context: Context) -> List[Tuple[Callable[[Expression], None],
                                                                    Callable[[Type], None]]]:
-        checkers = []  # type: List[Tuple[Callable[[Node], None], Callable[[Type], None]]]
+        checkers = []  # type: List[Tuple[Callable[[Expression], None], Callable[[Type], None]]]
         for specifier in specifiers:
             checker = self.replacement_checkers(specifier, context)
             if checker is None:
@@ -194,13 +194,13 @@ class StringFormatterChecker:
         return checkers
 
     def replacement_checkers(self, specifier: ConversionSpecifier,
-                             context: Context) -> List[Tuple[Callable[[Node], None],
+                             context: Context) -> List[Tuple[Callable[[Expression], None],
                                                              Callable[[Type], None]]]:
         """Returns a list of tuples of two functions that check whether a replacement is
         of the right type for the specifier. The first functions take a node and checks
         its type in the right type context. The second function just checks a type.
         """
-        checkers = []  # type: List[ Tuple[ Callable[[Node], None], Callable[[Type], None] ] ]
+        checkers = []  # type: List[Tuple[Callable[[Expression], None], Callable[[Type], None]]]
 
         if specifier.width == '*':
             checkers.append(self.checkers_for_star(context))
@@ -218,7 +218,7 @@ class StringFormatterChecker:
             checkers.append(c)
         return checkers
 
-    def checkers_for_star(self, context: Context) -> Tuple[Callable[[Node], None],
+    def checkers_for_star(self, context: Context) -> Tuple[Callable[[Expression], None],
                                                            Callable[[Type], None]]:
         """Returns a tuple of check functions that check whether, respectively,
         a node or a type is compatible with a star in a conversion specifier
@@ -229,14 +229,14 @@ class StringFormatterChecker:
             expected = self.named_type('builtins.int')
             self.chk.check_subtype(type, expected, context, '* wants int')
 
-        def check_node(node: Node) -> None:
-            type = self.accept(node, expected)
+        def check_expr(expr: Expression) -> None:
+            type = self.accept(expr, expected)
             check_type(type)
 
-        return check_node, check_type
+        return check_expr, check_type
 
     def checkers_for_regular_type(self, type: str,
-                                  context: Context) -> Tuple[Callable[[Node], None],
+                                  context: Context) -> Tuple[Callable[[Expression], None],
                                                              Callable[[Type], None]]:
         """Returns a tuple of check functions that check whether, respectively,
         a node or a type is compatible with 'type'. Return None in case of an
@@ -250,14 +250,15 @@ class StringFormatterChecker:
                               messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
                               'expression has type', 'placeholder has type')
 
-        def check_node(node: Node) -> None:
-            type = self.accept(node, expected_type)
+        def check_expr(expr: Expression) -> None:
+            type = self.accept(expr, expected_type)
             check_type(type)
 
-        return check_node, check_type
+        return check_expr, check_type
 
-    def checkers_for_c_type(self, type: str, context: Context) -> Tuple[Callable[[Node], None],
-                                                                        Callable[[Type], None]]:
+    def checkers_for_c_type(self, type: str,
+                            context: Context) -> Tuple[Callable[[Expression], None],
+                                                       Callable[[Type], None]]:
         """Returns a tuple of check functions that check whether, respectively,
         a node or a type is compatible with 'type' that is a character type
         """
@@ -270,14 +271,14 @@ class StringFormatterChecker:
                               messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
                               'expression has type', 'placeholder has type')
 
-        def check_node(node: Node) -> None:
+        def check_expr(expr: Expression) -> None:
             """int, or str with length 1"""
-            type = self.accept(node, expected_type)
-            if isinstance(node, (StrExpr, BytesExpr)) and len(cast(StrExpr, node).value) != 1:
+            type = self.accept(expr, expected_type)
+            if isinstance(expr, (StrExpr, BytesExpr)) and len(cast(StrExpr, expr).value) != 1:
                 self.msg.requires_int_or_char(context)
             check_type(type)
 
-        return check_node, check_type
+        return check_expr, check_type
 
     def conversion_type(self, p: str, context: Context) -> Type:
         """Return the type that is accepted for a string interpolation
@@ -310,6 +311,6 @@ class StringFormatterChecker:
         """
         return self.chk.named_type(name)
 
-    def accept(self, node: Node, context: Type = None) -> Type:
+    def accept(self, expr: Expression, context: Type = None) -> Type:
         """Type check a node. Alias for TypeChecker.accept."""
-        return self.chk.accept(node, context)
+        return self.chk.accept(expr, context)
diff --git a/mypy/constraints.py b/mypy/constraints.py
index f204c02..e26e583 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -1,6 +1,6 @@
 """Type inference constraints."""
 
-from typing import List, Optional, cast
+from typing import List, Optional
 
 from mypy.types import (
     CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
diff --git a/mypy/errors.py b/mypy/errors.py
index 541e4ca..373f6e7 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -1,10 +1,9 @@
-import os
 import os.path
 import sys
 import traceback
 from collections import OrderedDict, defaultdict
 
-from typing import Tuple, List, TypeVar, Set, Dict, Optional
+from typing import Tuple, List, TypeVar, Set, Dict
 
 from mypy.options import Options
 
@@ -146,10 +145,6 @@ class Errors:
     def set_file_ignored_lines(self, file: str, ignored_lines: Set[int] = None) -> None:
         self.ignored_lines[file] = ignored_lines
 
-    def mark_file_ignored_lines_used(self, file: str, used_ignored_lines: Set[int] = None
-                                     ) -> None:
-        self.used_ignored_lines[file] |= used_ignored_lines
-
     def push_function(self, name: str) -> None:
         """Set the current function or member short name (it can be None)."""
         self.function_or_member.append(name)
@@ -164,14 +159,6 @@ class Errors:
     def pop_type(self) -> None:
         self.type_name.pop()
 
-    def push_import_context(self, path: str, line: int) -> None:
-        """Add a (file, line) tuple to the import context."""
-        self.import_ctx.append((os.path.normpath(path), line))
-
-    def pop_import_context(self) -> None:
-        """Remove the topmost item from the import context."""
-        self.import_ctx.pop()
-
     def import_context(self) -> List[Tuple[str, int]]:
         """Return a copy of the import context."""
         return self.import_ctx[:]
@@ -290,7 +277,9 @@ class Errors:
 
         for e in errors:
             # Report module import context, if different from previous message.
-            if e.import_ctx != prev_import_context:
+            if self.hide_error_context:
+                pass
+            elif e.import_ctx != prev_import_context:
                 last = len(e.import_ctx) - 1
                 i = last
                 while i >= 0:
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index c299163..c1ff808 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -1,4 +1,4 @@
-from typing import Dict, Tuple, List, cast
+from typing import Dict, List
 
 from mypy.types import (
     Type, Instance, CallableType, TypeVisitor, UnboundType, ErrorType, AnyType,
@@ -85,7 +85,9 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
     def visit_overloaded(self, t: Overloaded) -> Type:
         items = []  # type: List[CallableType]
         for item in t.items():
-            items.append(cast(CallableType, item.accept(self)))
+            new_item = item.accept(self)
+            assert isinstance(new_item, CallableType)
+            items.append(new_item)
         return Overloaded(items)
 
     def visit_tuple_type(self, t: TupleType) -> Type:
diff --git a/mypy/experiments.py b/mypy/experiments.py
index a4684cc..8ac437e 100644
--- a/mypy/experiments.py
+++ b/mypy/experiments.py
@@ -1 +1,3 @@
+from typing import Optional, Tuple
 STRICT_OPTIONAL = False
+find_occurrences = None  # type: Optional[Tuple[str, str]]
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index 764c716..293454e 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -20,11 +20,11 @@ def expr_to_unanalyzed_type(expr: Expression) -> Type:
     """
     if isinstance(expr, NameExpr):
         name = expr.name
-        return UnboundType(name, line=expr.line)
+        return UnboundType(name, line=expr.line, column=expr.column)
     elif isinstance(expr, MemberExpr):
         fullname = get_member_expr_fullname(expr)
         if fullname:
-            return UnboundType(fullname, line=expr.line)
+            return UnboundType(fullname, line=expr.line, column=expr.column)
         else:
             raise TypeTranslationError()
     elif isinstance(expr, IndexExpr):
@@ -42,7 +42,7 @@ def expr_to_unanalyzed_type(expr: Expression) -> Type:
             raise TypeTranslationError()
     elif isinstance(expr, ListExpr):
         return TypeList([expr_to_unanalyzed_type(t) for t in expr.items],
-                        line=expr.line)
+                        line=expr.line, column=expr.column)
     elif isinstance(expr, (StrExpr, BytesExpr)):
         # Parse string literal type.
         try:
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 85d7ac8..7463b9a 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -157,8 +157,6 @@ class ASTConverter(ast35.NodeTransformer):
         op_name = ASTConverter.op_map.get(type(op))
         if op_name is None:
             raise RuntimeError('Unknown operator ' + str(type(op)))
-        elif op_name == '@':
-            raise RuntimeError('mypy does not support the MatMult operator')
         else:
             return op_name
 
@@ -298,6 +296,10 @@ class ASTConverter(ast35.NodeTransformer):
 
         func_type = None
         if any(arg_types) or return_type:
+            if len(arg_types) > len(arg_kinds):
+                raise FastParserError('Type signature has too many arguments', n.lineno, offset=0)
+            if len(arg_types) < len(arg_kinds):
+                raise FastParserError('Type signature has too few arguments', n.lineno, offset=0)
             func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
                                      arg_kinds,
                                      arg_names,
@@ -373,9 +375,10 @@ class ASTConverter(ast35.NodeTransformer):
         if isinstance(n, ast35.Name):
             return n.id
         elif isinstance(n, ast35.Attribute):
-            return "{}.{}".format(self.stringify_name(n.value), n.attr)
-        else:
-            assert False, "can't stringify " + str(type(n))
+            sv = self.stringify_name(n.value)
+            if sv is not None:
+                return "{}.{}".format(sv, n.attr)
+        return None  # Can't do it.
 
     # ClassDef(identifier name,
     #  expr* bases,
@@ -389,6 +392,8 @@ class ASTConverter(ast35.NodeTransformer):
         metaclass = None
         if metaclass_arg:
             metaclass = self.stringify_name(metaclass_arg.value)
+            if metaclass is None:
+                metaclass = '<error>'  # To be reported later
 
         cdef = ClassDef(n.name,
                         self.as_block(n.body, n.lineno),
@@ -431,6 +436,7 @@ class ASTConverter(ast35.NodeTransformer):
             typ = parse_type_comment(n.type_comment, n.lineno)
         elif new_syntax:
             typ = TypeConverter(line=n.lineno).visit(n.annotation)  # type: ignore
+            typ.column = n.annotation.col_offset
         if n.value is None:  # always allow 'x: int'
             rvalue = TempNode(AnyType())  # type: Expression
         else:
@@ -735,6 +741,9 @@ class ASTConverter(ast35.NodeTransformer):
     # Num(object n) -- a number as a PyObject.
     @with_line
     def visit_Num(self, n: ast35.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
+        if getattr(n, 'contains_underscores', None) and self.pyversion < (3, 6):
+            raise FastParserError('Underscores in numeric literals are only '
+                                  'supported in Python 3.6', n.lineno, n.col_offset)
         if isinstance(n.n, int):
             return IntExpr(n.n)
         elif isinstance(n.n, float):
@@ -863,7 +872,9 @@ class TypeConverter(ast35.NodeTransformer):
 
     # Subscript(expr value, slice slice, expr_context ctx)
     def visit_Subscript(self, n: ast35.Subscript) -> Type:
-        assert isinstance(n.slice, ast35.Index)
+        if not isinstance(n.slice, ast35.Index):
+            raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, self.line,
+                                        getattr(n, 'col_offset', -1))
 
         value = self.visit(n.value)
 
@@ -906,3 +917,7 @@ class TypeCommentParseError(Exception):
         self.msg = msg
         self.lineno = lineno
         self.offset = offset
+
+
+class FastParserError(TypeCommentParseError):
+    pass
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index 76e95bb..19af86c 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -53,6 +53,7 @@ except ImportError:
         print('The typed_ast package required by --fast-parser is only compatible with'
               ' Python 3.3 and greater.')
     sys.exit(1)
+from mypy.fastparse import FastParserError
 
 T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
 U = TypeVar('U', bound=Node)
@@ -302,6 +303,10 @@ class ASTConverter(ast27.NodeTransformer):
 
         func_type = None
         if any(arg_types) or return_type:
+            if len(arg_types) > len(arg_kinds):
+                raise FastParserError('Type signature has too many arguments', n.lineno, offset=0)
+            if len(arg_types) < len(arg_kinds):
+                raise FastParserError('Type signature has too few arguments', n.lineno, offset=0)
             func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
                                      arg_kinds,
                                      arg_names,
diff --git a/mypy/fixup.py b/mypy/fixup.py
index eec31ec..147238d 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -1,14 +1,14 @@
 """Fix up various things after deserialization."""
 
-from typing import Any, Dict, Optional, cast
+from typing import Any, Dict, Optional
 
 from mypy.nodes import (MypyFile, SymbolNode, SymbolTable, SymbolTableNode,
                         TypeInfo, FuncDef, OverloadedFuncDef, Decorator, Var,
                         TypeVarExpr, ClassDef,
-                        LDEF, MDEF, GDEF, MODULE_REF)
+                        LDEF, MDEF, GDEF)
 from mypy.types import (CallableType, EllipsisType, Instance, Overloaded, TupleType,
                         TypeList, TypeVarType, UnboundType, UnionType, TypeVisitor,
-                        UninhabitedType, TypeType)
+                        TypeType)
 from mypy.visitor import NodeVisitor
 
 
@@ -248,31 +248,3 @@ def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTa
         node = stnode.node
         assert isinstance(node, TypeInfo)
         names = node.names
-
-
-def store_qualified(modules: Dict[str, MypyFile], name: str, info: SymbolNode) -> None:
-    head = name
-    rest = []
-    while True:
-        head, tail = head.rsplit('.', 1)
-        mod = modules.get(head)
-        if mod is not None:
-            rest.append(tail)
-            break
-    names = mod.names
-    while True:
-        assert rest, "Cannot find %s" % (name,)
-        key = rest.pop()
-        if key not in names:
-            assert not rest, "Cannot find %s for %s" % (key, name)
-            # Store it.
-            # TODO: kind might be something else?
-            names[key] = SymbolTableNode(GDEF, info)
-            return
-        stnode = names[key]
-        node = stnode.node
-        if not rest:
-            stnode.node = info
-            return
-        assert isinstance(node, TypeInfo)
-        names = node.names
diff --git a/mypy/join.py b/mypy/join.py
index c6d6333..6d86106 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -82,6 +82,9 @@ def join_types(s: Type, t: Type) -> Type:
     if isinstance(s, NoneTyp) and not isinstance(t, NoneTyp):
         s, t = t, s
 
+    if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType):
+        s, t = t, s
+
     # Use a visitor to handle non-trivial cases.
     return t.accept(TypeJoinVisitor(s))
 
@@ -232,8 +235,9 @@ class TypeJoinVisitor(TypeVisitor[Type]):
             for i in range(t.length()):
                 items.append(self.join(t.items[i], self.s.items[i]))
             # join fallback types if they are different
-            from typing import cast
-            return TupleType(items, cast(Instance, join_instances(self.s.fallback, t.fallback)))
+            fallback = join_instances(self.s.fallback, t.fallback)
+            assert isinstance(fallback, Instance)
+            return TupleType(items, fallback)
         else:
             return self.default(self.s)
 
diff --git a/mypy/lex.py b/mypy/lex.py
index f074de9..66800a6 100644
--- a/mypy/lex.py
+++ b/mypy/lex.py
@@ -195,13 +195,13 @@ str_prefixes = set(['r', 'b', 'br', 'rb', 'u', 'ur', 'R', 'B', 'U'])
 
 # List of regular expressions that match non-alphabetical operators
 operators = [re.compile('[-+*/<>.%&|^~]'),
-             re.compile('==|!=|<=|>=|\\*\\*|//|<<|>>|<>')]
+             re.compile('==|!=|<=|>=|\\*\\*|@|//|<<|>>|<>')]
 
 # List of regular expressions that match punctuator tokens
 punctuators = [re.compile('[=,()@`]|(->)'),
                re.compile('\\['),
                re.compile(']'),
-               re.compile('([-+*/%&|^]|\\*\\*|//|<<|>>)=')]
+               re.compile('([-+*/%@&|^]|\\*\\*|//|<<|>>)=')]
 
 
 # Map single-character string escape sequences to corresponding characters.
diff --git a/mypy/main.py b/mypy/main.py
index 62d068c..19656e1 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -5,6 +5,7 @@ import configparser
 import os
 import re
 import sys
+import time
 
 from typing import Any, Dict, List, Mapping, Optional, Set, Tuple
 
@@ -12,6 +13,7 @@ from mypy import build
 from mypy import defaults
 from mypy import git
 from mypy import experiments
+from mypy import util
 from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS
 from mypy.errors import CompileError
 from mypy.options import Options, BuildType
@@ -28,20 +30,25 @@ def main(script_path: str) -> None:
     Args:
         script_path: Path to the 'mypy' script (used for finding data files).
     """
+    t0 = time.time()
     if script_path:
         bin_dir = find_bin_directory(script_path)
     else:
         bin_dir = None
     sources, options = process_options(sys.argv[1:])
-    f = sys.stdout
+    serious = False
     try:
         res = type_check_only(sources, bin_dir, options)
         a = res.errors
     except CompileError as e:
         a = e.messages
         if not e.use_stdout:
-            f = sys.stderr
+            serious = True
+    if options.junit_xml:
+        t1 = time.time()
+        util.write_junit_xml(t1 - t0, serious, a, options.junit_xml)
     if a:
+        f = sys.stderr if serious else sys.stdout
         for m in a:
             f.write(m + '\n')
         sys.exit(1)
@@ -160,6 +167,8 @@ def process_options(args: List[str],
                         " --check-untyped-defs enabled")
     parser.add_argument('--warn-redundant-casts', action='store_true',
                         help="warn about casting an expression to its inferred type")
+    parser.add_argument('--warn-no-return', action='store_true',
+                        help="warn about functions that end without returning")
     parser.add_argument('--warn-unused-ignores', action='store_true',
                         help="warn about unneeded '# type: ignore' comments")
     parser.add_argument('--hide-error-context', action='store_true',
@@ -180,6 +189,7 @@ def process_options(args: List[str],
                         "(experimental -- read documentation before using!).  "
                         "Implies --strict-optional.  Has the undesirable side-effect of "
                         "suppressing other errors in non-whitelisted files.")
+    parser.add_argument('--junit-xml', help="write junit.xml to the given file")
     parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
     parser.add_argument('--show-traceback', '--tb', action='store_true',
                         help="show traceback on fatal error")
@@ -188,6 +198,8 @@ def process_options(args: List[str],
                         help="dump type inference stats")
     parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module',
                         help="use a custom typing module")
+    parser.add_argument('--custom-typeshed-dir', metavar='DIR',
+                        help="use the custom typeshed in DIR")
     parser.add_argument('--scripts-are-modules', action='store_true',
                         help="Script x becomes module x instead of __main__")
     parser.add_argument('--config-file',
@@ -196,6 +208,9 @@ def process_options(args: List[str],
     parser.add_argument('--show-column-numbers', action='store_true',
                         dest='show_column_numbers',
                         help="Show column numbers in error messages")
+    parser.add_argument('--find-occurrences', metavar='CLASS.MEMBER',
+                        dest='special-opts:find_occurrences',
+                        help="print out all usages of a class member (experimental)")
     # hidden options
     # --shadow-file a.py tmp.py will typecheck tmp.py in place of a.py.
     # Useful for tools to make transformations to a file to get more
@@ -220,7 +235,7 @@ def process_options(args: List[str],
     report_group = parser.add_argument_group(
         title='report generation',
         description='Generate a report in the specified format.')
-    for report_type in reporter_classes:
+    for report_type in sorted(reporter_classes):
         report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
                                   metavar='DIR',
                                   dest='special-opts:%s_report' % report_type)
@@ -290,6 +305,12 @@ def process_options(args: List[str],
         options.strict_optional = True
     if options.strict_optional:
         experiments.STRICT_OPTIONAL = True
+    if special_opts.find_occurrences:
+        experiments.find_occurrences = special_opts.find_occurrences.split('.')
+        if len(experiments.find_occurrences) < 2:
+            parser.error("Can only find occurrences of class members.")
+        if len(experiments.find_occurrences) != 2:
+            parser.error("Can only find occurrences of non-nested class members.")
 
     # Set reports.
     for flag, val in vars(special_opts).items():
@@ -432,6 +453,9 @@ config_types = {
     'python_version': lambda s: tuple(map(int, s.split('.'))),
     'strict_optional_whitelist': lambda s: s.split(),
     'custom_typing_module': str,
+    'custom_typeshed_dir': str,
+    'mypy_path': lambda s: [p.strip() for p in re.split('[,:]', s)],
+    'junit_xml': str,
 }
 
 
@@ -461,19 +485,22 @@ def parse_config_file(options: Options, filename: str) -> None:
         if name.startswith('mypy-'):
             prefix = '%s: [%s]' % (filename, name)
             updates, report_dirs = parse_section(prefix, options, section)
-            # TODO: Limit updates to flags that can be per-file.
             if report_dirs:
-                print("%s: Per-file sections should not specify reports (%s)" %
+                print("%s: Per-module sections should not specify reports (%s)" %
                       (prefix, ', '.join(s + '_report' for s in sorted(report_dirs))),
                       file=sys.stderr)
-            if set(updates) - Options.PER_FILE_OPTIONS:
-                print("%s: Per-file sections should only specify per-file flags (%s)" %
-                      (prefix, ', '.join(sorted(set(updates) - Options.PER_FILE_OPTIONS))),
+            if set(updates) - Options.PER_MODULE_OPTIONS:
+                print("%s: Per-module sections should only specify per-module flags (%s)" %
+                      (prefix, ', '.join(sorted(set(updates) - Options.PER_MODULE_OPTIONS))),
                       file=sys.stderr)
-                updates = {k: v for k, v in updates.items() if k in Options.PER_FILE_OPTIONS}
+                updates = {k: v for k, v in updates.items() if k in Options.PER_MODULE_OPTIONS}
             globs = name[5:]
             for glob in globs.split(','):
-                options.per_file_options[glob] = updates
+                # For backwards compatibility, replace (back)slashes with dots.
+                glob = glob.replace(os.sep, '.')
+                if os.altsep:
+                    glob = glob.replace(os.altsep, '.')
+                options.per_module_options[glob] = updates
 
 
 def parse_section(prefix: str, template: Options,
diff --git a/mypy/maptype.py b/mypy/maptype.py
index dc8e7b2..ff76035 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -1,4 +1,4 @@
-from typing import Dict, List, cast
+from typing import Dict, List
 
 from mypy.expandtype import expand_type
 from mypy.nodes import TypeInfo
@@ -36,7 +36,11 @@ def map_instance_to_supertypes(instance: Instance,
                 a.extend(map_instance_to_direct_supertypes(t, sup))
             types = a
         result.extend(types)
-    return result
+    if result:
+        return result
+    else:
+        # Nothing. Presumably due to an error. Construct a dummy using Any.
+        return [Instance(supertype, [AnyType()] * len(supertype.type_vars))]
 
 
 def class_derivation_paths(typ: TypeInfo,
@@ -72,7 +76,9 @@ def map_instance_to_direct_supertypes(instance: Instance,
     for b in typ.bases:
         if b.type == supertype:
             env = instance_to_type_environment(instance)
-            result.append(cast(Instance, expand_type(b, env)))
+            t = expand_type(b, env)
+            assert isinstance(t, Instance)
+            result.append(t)
 
     if result:
         return result
diff --git a/mypy/meet.py b/mypy/meet.py
index 63e3aae..18796ae 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -1,4 +1,4 @@
-from typing import cast, List
+from typing import List
 
 from mypy.join import is_similar_callables, combine_similar_callables
 from mypy.types import (
@@ -7,7 +7,6 @@ from mypy.types import (
     DeletedType, UninhabitedType, TypeType
 )
 from mypy.subtypes import is_subtype
-from mypy.nodes import TypeInfo
 
 from mypy import experiments
 
@@ -114,14 +113,6 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
     return True
 
 
-def nearest_builtin_ancestor(type: TypeInfo) -> TypeInfo:
-    for base in type.mro:
-        if base.defn.is_builtinclass:
-            return base
-    else:
-        return None
-
-
 class TypeMeetVisitor(TypeVisitor[Type]):
     def __init__(self, s: Type) -> None:
         self.s = s
diff --git a/mypy/messages.py b/mypy/messages.py
index b4828f1..e3eec25 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -23,13 +23,9 @@ from mypy.nodes import (
 # that do not have any parameters.
 
 NO_RETURN_VALUE_EXPECTED = 'No return value expected'
+MISSING_RETURN_STATEMENT = 'Missing return statement'
 INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type'
 RETURN_VALUE_EXPECTED = 'Return value expected'
-BOOLEAN_VALUE_EXPECTED = 'Boolean value expected'
-BOOLEAN_EXPECTED_FOR_IF = 'Boolean value expected for if condition'
-BOOLEAN_EXPECTED_FOR_WHILE = 'Boolean value expected for while condition'
-BOOLEAN_EXPECTED_FOR_UNTIL = 'Boolean value expected for until condition'
-BOOLEAN_EXPECTED_FOR_NOT = 'Boolean value expected for not operand'
 INVALID_EXCEPTION = 'Exception must be derived from BaseException'
 INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException'
 INVALID_RETURN_TYPE_FOR_GENERATOR = \
@@ -49,22 +45,12 @@ INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in yield'
 INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"'
 INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = 'Incompatible types in string interpolation'
 INIT_MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "__init__" must be None'
-GETTER_TYPE_INCOMPATIBLE_WITH_SETTER = \
-    'Type of getter incompatible with setter'
 TUPLE_INDEX_MUST_BE_AN_INT_LITERAL = 'Tuple index must be an integer literal'
 TUPLE_SLICE_MUST_BE_AN_INT_LITERAL = 'Tuple slice must be an integer literal'
 TUPLE_INDEX_OUT_OF_RANGE = 'Tuple index out of range'
-TYPE_CONSTANT_EXPECTED = 'Type "Constant" or initializer expected'
-INCOMPATIBLE_PAIR_ITEM_TYPE = 'Incompatible Pair item type'
-INVALID_TYPE_APPLICATION_TARGET_TYPE = 'Invalid type application target type'
-INCOMPATIBLE_TUPLE_ITEM_TYPE = 'Incompatible tuple item type'
-INCOMPATIBLE_KEY_TYPE = 'Incompatible dictionary key type'
-INCOMPATIBLE_VALUE_TYPE = 'Incompatible dictionary value type'
 NEED_ANNOTATION_FOR_VAR = 'Need type annotation for variable'
 ITERABLE_EXPECTED = 'Iterable expected'
 ASYNC_ITERABLE_EXPECTED = 'AsyncIterable expected'
-INCOMPATIBLE_TYPES_IN_FOR = 'Incompatible types in for statement'
-INCOMPATIBLE_ARRAY_VAR_ARGS = 'Incompatible variable arguments in call'
 INVALID_SLICE_INDEX = 'Slice index must be an integer or None'
 CANNOT_INFER_LAMBDA_TYPE = 'Cannot infer type of lambda'
 CANNOT_INFER_ITEM_TYPE = 'Cannot infer iterable item type'
@@ -75,14 +61,12 @@ INCONSISTENT_ABSTRACT_OVERLOAD = \
     'Overloaded method has both abstract and non-abstract variants'
 READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \
     'Read-only property cannot override read-write property'
-INSTANCE_LAYOUT_CONFLICT = 'Instance layout conflict in multiple inheritance'
 FORMAT_REQUIRES_MAPPING = 'Format requires a mapping'
-GENERIC_TYPE_NOT_VALID_AS_EXPRESSION = \
-    "Generic type is prohibited as a runtime expression (use a type alias or '# type:' comment)"
 RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = "Cannot use a contravariant type variable as return type"
 FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = "Cannot use a covariant type variable as a parameter"
 INCOMPATIBLE_IMPORT_OF = "Incompatible import of"
 FUNCTION_TYPE_EXPECTED = "Function is missing a type annotation"
+ONLY_CLASS_APPLICATION = "Type application is only supported for generic classes"
 RETURN_TYPE_EXPECTED = "Function is missing a return type annotation"
 ARGUMENT_TYPE_EXPECTED = "Function is missing a type annotation for one or more arguments"
 KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE = \
@@ -350,7 +334,12 @@ class MessageBuilder:
             # Indexed set.
             self.fail('Unsupported target for indexed assignment', context)
         elif member == '__call__':
-            self.fail('{} not callable'.format(self.format(typ)), context)
+            if isinstance(typ, Instance) and (typ.type.fullname() == 'builtins.function'):
+                # "'function' not callable" is a confusing error message.
+                # Explain that the problem is that the type of the function is not known.
+                self.fail('Cannot call function of unknown type', context)
+            else:
+                self.fail('{} not callable'.format(self.format(typ)), context)
         else:
             # The non-special case: a missing ordinary attribute.
             if not self.disable_type_names:
@@ -409,9 +398,6 @@ class MessageBuilder:
                     op, self.format(typ))
             self.fail(msg, context)
 
-    def type_expected_as_right_operand_of_is(self, context: Context) -> None:
-        self.fail('Type expected as right operand of "is"', context)
-
     def not_callable(self, typ: Type, context: Context) -> Type:
         self.fail('{} not callable'.format(self.format(typ)), context)
         return AnyType()
@@ -594,11 +580,6 @@ class MessageBuilder:
         else:
             self.fail('No overload variant matches argument types {}'.format(arg_types), context)
 
-    def function_variants_overlap(self, n1: int, n2: int,
-                                  context: Context) -> None:
-        self.fail('Function signature variants {} and {} overlap'.format(
-            n1 + 1, n2 + 1), context)
-
     def invalid_cast(self, target_type: Type, source_type: Type,
                      context: Context) -> None:
         if not self.check_unusable_type(source_type, context):
@@ -626,19 +607,6 @@ class MessageBuilder:
         self.fail('Result type of {} incompatible in assignment'.format(op),
                   context)
 
-    def incompatible_value_count_in_assignment(self, lvalue_count: int,
-                                               rvalue_count: int,
-                                               context: Context) -> None:
-        if rvalue_count < lvalue_count:
-            self.fail('Need {} values to assign'.format(lvalue_count), context)
-        elif rvalue_count > lvalue_count:
-            self.fail('Too many values to assign', context)
-
-    def type_incompatible_with_supertype(self, name: str, supertype: TypeInfo,
-                                         context: Context) -> None:
-        self.fail('Type of "{}" incompatible with supertype "{}"'.format(
-            name, supertype.name), context)
-
     def signature_incompatible_with_supertype(
             self, name: str, name_in_super: str, supertype: str,
             context: Context) -> None:
@@ -667,16 +635,11 @@ class MessageBuilder:
             target = '"{}" of {}'.format(name_in_super, target)
         return target
 
-    def boolean_return_value_expected(self, method: str,
-                                      context: Context) -> None:
-        self.fail('Boolean return value expected for method "{}"'.format(
-            method), context)
-
     def incompatible_type_application(self, expected_arg_count: int,
                                       actual_arg_count: int,
                                       context: Context) -> None:
         if expected_arg_count == 0:
-            self.fail('Type application targets a non-generic function',
+            self.fail('Type application targets a non-generic function or class',
                       context)
         elif actual_arg_count > expected_arg_count:
             self.fail('Type application has too many types ({} expected)'
@@ -685,11 +648,6 @@ class MessageBuilder:
             self.fail('Type application has too few types ({} expected)'
                       .format(expected_arg_count), context)
 
-    def incompatible_array_item_type(self, typ: Type, index: int,
-                                     context: Context) -> None:
-        self.fail('Array item {} has incompatible type {}'.format(
-            index, self.format(typ)), context)
-
     def could_not_infer_type_arguments(self, callee_type: CallableType, n: int,
                                        context: Context) -> None:
         if callee_type.name and n > 0:
@@ -708,14 +666,6 @@ class MessageBuilder:
             self.fail('Argument after ** must be a dictionary',
                       context)
 
-    def incomplete_type_var_match(self, member: str, context: Context) -> None:
-        self.fail('"{}" has incomplete match to supertype type variable'
-                  .format(member), context)
-
-    def not_implemented(self, msg: str, context: Context) -> Type:
-        self.fail('Feature not implemented yet ({})'.format(msg), context)
-        return AnyType()
-
     def undefined_in_superclass(self, member: str, context: Context) -> None:
         self.fail('"{}" undefined in superclass'.format(member), context)
 
@@ -737,9 +687,6 @@ class MessageBuilder:
     def too_many_string_formatting_arguments(self, context: Context) -> None:
         self.fail('Not all arguments converted during string formatting', context)
 
-    def incomplete_conversion_specifier_format(self, context: Context) -> None:
-        self.fail('Incomplete format', context)
-
     def unsupported_placeholder(self, placeholder: str, context: Context) -> None:
         self.fail('Unsupported format character \'%s\'' % placeholder, context)
 
@@ -805,16 +752,6 @@ class MessageBuilder:
         self.fail('Overloaded function signatures {} and {} overlap with '
                   'incompatible return types'.format(index1, index2), context)
 
-    def invalid_reverse_operator_signature(self, reverse: str, other: str,
-                                           context: Context) -> None:
-        self.fail('"Any" return type expected since argument to {} '
-                  'does not support {}'.format(reverse, other), context)
-
-    def reverse_operator_method_with_any_arg_must_return_any(
-            self, method: str, context: Context) -> None:
-        self.fail('"Any" return type expected since argument to {} '
-                  'has type "Any"'.format(method), context)
-
     def operator_method_signatures_overlap(
             self, reverse_class: str, reverse_method: str, forward_class: str,
             forward_method: str, context: Context) -> None:
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 469b308..013373c 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -1,8 +1,7 @@
 """Abstract syntax tree node classes (i.e. parse tree)."""
 
 import os
-import re
-from abc import abstractmethod, ABCMeta
+from abc import abstractmethod
 
 from typing import (
     Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional
@@ -91,14 +90,20 @@ reverse_type_aliases = dict((name.replace('__builtins__', 'builtins'), alias)
                             for alias, name in type_aliases.items())  # type: Dict[str, str]
 
 
+# See [Note Literals and literal_hash] below
+Key = tuple
+
+
 class Node(Context):
     """Common base class for all non-type parse tree nodes."""
 
     line = -1
     column = -1
 
+    # TODO: Move to Expression
+    # See [Note Literals and literal_hash] below
     literal = LITERAL_NO
-    literal_hash = None  # type: Any
+    literal_hash = None  # type: Key
 
     def __str__(self) -> str:
         ans = self.accept(mypy.strconv.StrConv())
@@ -140,10 +145,50 @@ class Expression(Node):
     """An expression node."""
 
 
-# TODO: Union['NameExpr', 'TupleExpr', 'ListExpr', 'MemberExpr', 'IndexExpr']; see #1783.
+# TODO:
+# Lvalue = Union['NameExpr', 'MemberExpr', 'IndexExpr', 'SuperExpr', 'StarExpr'
+#                'TupleExpr', 'ListExpr']; see #1783.
 Lvalue = Expression
 
 
+# [Note Literals and literal_hash]
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#
+# Mypy uses the term "literal" to refer to any expression built out of
+# the following:
+#
+# * Plain literal expressions, like `1` (integer, float, string, etc.)
+#
+# * Compound literal expressions, like `(lit1, lit2)` (list, dict,
+#   set, or tuple)
+#
+# * Operator expressions, like `lit1 + lit2`
+#
+# * Variable references, like `x`
+#
+# * Member references, like `lit.m`
+#
+# * Index expressions, like `lit[0]`
+#
+# A typical "literal" looks like `x[(i,j+1)].m`.
+#
+# An expression that is a literal has a `literal_hash`, with the
+# following properties.
+#
+# * `literal_hash` is a Key: a tuple containing basic data types and
+#   possibly other Keys. So it can be used as a key in a dictionary
+#   that will be compared by value (as opposed to the Node itself,
+#   which is compared by identity).
+#
+# * Two expressions have equal `literal_hash`es if and only if they
+#   are syntactically equal expressions. (NB: Actually, we also
+#   identify as equal expressions like `3` and `3.0`; is this a good
+#   idea?)
+#
+# * The elements of `literal_hash` that are tuples are exactly the
+#   subexpressions of the original expression (e.g. the base and index
+#   of an index expression, or the operands of an operator expression).
+
 class SymbolNode(Node):
     # Nodes that can be stored in a symbol table.
 
@@ -155,10 +200,8 @@ class SymbolNode(Node):
     @abstractmethod
     def fullname(self) -> str: pass
 
-    # NOTE: Can't use @abstractmethod, since many subclasses of Node
-    # don't implement serialize().
-    def serialize(self) -> Any:
-        raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
+    @abstractmethod
+    def serialize(self) -> JsonDict: pass
 
     @classmethod
     def deserialize(cls, data: JsonDict) -> 'SymbolNode':
@@ -171,7 +214,7 @@ class SymbolNode(Node):
         raise NotImplementedError('unexpected .class {}'.format(classname))
 
 
-class MypyFile(SymbolNode, Statement):
+class MypyFile(SymbolNode):
     """The abstract syntax tree of a single source file."""
 
     # Module name ('__main__' for initial file)
@@ -242,8 +285,11 @@ class MypyFile(SymbolNode, Statement):
 
 class ImportBase(Statement):
     """Base class for all import statements."""
-    is_unreachable = False
-    is_top_level = False  # Set by semanal.FirstPass
+
+    is_unreachable = False  # Set by semanal.FirstPass if inside `if False` etc.
+    is_top_level = False  # Ditto if outside any class or def
+    is_mypy_only = False  # Ditto if inside `if TYPE_CHECKING` or `if MYPY`
+
     # If an import replaces existing definitions, we construct dummy assignment
     # statements that assign the imported names to the names in the current scope,
     # for type checking purposes. Example:
@@ -300,7 +346,7 @@ class ImportAll(ImportBase):
         return visitor.visit_import_all(self)
 
 
-class FuncBase(SymbolNode):
+class FuncBase(Node):
     """Abstract base class for function-like nodes"""
 
     # Type signature. This is usually CallableType or Overloaded, but it can be something else for
@@ -317,11 +363,8 @@ class FuncBase(SymbolNode):
     def fullname(self) -> str:
         return self._fullname
 
-    def is_method(self) -> bool:
-        return bool(self.info)
-
 
-class OverloadedFuncDef(FuncBase, Statement):
+class OverloadedFuncDef(FuncBase, SymbolNode, Statement):
     """A logical node representing all the variants of an overloaded function.
 
     This node has no explicit representation in the source program.
@@ -365,7 +408,7 @@ class Argument(Node):
 
     variable = None  # type: Var
     type_annotation = None  # type: Optional[mypy.types.Type]
-    initializater = None  # type: Optional[Expression]
+    initializer = None  # type: Optional[Expression]
     kind = None  # type: int
     initialization_statement = None  # type: Optional[AssignmentStmt]
 
@@ -406,24 +449,6 @@ class Argument(Node):
             self.initialization_statement.set_line(self.line, self.column)
             self.initialization_statement.lvalues[0].set_line(self.line, self.column)
 
-    def serialize(self) -> JsonDict:
-        # Note: we are deliberately not saving the type annotation since
-        # it is not used by later stages of mypy.
-        data = {'.class': 'Argument',
-                'kind': self.kind,
-                'variable': self.variable.serialize(),
-                }  # type: JsonDict
-        # TODO: initializer?
-        return data
-
-    @classmethod
-    def deserialize(cls, data: JsonDict) -> 'Argument':
-        assert data['.class'] == 'Argument'
-        return Argument(Var.deserialize(data['variable']),
-                        None,
-                        None,  # TODO: initializer?
-                        kind=data['kind'])
-
 
 class FuncItem(FuncBase):
     arguments = []  # type: List[Argument]
@@ -476,7 +501,7 @@ class FuncItem(FuncBase):
         return self.type is None
 
 
-class FuncDef(FuncItem, Statement):
+class FuncDef(FuncItem, SymbolNode, Statement):
     """Function definition.
 
     This is a non-lambda function defined using 'def'.
@@ -506,9 +531,6 @@ class FuncDef(FuncItem, Statement):
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_func_def(self)
 
-    def is_constructor(self) -> bool:
-        return self.info is not None and self._name == '__init__'
-
     def serialize(self) -> JsonDict:
         # We're deliberating omitting arguments and storing only arg_names and
         # arg_kinds for space-saving reasons (arguments is not used in later
@@ -591,7 +613,7 @@ class Decorator(SymbolNode, Statement):
         return dec
 
 
-class Var(SymbolNode, Statement):
+class Var(SymbolNode):
     """A variable.
 
     It can refer to global/local variable or a data attribute.
@@ -669,8 +691,6 @@ class ClassDef(Statement):
     info = None  # type: TypeInfo  # Related TypeInfo
     metaclass = ''
     decorators = None  # type: List[Expression]
-    # Built-in/extension class? (single implementation inheritance only)
-    is_builtinclass = False
     has_incompatible_baseclass = False
 
     def __init__(self,
@@ -699,7 +719,6 @@ class ClassDef(Statement):
                 'fullname': self.fullname,
                 'type_vars': [v.serialize() for v in self.type_vars],
                 'metaclass': self.metaclass,
-                'is_builtinclass': self.is_builtinclass,
                 }
 
     @classmethod
@@ -711,7 +730,6 @@ class ClassDef(Statement):
                        metaclass=data['metaclass'],
                        )
         res.fullname = data['fullname']
-        res.is_builtinclass = data['is_builtinclass']
         return res
 
 
@@ -798,10 +816,10 @@ class OperatorAssignmentStmt(Statement):
     """Operator assignment statement such as x += 1"""
 
     op = ''
-    lvalue = None  # type: Expression
+    lvalue = None  # type: Lvalue
     rvalue = None  # type: Expression
 
-    def __init__(self, op: str, lvalue: Expression, rvalue: Expression) -> None:
+    def __init__(self, op: str, lvalue: Lvalue, rvalue: Expression) -> None:
         self.op = op
         self.lvalue = lvalue
         self.rvalue = rvalue
@@ -826,14 +844,14 @@ class WhileStmt(Statement):
 
 class ForStmt(Statement):
     # Index variables
-    index = None  # type: Expression
+    index = None  # type: Lvalue
     # Expression to iterate
     expr = None  # type: Expression
     body = None  # type: Block
     else_body = None  # type: Block
     is_async = False  # True if `async for ...` (PEP 492, Python 3.5)
 
-    def __init__(self, index: Expression, expr: Expression, body: Block,
+    def __init__(self, index: Lvalue, expr: Expression, body: Block,
                  else_body: Block) -> None:
         self.index = index
         self.expr = expr
@@ -865,9 +883,9 @@ class AssertStmt(Statement):
 
 
 class DelStmt(Statement):
-    expr = None  # type: Expression
+    expr = None  # type: Lvalue
 
-    def __init__(self, expr: Expression) -> None:
+    def __init__(self, expr: Lvalue) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -940,11 +958,11 @@ class TryStmt(Statement):
 
 class WithStmt(Statement):
     expr = None  # type: List[Expression]
-    target = None  # type: List[Expression]
+    target = None  # type: List[Lvalue]
     body = None  # type: Block
     is_async = False  # True if `async with ...` (PEP 492, Python 3.5)
 
-    def __init__(self, expr: List[Expression], target: List[Expression],
+    def __init__(self, expr: List[Expression], target: List[Lvalue],
                  body: Block) -> None:
         self.expr = expr
         self.target = target
@@ -1000,7 +1018,7 @@ class IntExpr(Expression):
 
     def __init__(self, value: int) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_int_expr(self)
@@ -1025,7 +1043,7 @@ class StrExpr(Expression):
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_str_expr(self)
@@ -1039,7 +1057,7 @@ class BytesExpr(Expression):
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_bytes_expr(self)
@@ -1053,7 +1071,7 @@ class UnicodeExpr(Expression):
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_unicode_expr(self)
@@ -1067,7 +1085,7 @@ class FloatExpr(Expression):
 
     def __init__(self, value: float) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_float_expr(self)
@@ -1081,7 +1099,7 @@ class ComplexExpr(Expression):
 
     def __init__(self, value: complex) -> None:
         self.value = value
-        self.literal_hash = value
+        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_complex_expr(self)
@@ -1269,7 +1287,7 @@ class IndexExpr(Expression):
         self.analyzed = None
         if self.index.literal == LITERAL_YES:
             self.literal = self.base.literal
-            self.literal_hash = ('Member', base.literal_hash,
+            self.literal_hash = ('Index', base.literal_hash,
                                  index.literal_hash)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -1303,6 +1321,7 @@ op_methods = {
     '%': '__mod__',
     '//': '__floordiv__',
     '**': '__pow__',
+    '@': '__matmul__',
     '&': '__and__',
     '|': '__or__',
     '^': '__xor__',
@@ -1324,7 +1343,7 @@ ops_falling_back_to_cmp = {'__ne__', '__eq__',
 
 
 ops_with_inplace_method = {
-    '+', '-', '*', '/', '%', '//', '**', '&', '|', '^', '<<', '>>'}
+    '+', '-', '*', '/', '%', '//', '**', '@', '&', '|', '^', '<<', '>>'}
 
 inplace_operator_methods = set(
     '__i' + op_methods[op][2:] for op in ops_with_inplace_method)
@@ -1337,6 +1356,7 @@ reverse_op_methods = {
     '__mod__': '__rmod__',
     '__floordiv__': '__rfloordiv__',
     '__pow__': '__rpow__',
+    '__matmul__': '__rmatmul__',
     '__and__': '__rand__',
     '__or__': '__ror__',
     '__xor__': '__rxor__',
@@ -1388,7 +1408,7 @@ class ComparisonExpr(Expression):
         self.operands = operands
         self.method_types = []
         self.literal = min(o.literal for o in self.operands)
-        self.literal_hash = (('Comparison',) + tuple(operators) +
+        self.literal_hash = ((cast(Any, 'Comparison'),) + tuple(operators) +
                              tuple(o.literal_hash for o in operands))
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -1479,7 +1499,7 @@ class ListExpr(Expression):
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
-            self.literal_hash = ('List',) + tuple(x.literal_hash for x in items)
+            self.literal_hash = (cast(Any, 'List'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_list_expr(self)
@@ -1497,8 +1517,8 @@ class DictExpr(Expression):
         if all(x[0] and x[0].literal == LITERAL_YES and x[1].literal == LITERAL_YES
                for x in items):
             self.literal = LITERAL_YES
-            self.literal_hash = ('Dict',) + tuple(
-                (x[0].literal_hash, x[1].literal_hash) for x in items)  # type: ignore
+            self.literal_hash = (cast(Any, 'Dict'),) + tuple(
+                (x[0].literal_hash, x[1].literal_hash) for x in items)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_dict_expr(self)
@@ -1513,7 +1533,7 @@ class TupleExpr(Expression):
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
-            self.literal_hash = ('Tuple',) + tuple(x.literal_hash for x in items)
+            self.literal_hash = (cast(Any, 'Tuple'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_tuple_expr(self)
@@ -1528,7 +1548,7 @@ class SetExpr(Expression):
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
-            self.literal_hash = ('Set',) + tuple(x.literal_hash for x in items)
+            self.literal_hash = (cast(Any, 'Set'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_set_expr(self)
@@ -1540,9 +1560,9 @@ class GeneratorExpr(Expression):
     left_expr = None  # type: Expression
     sequences = None  # type: List[Expression]
     condlists = None  # type: List[List[Expression]]
-    indices = None  # type: List[Expression]
+    indices = None  # type: List[Lvalue]
 
-    def __init__(self, left_expr: Expression, indices: List[Expression],
+    def __init__(self, left_expr: Expression, indices: List[Lvalue],
                  sequences: List[Expression], condlists: List[List[Expression]]) -> None:
         self.left_expr = left_expr
         self.sequences = sequences
@@ -1584,9 +1604,9 @@ class DictionaryComprehension(Expression):
     value = None  # type: Expression
     sequences = None  # type: List[Expression]
     condlists = None  # type: List[List[Expression]]
-    indices = None  # type: List[Expression]
+    indices = None  # type: List[Lvalue]
 
-    def __init__(self, key: Expression, value: Expression, indices: List[Expression],
+    def __init__(self, key: Expression, value: Expression, indices: List[Lvalue],
                  sequences: List[Expression], condlists: List[List[Expression]]) -> None:
         self.key = key
         self.value = value
@@ -1713,16 +1733,25 @@ class TypeAliasExpr(Expression):
     """Type alias expression (rvalue)."""
 
     type = None  # type: mypy.types.Type
-
-    def __init__(self, type: 'mypy.types.Type') -> None:
+    # Simple fallback type for aliases that are invalid in runtime expressions
+    # (for example Union, Tuple, Callable).
+    fallback = None  # type: mypy.types.Type
+    # This type alias is subscripted in a runtime expression like Alias[int](42)
+    # (not in a type context like type annotation or base class).
+    in_runtime = False  # type: bool
+
+    def __init__(self, type: 'mypy.types.Type', fallback: 'mypy.types.Type' = None,
+                 in_runtime: bool = False) -> None:
         self.type = type
+        self.fallback = fallback
+        self.in_runtime = in_runtime
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_type_alias_expr(self)
 
 
 class NamedTupleExpr(Expression):
-    """Named tuple expression namedtuple(...)."""
+    """Named tuple expression namedtuple(...) or NamedTuple(...)."""
 
     # The class representation of this named tuple (its tuple_type attribute contains
     # the tuple item types)
@@ -1735,6 +1764,19 @@ class NamedTupleExpr(Expression):
         return visitor.visit_namedtuple_expr(self)
 
 
+class TypedDictExpr(Expression):
+    """Typed dict expression TypedDict(...)."""
+
+    # The class representation of this typed dict
+    info = None  # type: TypeInfo
+
+    def __init__(self, info: 'TypeInfo') -> None:
+        self.info = info
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_typeddict_expr(self)
+
+
 class PromoteExpr(Expression):
     """Ducktype class decorator expression _promote(...)."""
 
@@ -1857,18 +1899,18 @@ class TypeInfo(SymbolNode):
     # Is this a named tuple type?
     is_named_tuple = False
 
+    # Is this a typed dict type?
+    is_typed_dict = False
+
     # Is this a newtype type?
     is_newtype = False
 
-    # Is this a dummy from deserialization?
-    is_dummy = False
-
     # Alternative to fullname() for 'anonymous' classes.
     alt_fullname = None  # type: Optional[str]
 
     FLAGS = [
         'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple',
-        'is_newtype', 'is_dummy'
+        'is_typed_dict', 'is_newtype'
     ]
 
     def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None:
@@ -1922,33 +1964,9 @@ class TypeInfo(SymbolNode):
     def has_readable_member(self, name: str) -> bool:
         return self.get(name) is not None
 
-    def has_writable_member(self, name: str) -> bool:
-        return self.has_var(name)
-
-    def has_var(self, name: str) -> bool:
-        return self.get_var(name) is not None
-
     def has_method(self, name: str) -> bool:
         return self.get_method(name) is not None
 
-    def get_var(self, name: str) -> Var:
-        for cls in self.mro:
-            if name in cls.names:
-                node = cls.names[name].node
-                if isinstance(node, Var):
-                    return node
-                else:
-                    return None
-        return None
-
-    def get_var_or_getter(self, name: str) -> SymbolNode:
-        # TODO getter
-        return self.get_var(name)
-
-    def get_var_or_setter(self, name: str) -> SymbolNode:
-        # TODO setter
-        return self.get_var(name)
-
     def get_method(self, name: str) -> FuncBase:
         if self.mro is None:  # Might be because of a previous error.
             return None
@@ -1993,18 +2011,6 @@ class TypeInfo(SymbolNode):
                     return True
         return False
 
-    def all_subtypes(self) -> 'Set[TypeInfo]':
-        """Return TypeInfos of all subtypes, including this type, as a set."""
-        subtypes = set([self])
-        for subt in self.subtypes:
-            for t in subt.all_subtypes():
-                subtypes.add(t)
-        return subtypes
-
-    def all_base_classes(self) -> 'List[TypeInfo]':
-        """Return a list of base classes, including indirect bases."""
-        assert False
-
     def direct_base_classes(self) -> 'List[TypeInfo]':
         """Return a direct base classes.
 
@@ -2026,7 +2032,7 @@ class TypeInfo(SymbolNode):
                             ('Names', sorted(self.names.keys()))],
                            'TypeInfo')
 
-    def serialize(self) -> Union[str, JsonDict]:
+    def serialize(self) -> JsonDict:
         # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates.
         data = {'.class': 'TypeInfo',
                 'module_name': self.module_name,
@@ -2230,57 +2236,6 @@ class SymbolTable(Dict[str, SymbolTableNode]):
         return st
 
 
-def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
-    if func.type:
-        assert isinstance(func.type, mypy.types.FunctionLike)
-        return func.type
-    else:
-        # Implicit type signature with dynamic types.
-        # Overloaded functions always have a signature, so func must be an ordinary function.
-        fdef = cast(FuncDef, func)
-        name = func.name()
-        if name:
-            name = '"{}"'.format(name)
-
-        return mypy.types.CallableType(
-            [mypy.types.AnyType()] * len(fdef.arg_names),
-            fdef.arg_kinds,
-            fdef.arg_names,
-            mypy.types.AnyType(),
-            fallback,
-            name,
-            implicit=True,
-        )
-
-
-def method_type_with_fallback(func: FuncBase,
-                              fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
-    """Return the signature of a method (omit self)."""
-    return method_type(function_type(func, fallback))
-
-
-def method_type(sig: 'mypy.types.FunctionLike') -> 'mypy.types.FunctionLike':
-    if isinstance(sig, mypy.types.CallableType):
-        return method_callable(sig)
-    else:
-        sig = cast(mypy.types.Overloaded, sig)
-        items = []  # type: List[mypy.types.CallableType]
-        for c in sig.items():
-            items.append(method_callable(c))
-        return mypy.types.Overloaded(items)
-
-
-def method_callable(c: 'mypy.types.CallableType') -> 'mypy.types.CallableType':
-    if c.arg_kinds and c.arg_kinds[0] == ARG_STAR:
-        # The signature is of the form 'def foo(*args, ...)'.
-        # In this case we shouldn't drop the first arg,
-        # since self will be absorbed by the *args.
-        return c
-    return c.copy_modified(arg_types=c.arg_types[1:],
-                           arg_kinds=c.arg_kinds[1:],
-                           arg_names=c.arg_names[1:])
-
-
 class MroError(Exception):
     """Raised if a consistent mro cannot be determined for a class."""
 
diff --git a/mypy/options.py b/mypy/options.py
index 1d09456..c3f3107 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -16,7 +16,7 @@ class BuildType:
 class Options:
     """Options collected from flags."""
 
-    PER_FILE_OPTIONS = {
+    PER_MODULE_OPTIONS = {
         "silent_imports",
         "almost_silent",
         "disallow_untyped_calls",
@@ -25,16 +25,19 @@ class Options:
         "debug_cache",
         "strict_optional_whitelist",
         "show_none_errors",
+        "warn_no_return",
     }
 
-    OPTIONS_AFFECTING_CACHE = PER_FILE_OPTIONS | {"strict_optional"}
+    OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional"}
 
     def __init__(self) -> None:
         # -- build options --
         self.build_type = BuildType.STANDARD
         self.python_version = defaults.PYTHON3_VERSION
         self.platform = sys.platform
-        self.custom_typing_module = None  # type: str
+        self.custom_typing_module = None  # type: Optional[str]
+        self.custom_typeshed_dir = None  # type: Optional[str]
+        self.mypy_path = []  # type: List[str]
         self.report_dirs = {}  # type: Dict[str, str]
         self.silent_imports = False
         self.almost_silent = False
@@ -57,6 +60,9 @@ class Options:
         # Warn about casting an expression to its inferred type
         self.warn_redundant_casts = False
 
+        # Warn about falling off the end of a function returning non-None
+        self.warn_no_return = False
+
         # Warn about unused '# type: ignore' comments
         self.warn_unused_ignores = False
 
@@ -76,8 +82,11 @@ class Options:
         # Config file name
         self.config_file = None  # type: Optional[str]
 
-        # Per-file options (raw)
-        self.per_file_options = {}  # type: Dict[str, Dict[str, object]]
+        # Write junit.xml to given file
+        self.junit_xml = None  # type: Optional[str]
+
+        # Per-module options (raw)
+        self.per_module_options = {}  # type: Dict[str, Dict[str, object]]
 
         # -- development options --
         self.verbosity = 0  # More verbose messages (for troubleshooting)
@@ -111,11 +120,11 @@ class Options:
     def __repr__(self) -> str:
         return 'Options({})'.format(pprint.pformat(self.__dict__))
 
-    def clone_for_file(self, filename: str) -> 'Options':
+    def clone_for_module(self, module: str) -> 'Options':
         updates = {}
-        for glob in self.per_file_options:
-            if fnmatch.fnmatch(filename, glob):
-                updates.update(self.per_file_options[glob])
+        for pattern in self.per_module_options:
+            if self.module_matches_pattern(module, pattern):
+                updates.update(self.per_module_options[pattern])
         if not updates:
             return self
         new_options = Options()
@@ -123,5 +132,11 @@ class Options:
         new_options.__dict__.update(updates)
         return new_options
 
+    def module_matches_pattern(self, module: str, pattern: str) -> bool:
+        # If the pattern is 'mod.*', we want 'mod' to match that too.
+        # (That's so that a pattern specifying a package also matches
+        # that package's __init__.)
+        return fnmatch.fnmatch(module, pattern) or fnmatch.fnmatch(module + '.', pattern)
+
     def select_options_affecting_cache(self) -> Mapping[str, bool]:
         return {opt: getattr(self, opt) for opt in self.OPTIONS_AFFECTING_CACHE}
diff --git a/mypy/parse.py b/mypy/parse.py
index 5739056..f5fa404 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -40,11 +40,14 @@ from mypy.options import Options
 from mypy import experiments
 
 
+class ParseError(Exception): pass
+
+
 precedence = {
     '**': 16,
     '-u': 15, '+u': 15, '~': 15,   # unary operators (-, + and ~)
     '<cast>': 14,
-    '*': 13, '/': 13, '//': 13, '%': 13,
+    '*': 13, '/': 13, '//': 13, '%': 13, '@': 13,
     '+': 12, '-': 12,
     '>>': 11, '<<': 11,
     '&': 10,
@@ -61,7 +64,7 @@ precedence = {
 
 
 op_assign = set([
-    '+=', '-=', '*=', '/=', '//=', '%=', '**=', '|=', '&=', '^=', '>>=',
+    '+=', '-=', '*=', '/=', '//=', '%=', '**=', '@=', '|=', '&=', '^=', '>>=',
     '<<='])
 
 op_comp = set([
@@ -236,7 +239,7 @@ class Parser:
         node = None  # type: ImportBase
         if self.current_str() == '*':
             if name == '__future__':
-                self.parse_error()
+                raise self.parse_error()
             # An import all from a module node:
             self.skip()
             node = ImportAll(name, relative)
@@ -402,8 +405,7 @@ class Parser:
         elif isinstance(expr, MemberExpr):
             if isinstance(expr.expr, NameExpr):
                 return expr.expr.name == 'typing' and expr.name == 'no_type_check'
-        else:
-            return False
+        return False
 
     def parse_function(self, no_type_checks: bool = False) -> FuncDef:
         def_tok = self.expect('def')
@@ -454,6 +456,10 @@ class Parser:
                 else:
                     self.check_argument_kinds(arg_kinds, sig.arg_kinds,
                                               def_tok.line, def_tok.column)
+                    if len(sig.arg_types) > len(arg_kinds):
+                        raise ParseError('Type signature has too many arguments')
+                    if len(sig.arg_types) < len(arg_kinds):
+                        raise ParseError('Type signature has too few arguments')
                     typ = CallableType(
                         sig.arg_types,
                         arg_kinds,
@@ -628,7 +634,7 @@ class Parser:
                 elif self.current_str() in ['*', '**']:
                     if bare_asterisk_before == len(args):
                         # named arguments must follow bare *
-                        self.parse_error()
+                        raise self.parse_error()
 
                     arg = self.parse_asterisk_arg(
                         allow_signature,
@@ -790,6 +796,8 @@ class Parser:
         if self.current_str() == ':':
             self.skip()
             return self.parse_expression(precedence[','])
+        else:
+            return None
 
     def parse_arg_type(self, allow_signature: bool) -> Type:
         if self.current_str() == ':' and allow_signature:
@@ -987,7 +995,7 @@ class Parser:
         expr = None
         current = self.current()
         if current.string == 'yield':
-            self.parse_error()
+            raise self.parse_error()
         if not isinstance(current, Break):
             expr = self.parse_expression()
         node = ReturnStmt(expr)
@@ -1243,10 +1251,10 @@ class Parser:
             if self.current_str() == ',':
                 self.skip()
                 if isinstance(self.current(), Break):
-                    self.parse_error()
+                    raise self.parse_error()
             else:
                 if not isinstance(self.current(), Break):
-                    self.parse_error()
+                    raise self.parse_error()
         comma = False
         while not isinstance(self.current(), Break):
             args.append(self.parse_expression(precedence[',']))
@@ -1321,7 +1329,7 @@ class Parser:
                 expr = self.parse_ellipsis()
             else:
                 # Invalid expression.
-                self.parse_error()
+                raise self.parse_error()
 
         # Set the line of the expression node, if not specified. This
         # simplifies recording the line number as not every node type needs to
@@ -1496,7 +1504,7 @@ class Parser:
             elif self.current_str() == 'for' and items == []:
                 return self.parse_set_comprehension(key)
             elif self.current_str() != ':':
-                self.parse_error()
+                raise self.parse_error()
             colon = self.expect(':')
             value = self.parse_expression(precedence['<for>'])
             if self.current_str() == 'for' and items == []:
@@ -1666,7 +1674,7 @@ class Parser:
                 kinds.append(nodes.ARG_POS)
                 names.append(None)
             else:
-                self.parse_error()
+                raise self.parse_error()
             args.append(self.parse_expression(precedence[',']))
             if self.current_str() != ',':
                 break
@@ -1734,7 +1742,7 @@ class Parser:
         op_str = op.string
         if op_str == '~':
             self.ind -= 1
-            self.parse_error()
+            raise self.parse_error()
         right = self.parse_expression(prec)
         node = OpExpr(op_str, left, right)
         return node
@@ -1751,7 +1759,7 @@ class Parser:
                     op_str = 'not in'
                     self.skip()
                 else:
-                    self.parse_error()
+                    raise self.parse_error()
             elif op_str == 'is' and self.current_str() == 'not':
                 op_str = 'is not'
                 self.skip()
@@ -1818,7 +1826,7 @@ class Parser:
             self.ind += 1
             return self.tok[self.ind - 1]
         else:
-            self.parse_error()
+            raise self.parse_error()
 
     def expect_indent(self) -> Token:
         if isinstance(self.current(), Indent):
@@ -1836,10 +1844,7 @@ class Parser:
             self.ind += 1
             return current
         else:
-            self.parse_error()
-
-    def expect_colon_and_break(self) -> Tuple[Token, Token]:
-        return self.expect_type(Colon), self.expect_type(Break)
+            raise self.parse_error()
 
     def expect_break(self) -> Token:
         return self.expect_type(Break)
@@ -1853,9 +1858,9 @@ class Parser:
     def peek(self) -> Token:
         return self.tok[self.ind + 1]
 
-    def parse_error(self) -> None:
+    def parse_error(self) -> ParseError:
         self.parse_error_at(self.current())
-        raise ParseError()
+        return ParseError()
 
     def parse_error_at(self, tok: Token, skip: bool = True, reason: Optional[str] = None) -> None:
         msg = ''
@@ -1939,9 +1944,6 @@ class Parser:
             return None
 
 
-class ParseError(Exception): pass
-
-
 def token_repr(tok: Token) -> str:
     """Return a representation of a token for use in parse error messages."""
     if isinstance(tok, Break):
diff --git a/mypy/parsetype.py b/mypy/parsetype.py
index 73bb12e..1af0352 100644
--- a/mypy/parsetype.py
+++ b/mypy/parsetype.py
@@ -1,6 +1,6 @@
 """Type parser"""
 
-from typing import List, Tuple, Union, cast, Optional
+from typing import List, Tuple, Union, Optional
 
 from mypy.types import (
     Type, UnboundType, TupleType, TypeList, CallableType, StarType,
@@ -73,7 +73,7 @@ class TypeParser:
                 raise TypeParseError(e.token, self.ind)
             return result
         else:
-            self.parse_error()
+            raise self.parse_error()
 
     def parse_parens(self) -> Type:
         self.expect('(')
@@ -166,14 +166,14 @@ class TypeParser:
             self.ind += 1
             return self.tok[self.ind - 1]
         else:
-            self.parse_error()
+            raise self.parse_error()
 
     def expect_type(self, typ: type) -> Token:
         if isinstance(self.current_token(), typ):
             self.ind += 1
             return self.tok[self.ind - 1]
         else:
-            self.parse_error()
+            raise self.parse_error()
 
     def current_token(self) -> Token:
         return self.tok[self.ind]
@@ -181,8 +181,8 @@ class TypeParser:
     def current_token_str(self) -> str:
         return self.current_token().string
 
-    def parse_error(self) -> None:
-        raise TypeParseError(self.tok[self.ind], self.ind)
+    def parse_error(self) -> TypeParseError:
+        return TypeParseError(self.tok[self.ind], self.ind)
 
 
 def parse_str_as_type(typestr: str, line: int) -> Type:
@@ -199,20 +199,6 @@ def parse_str_as_type(typestr: str, line: int) -> Type:
     return result
 
 
-def parse_str_as_signature(typestr: str, line: int) -> CallableType:
-    """Parse a signature represented as a string.
-
-    Raise TypeParseError on parse error.
-    """
-
-    typestr = typestr.strip()
-    tokens = lex(typestr, line)[0]
-    result, i = parse_signature(tokens)
-    if i < len(tokens) - 2:
-        raise TypeParseError(tokens[i], i)
-    return result
-
-
 def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
     """Parse signature of form (argtype, ...) -> ...
 
diff --git a/mypy/report.py b/mypy/report.py
index 313fbf4..2bb55aa 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -6,16 +6,28 @@ import json
 import os
 import shutil
 import tokenize
+from operator import attrgetter
 
-from typing import Callable, Dict, List, Optional, Tuple, cast
+from typing import Any, Callable, Dict, List, Optional, Tuple, cast
 
-from mypy.nodes import MypyFile, Node, FuncDef
+import time
+
+import sys
+
+from mypy.nodes import MypyFile, Expression, FuncDef
 from mypy import stats
 from mypy.traverser import TraverserVisitor
 from mypy.types import Type
+from mypy.version import __version__
+
+try:
+    import lxml.etree as etree
+    LXML_INSTALLED = True
+except ImportError:
+    LXML_INSTALLED = False
 
 
-reporter_classes = {}  # type: Dict[str, Callable[[Reports, str], AbstractReporter]]
+reporter_classes = {}  # type: Dict[str, Tuple[Callable[[Reports, str], AbstractReporter], bool]]
 
 
 class Reports:
@@ -32,13 +44,19 @@ class Reports:
             return self.named_reporters[report_type]
         except KeyError:
             pass
-        reporter_cls = reporter_classes[report_type]
+        reporter_cls, needs_lxml = reporter_classes[report_type]
+        if needs_lxml and not LXML_INSTALLED:
+            print(('You must install the lxml package before you can run mypy'
+                   ' with `--{}-report`.\n'
+                   'You can do this with `python3 -m pip install lxml`.').format(report_type),
+                  file=sys.stderr)
+            raise ImportError
         reporter = reporter_cls(self, report_dir)
         self.reporters.append(reporter)
         self.named_reporters[report_type] = reporter
         return reporter
 
-    def file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         for reporter in self.reporters:
             reporter.on_file(tree, type_map)
 
@@ -52,7 +70,7 @@ class AbstractReporter(metaclass=ABCMeta):
         self.output_dir = output_dir
 
     @abstractmethod
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         pass
 
     @abstractmethod
@@ -60,6 +78,16 @@ class AbstractReporter(metaclass=ABCMeta):
         pass
 
 
+def register_reporter(report_name: str,
+                      reporter: Callable[[Reports, str], AbstractReporter],
+                      needs_lxml: bool = False) -> None:
+    reporter_classes[report_name] = (reporter, needs_lxml)
+
+
+def alias_reporter(source_reporter: str, target_reporter: str) -> None:
+    reporter_classes[target_reporter] = reporter_classes[source_reporter]
+
+
 class FuncCounterVisitor(TraverserVisitor):
     def __init__(self) -> None:
         super().__init__()
@@ -76,7 +104,7 @@ class LineCountReporter(AbstractReporter):
 
         stats.ensure_dir_exists(output_dir)
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         # Count physical lines.  This assumes the file's encoding is a
         # superset of ASCII (or at least uses \n in its line endings).
         physical_lines = len(open(tree.path, 'rb').readlines())
@@ -103,7 +131,8 @@ class LineCountReporter(AbstractReporter):
                 f.write('{:7} {:7} {:6} {:6} {}\n'.format(
                     c[0], c[1], c[2], c[3], p))
 
-reporter_classes['linecount'] = LineCountReporter
+
+register_reporter('linecount', LineCountReporter)
 
 
 class LineCoverageVisitor(TraverserVisitor):
@@ -195,7 +224,7 @@ class LineCoverageReporter(AbstractReporter):
 
         stats.ensure_dir_exists(output_dir)
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         tree_source = open(tree.path).readlines()
 
         coverage_visitor = LineCoverageVisitor(tree_source)
@@ -212,7 +241,8 @@ class LineCoverageReporter(AbstractReporter):
         with open(os.path.join(self.output_dir, 'coverage.json'), 'w') as f:
             json.dump({'lines': self.lines_covered}, f)
 
-reporter_classes['linecoverage'] = LineCoverageReporter
+
+register_reporter('linecoverage', LineCoverageReporter)
 
 
 class OldHtmlReporter(AbstractReporter):
@@ -222,12 +252,14 @@ class OldHtmlReporter(AbstractReporter):
     variables to preserve state for the index.
     """
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         stats.generate_html_report(tree, tree.path, type_map, self.output_dir)
 
     def on_finish(self) -> None:
         stats.generate_html_index(self.output_dir)
-reporter_classes['old-html'] = OldHtmlReporter
+
+
+register_reporter('old-html', OldHtmlReporter)
 
 
 class FileInfo:
@@ -250,8 +282,6 @@ class MemoryXmlReporter(AbstractReporter):
     """
 
     def __init__(self, reports: Reports, output_dir: str) -> None:
-        import lxml.etree as etree
-
         super().__init__(reports, output_dir)
 
         self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt')
@@ -262,9 +292,7 @@ class MemoryXmlReporter(AbstractReporter):
         self.last_xml = None  # type: etree._ElementTree
         self.files = []  # type: List[FileInfo]
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
-        import lxml.etree as etree
-
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         self.last_xml = None
         path = os.path.relpath(tree.path)
         if stats.is_special_module(path):
@@ -300,8 +328,6 @@ class MemoryXmlReporter(AbstractReporter):
         self.files.append(file_info)
 
     def on_finish(self) -> None:
-        import lxml.etree as etree
-
         self.last_xml = None
         # index_path = os.path.join(self.output_dir, 'index.xml')
         output_files = sorted(self.files, key=lambda x: x.module)
@@ -323,7 +349,129 @@ class MemoryXmlReporter(AbstractReporter):
 
         self.last_xml = doc
 
-reporter_classes['memory-xml'] = MemoryXmlReporter
+
+register_reporter('memory-xml', MemoryXmlReporter, needs_lxml=True)
+
+
+def get_line_rate(covered_lines: int, total_lines: int) -> str:
+    if total_lines == 0:
+        return str(1.0)
+    else:
+        return '{:.4f}'.format(covered_lines / total_lines)
+
+
+class CoberturaPackage(object):
+    """Container for XML and statistics mapping Python modules to Cobertura packages
+    """
+    def __init__(self, name: str) -> None:
+        self.name = name
+        self.classes = {}  # type: Dict[str, etree._Element]
+        self.packages = {}  # type: Dict[str, CoberturaPackage]
+        self.total_lines = 0
+        self.covered_lines = 0
+
+    def as_xml(self) -> Any:
+        package_element = etree.Element('package',
+                                        name=self.name,
+                                        complexity='1.0')
+        package_element.attrib['branch-rate'] = '0'
+        package_element.attrib['line-rate'] = get_line_rate(self.covered_lines, self.total_lines)
+        classes_element = etree.SubElement(package_element, 'classes')
+        for class_name in sorted(self.classes):
+            classes_element.append(self.classes[class_name])
+        self.add_packages(package_element)
+        return package_element
+
+    def add_packages(self, parent_element: Any) -> None:
+        if self.packages:
+            packages_element = etree.SubElement(parent_element, 'packages')
+            for package in sorted(self.packages.values(), key=attrgetter('name')):
+                packages_element.append(package.as_xml())
+
+
+class CoberturaXmlReporter(AbstractReporter):
+    """Reporter for generating Cobertura compliant XML.
+    """
+
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        super().__init__(reports, output_dir)
+
+        self.root = etree.Element('coverage',
+                                  timestamp=str(int(time.time())),
+                                  version=__version__)
+        self.doc = etree.ElementTree(self.root)
+        self.root_package = CoberturaPackage('.')
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+        path = os.path.relpath(tree.path)
+        visitor = stats.StatisticsVisitor(inferred=True, typemap=type_map, all_nodes=True)
+        tree.accept(visitor)
+
+        class_name = os.path.basename(path)
+        file_info = FileInfo(path, tree._fullname)
+        class_element = etree.Element('class',
+                                      filename=path,
+                                      complexity='1.0',
+                                      name=class_name)
+        etree.SubElement(class_element, 'methods')
+        lines_element = etree.SubElement(class_element, 'lines')
+
+        with tokenize.open(path) as input_file:
+            class_lines_covered = 0
+            class_total_lines = 0
+            for lineno, _ in enumerate(input_file, 1):
+                status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
+                hits = 0
+                branch = False
+                if status == stats.TYPE_EMPTY:
+                    continue
+                class_total_lines += 1
+                if status != stats.TYPE_ANY:
+                    class_lines_covered += 1
+                    hits = 1
+                if status == stats.TYPE_IMPRECISE:
+                    branch = True
+                file_info.counts[status] += 1
+                line_element = etree.SubElement(lines_element, 'line',
+                                                number=str(lineno),
+                                                precision=stats.precision_names[status],
+                                                hits=str(hits),
+                                                branch=str(branch).lower())
+                if branch:
+                    line_element.attrib['condition-coverage'] = '50% (1/2)'
+            class_element.attrib['branch-rate'] = '0'
+            class_element.attrib['line-rate'] = get_line_rate(class_lines_covered,
+                                                              class_total_lines)
+            # parent_module is set to whichever module contains this file.  For most files, we want
+            # to simply strip the last element off of the module.  But for __init__.py files,
+            # the module == the parent module.
+            parent_module = file_info.module.rsplit('.', 1)[0]
+            if file_info.name.endswith('__init__.py'):
+                parent_module = file_info.module
+
+            if parent_module not in self.root_package.packages:
+                self.root_package.packages[parent_module] = CoberturaPackage(parent_module)
+            current_package = self.root_package.packages[parent_module]
+            packages_to_update = [self.root_package, current_package]
+            for package in packages_to_update:
+                package.total_lines += class_total_lines
+                package.covered_lines += class_lines_covered
+            current_package.classes[class_name] = class_element
+
+    def on_finish(self) -> None:
+        self.root.attrib['line-rate'] = get_line_rate(self.root_package.covered_lines,
+                                                      self.root_package.total_lines)
+        self.root.attrib['branch-rate'] = '0'
+        sources = etree.SubElement(self.root, 'sources')
+        source_element = etree.SubElement(sources, 'source')
+        source_element.text = os.getcwd()
+        self.root_package.add_packages(self.root)
+        out_path = os.path.join(self.output_dir, 'cobertura.xml')
+        self.doc.write(out_path, encoding='utf-8', pretty_print=True)
+        print('Generated Cobertura report:', os.path.abspath(out_path))
+
+
+register_reporter('cobertura-xml', CoberturaXmlReporter, needs_lxml=True)
 
 
 class AbstractXmlReporter(AbstractReporter):
@@ -347,7 +495,7 @@ class XmlReporter(AbstractXmlReporter):
     that makes it fail from file:// URLs but work on http:// URLs.
     """
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         last_xml = self.memory_xml.last_xml
         if last_xml is None:
             return
@@ -368,7 +516,8 @@ class XmlReporter(AbstractXmlReporter):
         shutil.copyfile(self.memory_xml.css_html_path, out_css)
         print('Generated XML report:', os.path.abspath(out_path))
 
-reporter_classes['xml'] = XmlReporter
+
+register_reporter('xml', XmlReporter, needs_lxml=True)
 
 
 class XsltHtmlReporter(AbstractXmlReporter):
@@ -379,14 +528,12 @@ class XsltHtmlReporter(AbstractXmlReporter):
     """
 
     def __init__(self, reports: Reports, output_dir: str) -> None:
-        import lxml.etree as etree
-
         super().__init__(reports, output_dir)
 
         self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path))
         self.param_html = etree.XSLT.strparam('html')
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         last_xml = self.memory_xml.last_xml
         if last_xml is None:
             return
@@ -409,7 +556,8 @@ class XsltHtmlReporter(AbstractXmlReporter):
         shutil.copyfile(self.memory_xml.css_html_path, out_css)
         print('Generated HTML report (via XSLT):', os.path.abspath(out_path))
 
-reporter_classes['xslt-html'] = XsltHtmlReporter
+
+register_reporter('xslt-html', XsltHtmlReporter, needs_lxml=True)
 
 
 class XsltTxtReporter(AbstractXmlReporter):
@@ -419,13 +567,11 @@ class XsltTxtReporter(AbstractXmlReporter):
     """
 
     def __init__(self, reports: Reports, output_dir: str) -> None:
-        import lxml.etree as etree
-
         super().__init__(reports, output_dir)
 
         self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path))
 
-    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+    def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
         pass
 
     def on_finish(self) -> None:
@@ -437,7 +583,8 @@ class XsltTxtReporter(AbstractXmlReporter):
             out_file.write(transformed_txt)
         print('Generated TXT report (via XSLT):', os.path.abspath(out_path))
 
-reporter_classes['xslt-txt'] = XsltTxtReporter
 
-reporter_classes['html'] = reporter_classes['xslt-html']
-reporter_classes['txt'] = reporter_classes['xslt-txt']
+register_reporter('xslt-txt', XsltTxtReporter, needs_lxml=True)
+
+alias_reporter('xslt-html', 'html')
+alias_reporter('xslt-txt', 'txt')
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 836e56b..5e213a8 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -44,7 +44,7 @@ TODO: Check if the third pass slows down type checking significantly.
 """
 
 from typing import (
-    List, Dict, Set, Tuple, cast, Any, TypeVar, Union, Optional, Callable
+    List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable
 )
 
 from mypy.nodes import (
@@ -53,17 +53,17 @@ from mypy.nodes import (
     ImportFrom, ImportAll, Block, LDEF, NameExpr, MemberExpr,
     IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt,
     RaiseStmt, AssertStmt, OperatorAssignmentStmt, WhileStmt,
-    ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt,
+    ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt, PassStmt,
     GlobalDecl, SuperExpr, DictExpr, CallExpr, RefExpr, OpExpr, UnaryExpr,
     SliceExpr, CastExpr, RevealTypeExpr, TypeApplication, Context, SymbolTable,
     SymbolTableNode, BOUND_TVAR, UNBOUND_TVAR, ListComprehension, GeneratorExpr,
     FuncExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr, NewTypeExpr,
     StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr,
     ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, MroError, type_aliases,
-    YieldFromExpr, NamedTupleExpr, NonlocalDecl, SymbolNode,
+    YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SymbolNode,
     SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
     YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, AwaitExpr,
-    IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr,
+    IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr, TempNode,
     COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES,
 )
 from mypy.visitor import NodeVisitor
@@ -71,9 +71,9 @@ from mypy.traverser import TraverserVisitor
 from mypy.errors import Errors, report_internal_error
 from mypy.types import (
     NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
-    FunctionLike, UnboundType, TypeList, TypeVarDef,
-    replace_leading_arg_type, TupleType, UnionType, StarType, EllipsisType, TypeType)
-from mypy.nodes import function_type, implicit_module_attrs
+    FunctionLike, UnboundType, TypeList, TypeVarDef, TypeType,
+    TupleType, UnionType, StarType, EllipsisType, function_type)
+from mypy.nodes import implicit_module_attrs
 from mypy.typeanal import TypeAnalyser, TypeAnalyserPass3, analyze_type_alias
 from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
 from mypy.sametypes import is_same_type
@@ -84,10 +84,20 @@ from mypy.options import Options
 T = TypeVar('T')
 
 
-# Inferred value of an expression.
-ALWAYS_TRUE = 0
-ALWAYS_FALSE = 1
-TRUTH_VALUE_UNKNOWN = 2
+# Inferred truth value of an expression.
+ALWAYS_TRUE = 1
+MYPY_TRUE = 2  # True in mypy, False at runtime
+ALWAYS_FALSE = 3
+MYPY_FALSE = 4  # False in mypy, True at runtime
+TRUTH_VALUE_UNKNOWN = 5
+
+inverted_truth_mapping = {
+    ALWAYS_TRUE: ALWAYS_FALSE,
+    ALWAYS_FALSE: ALWAYS_TRUE,
+    TRUTH_VALUE_UNKNOWN: TRUTH_VALUE_UNKNOWN,
+    MYPY_TRUE: MYPY_FALSE,
+    MYPY_FALSE: MYPY_TRUE,
+}
 
 # Map from obsolete name to the current spelling.
 obsolete_name_mapping = {
@@ -167,7 +177,7 @@ class SemanticAnalyzer(NodeVisitor):
     bound_tvars = None  # type: List[SymbolTableNode]
     # Stack of type variables that were bound by outer classess
     tvar_stack = None  # type: List[List[SymbolTableNode]]
-    # Per-file options
+    # Per-module options
     options = None  # type: Options
 
     # Stack of functions being analyzed
@@ -270,9 +280,7 @@ class SemanticAnalyzer(NodeVisitor):
                     if defn.name() in self.type.names:
                         # Redefinition. Conditional redefinition is okay.
                         n = self.type.names[defn.name()].node
-                        if self.is_conditional_func(n, defn):
-                            defn.original_def = cast(FuncDef, n)
-                        else:
+                        if not self.set_original_def(n, defn):
                             self.name_already_defined(defn.name(), defn)
                     self.type.names[defn.name()] = SymbolTableNode(MDEF, defn)
                 self.prepare_method_signature(defn)
@@ -282,9 +290,7 @@ class SemanticAnalyzer(NodeVisitor):
                     if defn.name() in self.locals[-1]:
                         # Redefinition. Conditional redefinition is okay.
                         n = self.locals[-1][defn.name()].node
-                        if self.is_conditional_func(n, defn):
-                            defn.original_def = cast(FuncDef, n)
-                        else:
+                        if not self.set_original_def(n, defn):
                             self.name_already_defined(defn.name(), defn)
                     else:
                         self.add_local(defn, defn)
@@ -294,11 +300,7 @@ class SemanticAnalyzer(NodeVisitor):
                     symbol = self.globals.get(defn.name())
                     if isinstance(symbol.node, FuncDef) and symbol.node != defn:
                         # This is redefinition. Conditional redefinition is okay.
-                        original_def = symbol.node
-                        if self.is_conditional_func(original_def, defn):
-                            # Conditional function definition -- multiple defs are ok.
-                            defn.original_def = original_def
-                        else:
+                        if not self.set_original_def(symbol.node, defn):
                             # Report error.
                             self.check_no_global(defn.name(), defn, True)
             if phase_info == FUNCTION_FIRST_PHASE_POSTPONE_SECOND:
@@ -320,19 +322,21 @@ class SemanticAnalyzer(NodeVisitor):
     def prepare_method_signature(self, func: FuncDef) -> None:
         """Check basic signature validity and tweak annotation of self/cls argument."""
         # Only non-static methods are special.
+        functype = func.type
         if not func.is_static:
             if not func.arguments:
                 self.fail('Method must have at least one argument', func)
-            elif func.type:
-                sig = cast(FunctionLike, func.type)
-                if func.is_class:
-                    leading_type = self.class_type(self.type)
-                else:
-                    leading_type = self_type(self.type)
-                func.type = replace_implicit_first_type(sig, leading_type)
+            elif isinstance(functype, CallableType):
+                self_type = functype.arg_types[0]
+                if isinstance(self_type, AnyType):
+                    if func.is_class or func.name() == '__new__':
+                        leading_type = self.class_type(self.type)
+                    else:
+                        leading_type = fill_typevars(self.type)
+                    func.type = replace_implicit_first_type(functype, leading_type)
 
-    def is_conditional_func(self, previous: Node, new: FuncDef) -> bool:
-        """Does 'new' conditionally redefine 'previous'?
+    def set_original_def(self, previous: Node, new: FuncDef) -> bool:
+        """If 'new' conditionally redefines 'previous', set 'previous' as the original
 
         We reject straight redefinitions of functions, as they are usually
         a programming error. For example:
@@ -340,7 +344,11 @@ class SemanticAnalyzer(NodeVisitor):
         . def f(): ...
         . def f(): ...  # Error: 'f' redefined
         """
-        return isinstance(previous, (FuncDef, Var)) and new.is_conditional
+        if isinstance(previous, (FuncDef, Var)) and new.is_conditional:
+            new.original_def = previous
+            return True
+        else:
+            return False
 
     def update_function_type_variables(self, defn: FuncDef) -> None:
         """Make any type variables in the signature of defn explicit.
@@ -349,8 +357,8 @@ class SemanticAnalyzer(NodeVisitor):
         if defn is generic.
         """
         if defn.type:
-            functype = cast(CallableType, defn.type)
-            typevars = self.infer_type_variables(functype)
+            assert isinstance(defn.type, CallableType)
+            typevars = self.infer_type_variables(defn.type)
             # Do not define a new type variable if already defined in scope.
             typevars = [(name, tvar) for name, tvar in typevars
                         if not self.is_defined_type_var(name, defn)]
@@ -360,7 +368,7 @@ class SemanticAnalyzer(NodeVisitor):
                                    tvar[1].values, tvar[1].upper_bound,
                                    tvar[1].variance)
                         for i, tvar in enumerate(typevars)]
-                functype.variables = defs
+                defn.type.variables = defs
 
     def infer_type_variables(self,
                              type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
@@ -374,8 +382,7 @@ class SemanticAnalyzer(NodeVisitor):
                     tvars.append(tvar_expr)
         return list(zip(names, tvars))
 
-    def find_type_variables_in_type(
-            self, type: Type) -> List[Tuple[str, TypeVarExpr]]:
+    def find_type_variables_in_type(self, type: Type) -> List[Tuple[str, TypeVarExpr]]:
         """Return a list of all unique type variable references in type.
 
         This effectively does partial name binding, results of which are mostly thrown away.
@@ -385,7 +392,8 @@ class SemanticAnalyzer(NodeVisitor):
             name = type.name
             node = self.lookup_qualified(name, type)
             if node and node.kind == UNBOUND_TVAR:
-                result.append((name, cast(TypeVarExpr, node.node)))
+                assert isinstance(node.node, TypeVarExpr)
+                result.append((name, node.node))
             for arg in type.args:
                 result.extend(self.find_type_variables_in_type(arg))
         elif isinstance(type, TypeList):
@@ -412,8 +420,9 @@ class SemanticAnalyzer(NodeVisitor):
             item.is_overload = True
             item.func.is_overload = True
             item.accept(self)
-            t.append(cast(CallableType, function_type(item.func,
-                                                  self.builtin_type('builtins.function'))))
+            callable = function_type(item.func, self.builtin_type('builtins.function'))
+            assert isinstance(callable, CallableType)
+            t.append(callable)
             if item.func.is_property and i == 0:
                 # This defines a property, probably with a setter and/or deleter.
                 self.analyze_property_with_multi_part_definition(defn)
@@ -511,8 +520,9 @@ class SemanticAnalyzer(NodeVisitor):
         nodes = []  # type: List[SymbolTableNode]
         if defn.type:
             tt = defn.type
+            assert isinstance(tt, CallableType)
+            items = tt.variables
             names = self.type_var_names()
-            items = cast(CallableType, tt).variables
             for item in items:
                 name = item.name
                 if name in names:
@@ -536,7 +546,8 @@ class SemanticAnalyzer(NodeVisitor):
         return node
 
     def check_function_signature(self, fdef: FuncItem) -> None:
-        sig = cast(CallableType, fdef.type)
+        sig = fdef.type
+        assert isinstance(sig, CallableType)
         if len(sig.arg_types) < len(fdef.arguments):
             self.fail('Type signature has too few arguments', fdef)
             # Add dummy Any arguments to prevent crashes later.
@@ -547,6 +558,8 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_class_def(self, defn: ClassDef) -> None:
         self.clean_up_bases_and_infer_type_variables(defn)
+        if self.analyze_namedtuple_classdef(defn):
+            return
         self.setup_class_def_analysis(defn)
 
         self.bind_class_type_vars(defn)
@@ -559,8 +572,6 @@ class SemanticAnalyzer(NodeVisitor):
 
         self.enter_class(defn)
 
-        self.setup_is_builtinclass(defn)
-
         # Analyze class body.
         defn.defs.accept(self)
 
@@ -606,15 +617,6 @@ class SemanticAnalyzer(NodeVisitor):
     def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
         decorator.accept(self)
 
-    def setup_is_builtinclass(self, defn: ClassDef) -> None:
-        for decorator in defn.decorators:
-            if refers_to_fullname(decorator, 'typing.builtinclass'):
-                defn.is_builtinclass = True
-        if defn.fullname == 'builtins.object':
-            # Only 'object' is marked as a built-in class, as otherwise things elsewhere
-            # would break. We need a better way of dealing with built-in classes.
-            defn.is_builtinclass = True
-
     def calculate_abstract_status(self, typ: TypeInfo) -> None:
         """Calculate abstract status of a class.
 
@@ -721,9 +723,60 @@ class SemanticAnalyzer(NodeVisitor):
         unbound = t
         sym = self.lookup_qualified(unbound.name, unbound)
         if sym is not None and sym.kind == UNBOUND_TVAR:
-            return unbound.name, cast(TypeVarExpr, sym.node)
+            assert isinstance(sym.node, TypeVarExpr)
+            return unbound.name, sym.node
         return None
 
+    def analyze_namedtuple_classdef(self, defn: ClassDef) -> bool:
+        # special case for NamedTuple
+        for base_expr in defn.base_type_exprs:
+            if isinstance(base_expr, RefExpr):
+                base_expr.accept(self)
+                if base_expr.fullname == 'typing.NamedTuple':
+                    node = self.lookup(defn.name, defn)
+                    if node is not None:
+                        node.kind = GDEF  # TODO in process_namedtuple_definition also applies here
+                        items, types = self.check_namedtuple_classdef(defn)
+                        node.node = self.build_namedtuple_typeinfo(defn.name, items, types)
+                        return True
+        return False
+
+    def check_namedtuple_classdef(self, defn: ClassDef) -> Tuple[List[str], List[Type]]:
+        NAMEDTUP_CLASS_ERROR = ('Invalid statement in NamedTuple definition; '
+                               'expected "field_name: field_type"')
+        if self.options.python_version < (3, 6):
+            self.fail('NamedTuple class syntax is only supported in Python 3.6', defn)
+            return [], []
+        if len(defn.base_type_exprs) > 1:
+            self.fail('NamedTuple should be a single base', defn)
+        items = []  # type: List[str]
+        types = []  # type: List[Type]
+        for stmt in defn.defs.body:
+            if not isinstance(stmt, AssignmentStmt):
+                # Still allow pass or ... (for empty namedtuples).
+                if (not isinstance(stmt, PassStmt) and
+                    not (isinstance(stmt, ExpressionStmt) and
+                         isinstance(stmt.expr, EllipsisExpr))):
+                    self.fail(NAMEDTUP_CLASS_ERROR, stmt)
+            elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr):
+                # An assignment, but an invalid one.
+                self.fail(NAMEDTUP_CLASS_ERROR, stmt)
+            else:
+                # Append name and type in this case...
+                name = stmt.lvalues[0].name
+                items.append(name)
+                types.append(AnyType() if stmt.type is None else self.anal_type(stmt.type))
+                # ...despite possible minor failures that allow further analysis.
+                if name.startswith('_'):
+                    self.fail('NamedTuple field name cannot start with an underscore: {}'
+                              .format(name), stmt)
+                if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
+                    self.fail(NAMEDTUP_CLASS_ERROR, stmt)
+                elif not isinstance(stmt.rvalue, TempNode):
+                    # x: int assigns rvalue to TempNode(AnyType())
+                    self.fail('Right hand side values are not supported in NamedTuple', stmt)
+        return items, types
+
     def setup_class_def_analysis(self, defn: ClassDef) -> None:
         """Prepare for the analysis of a class definition."""
         if not defn.info:
@@ -757,10 +810,6 @@ class SemanticAnalyzer(NodeVisitor):
                 if info.tuple_type:
                     self.fail("Class has two incompatible bases derived from tuple", defn)
                     defn.has_incompatible_baseclass = True
-                if (not self.is_stub_file
-                        and not info.is_named_tuple
-                        and base.fallback.type.fullname() == 'builtins.tuple'):
-                    self.fail("Tuple[...] not supported as a base class outside a stub file", defn)
                 info.tuple_type = base
                 base_types.append(base.fallback)
             elif isinstance(base, Instance):
@@ -846,6 +895,9 @@ class SemanticAnalyzer(NodeVisitor):
 
     def analyze_metaclass(self, defn: ClassDef) -> None:
         if defn.metaclass:
+            if defn.metaclass == '<error>':
+                self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn)
+                return
             sym = self.lookup_qualified(defn.metaclass, defn)
             if sym is not None and not isinstance(sym.node, TypeInfo):
                 self.fail("Invalid metaclass '%s'" % defn.metaclass, defn)
@@ -865,16 +917,15 @@ class SemanticAnalyzer(NodeVisitor):
 
     def named_type(self, qualified_name: str, args: List[Type] = None) -> Instance:
         sym = self.lookup_qualified(qualified_name, None)
-        return Instance(cast(TypeInfo, sym.node), args or [])
+        assert isinstance(sym.node, TypeInfo)
+        return Instance(sym.node, args or [])
 
     def named_type_or_none(self, qualified_name: str, args: List[Type] = None) -> Instance:
         sym = self.lookup_fully_qualified_or_none(qualified_name)
         if not sym:
             return None
-        return Instance(cast(TypeInfo, sym.node), args or [])
-
-    def is_instance_type(self, t: Type) -> bool:
-        return isinstance(t, Instance)
+        assert isinstance(sym.node, TypeInfo)
+        return Instance(sym.node, args or [])
 
     def bind_class_type_variables_in_symbol_table(
             self, info: TypeInfo) -> List[SymbolTableNode]:
@@ -913,11 +964,13 @@ class SemanticAnalyzer(NodeVisitor):
         """
         while '.' in id:
             parent, child = id.rsplit('.', 1)
-            modules_loaded = parent in self.modules and id in self.modules
-            if modules_loaded and child not in self.modules[parent].names:
-                sym = SymbolTableNode(MODULE_REF, self.modules[id], parent,
-                        module_public=module_public)
-                self.modules[parent].names[child] = sym
+            parent_mod = self.modules.get(parent)
+            if parent_mod and child not in parent_mod.names:
+                child_mod = self.modules.get(id)
+                if child_mod:
+                    sym = SymbolTableNode(MODULE_REF, child_mod, parent,
+                                          module_public=module_public)
+                    parent_mod.names[child] = sym
             id = parent
 
     def add_module_symbol(self, id: str, as_id: str, module_public: bool,
@@ -931,48 +984,47 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_import_from(self, imp: ImportFrom) -> None:
         import_id = self.correct_relative_import(imp)
-        if import_id in self.modules:
-            module = self.modules[import_id]
-            self.add_submodules_to_parent_modules(import_id, True)
-            for id, as_id in imp.names:
-                node = module.names.get(id)
-
-                # If the module does not contain a symbol with the name 'id',
-                # try checking if it's a module instead.
-                if id not in module.names or node.kind == UNBOUND_IMPORTED:
-                    possible_module_id = import_id + '.' + id
-                    mod = self.modules.get(possible_module_id)
-                    if mod is not None:
-                        node = SymbolTableNode(MODULE_REF, mod, import_id)
-                        self.add_submodules_to_parent_modules(possible_module_id, True)
-
-                if node and node.kind != UNBOUND_IMPORTED:
-                    node = self.normalize_type_alias(node, imp)
-                    if not node:
-                        return
-                    imported_id = as_id or id
-                    existing_symbol = self.globals.get(imported_id)
-                    if existing_symbol:
-                        # Import can redefine a variable. They get special treatment.
-                        if self.process_import_over_existing_name(
-                                imported_id, existing_symbol, node, imp):
-                            continue
-                    # 'from m import x as x' exports x in a stub file.
-                    module_public = not self.is_stub_file or as_id is not None
-                    symbol = SymbolTableNode(node.kind, node.node,
-                                             self.cur_mod_id,
-                                             node.type_override,
-                                             module_public=module_public)
-                    self.add_symbol(imported_id, symbol, imp)
-                else:
-                    message = "Module '{}' has no attribute '{}'".format(import_id, id)
-                    extra = self.undefined_name_extra_info('{}.{}'.format(import_id, id))
-                    if extra:
-                        message += " {}".format(extra)
-                    self.fail(message, imp)
-        else:
-            # Missing module.
-            for id, as_id in imp.names:
+        self.add_submodules_to_parent_modules(import_id, True)
+        module = self.modules.get(import_id)
+        for id, as_id in imp.names:
+            node = module.names.get(id) if module else None
+
+            # If the module does not contain a symbol with the name 'id',
+            # try checking if it's a module instead.
+            if not node or node.kind == UNBOUND_IMPORTED:
+                possible_module_id = import_id + '.' + id
+                mod = self.modules.get(possible_module_id)
+                if mod is not None:
+                    node = SymbolTableNode(MODULE_REF, mod, import_id)
+                    self.add_submodules_to_parent_modules(possible_module_id, True)
+
+            if node and node.kind != UNBOUND_IMPORTED:
+                node = self.normalize_type_alias(node, imp)
+                if not node:
+                    return
+                imported_id = as_id or id
+                existing_symbol = self.globals.get(imported_id)
+                if existing_symbol:
+                    # Import can redefine a variable. They get special treatment.
+                    if self.process_import_over_existing_name(
+                            imported_id, existing_symbol, node, imp):
+                        continue
+                # 'from m import x as x' exports x in a stub file.
+                module_public = not self.is_stub_file or as_id is not None
+                symbol = SymbolTableNode(node.kind, node.node,
+                                         self.cur_mod_id,
+                                         node.type_override,
+                                         module_public=module_public)
+                self.add_symbol(imported_id, symbol, imp)
+            elif module:
+                # Missing attribute.
+                message = "Module '{}' has no attribute '{}'".format(import_id, id)
+                extra = self.undefined_name_extra_info('{}.{}'.format(import_id, id))
+                if extra:
+                    message += " {}".format(extra)
+                self.fail(message, imp)
+            else:
+                # Missing module.
                 self.add_unknown_symbol(as_id or id, imp, is_import=True)
 
     def process_import_over_existing_name(self,
@@ -1038,7 +1090,8 @@ class SemanticAnalyzer(NodeVisitor):
                                 name, existing_symbol, node, i):
                             continue
                     self.add_symbol(name, SymbolTableNode(node.kind, node.node,
-                                                          self.cur_mod_id), i)
+                                                          self.cur_mod_id,
+                                                          node.type_override), i)
         else:
             # Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
             pass
@@ -1070,7 +1123,8 @@ class SemanticAnalyzer(NodeVisitor):
         if b:
             self.visit_block(b)
 
-    def anal_type(self, t: Type, allow_tuple_literal: bool = False) -> Type:
+    def anal_type(self, t: Type, allow_tuple_literal: bool = False,
+                  aliasing: bool = False) -> Type:
         if t:
             if allow_tuple_literal:
                 # Types such as (t1, t2, ...) only allowed in assignment statements. They'll
@@ -1087,7 +1141,8 @@ class SemanticAnalyzer(NodeVisitor):
                     return TupleType(items, self.builtin_type('builtins.tuple'), t.line)
             a = TypeAnalyser(self.lookup_qualified,
                              self.lookup_fully_qualified,
-                             self.fail)
+                             self.fail,
+                             aliasing=aliasing)
             return t.accept(a)
         else:
             return None
@@ -1117,7 +1172,8 @@ class SemanticAnalyzer(NodeVisitor):
                     node.kind = TYPE_ALIAS
                     node.type_override = res
                     if isinstance(s.rvalue, IndexExpr):
-                        s.rvalue.analyzed = TypeAliasExpr(res)
+                        s.rvalue.analyzed = TypeAliasExpr(res,
+                                                          fallback=self.alias_fallback(res))
         if s.type:
             # Store type into nodes.
             for lvalue in s.lvalues:
@@ -1126,6 +1182,7 @@ class SemanticAnalyzer(NodeVisitor):
         self.process_newtype_declaration(s)
         self.process_typevar_declaration(s)
         self.process_namedtuple_definition(s)
+        self.process_typeddict_definition(s)
 
         if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and
                 s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and
@@ -1154,6 +1211,19 @@ class SemanticAnalyzer(NodeVisitor):
             return self.named_type_or_none('builtins.unicode')
         return None
 
+    def alias_fallback(self, tp: Type) -> Instance:
+        """Make a dummy Instance with no methods. It is used as a fallback type
+        to detect errors for non-Instance aliases (i.e. Unions, Tuples, Callables).
+        """
+        kind = (' to Callable' if isinstance(tp, CallableType) else
+                ' to Tuple' if isinstance(tp, TupleType) else
+                ' to Union' if isinstance(tp, UnionType) else '')
+        cdef = ClassDef('Type alias' + kind, Block([]))
+        fb_info = TypeInfo(SymbolTable(), cdef, self.cur_mod_id)
+        fb_info.bases = [self.object_type()]
+        fb_info.mro = [fb_info, self.object_type().type]
+        return Instance(fb_info, [])
+
     def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None:
         """Check if assignment creates a type alias and set it up as needed."""
         # For now, type aliases only work at the top level of a module.
@@ -1244,11 +1314,10 @@ class SemanticAnalyzer(NodeVisitor):
                 lval.accept(self)
         elif (isinstance(lval, TupleExpr) or
               isinstance(lval, ListExpr)):
-            items = cast(Any, lval).items
+            items = lval.items
             if len(items) == 0 and isinstance(lval, TupleExpr):
                 self.fail("Can't assign to ()", lval)
-            self.analyze_tuple_or_list_lvalue(cast(Union[ListExpr, TupleExpr], lval),
-                                              add_global, explicit_type)
+            self.analyze_tuple_or_list_lvalue(lval, add_global, explicit_type)
         elif isinstance(lval, StarExpr):
             if nested:
                 self.analyze_lvalue(lval.expr, nested, add_global, explicit_type)
@@ -1262,9 +1331,7 @@ class SemanticAnalyzer(NodeVisitor):
                                      explicit_type: bool = False) -> None:
         """Analyze an lvalue or assignment target that is a list or tuple."""
         items = lval.items
-        star_exprs = [cast(StarExpr, item)
-                      for item in items
-                      if isinstance(item, StarExpr)]
+        star_exprs = [item for item in items if isinstance(item, StarExpr)]
 
         if len(star_exprs) > 1:
             self.fail('Two starred expressions in assignment', lval)
@@ -1396,14 +1463,14 @@ class SemanticAnalyzer(NodeVisitor):
         if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)):
             self.fail("Argument 1 to NewType(...) must be a string literal", context)
             has_failed = True
-        elif cast(StrExpr, call.args[0]).value != name:
+        elif args[0].value != name:
             msg = "String argument 1 '{}' to NewType(...) does not match variable name '{}'"
-            self.fail(msg.format(cast(StrExpr, call.args[0]).value, name), context)
+            self.fail(msg.format(args[0].value, name), context)
             has_failed = True
 
         # Check second argument
         try:
-            unanalyzed_type = expr_to_unanalyzed_type(call.args[1])
+            unanalyzed_type = expr_to_unanalyzed_type(args[1])
         except TypeTranslationError:
             self.fail("Argument 2 to NewType(...) must be a valid type", context)
             return None
@@ -1441,7 +1508,8 @@ class SemanticAnalyzer(NodeVisitor):
         if not call:
             return
 
-        lvalue = cast(NameExpr, s.lvalues[0])
+        lvalue = s.lvalues[0]
+        assert isinstance(lvalue, NameExpr)
         name = lvalue.name
         if not lvalue.is_def:
             if s.type:
@@ -1482,9 +1550,9 @@ class SemanticAnalyzer(NodeVisitor):
                 or not call.arg_kinds[0] == ARG_POS):
             self.fail("TypeVar() expects a string literal as first argument", context)
             return False
-        if cast(StrExpr, call.args[0]).value != name:
+        elif call.args[0].value != name:
             msg = "String argument 1 '{}' to TypeVar(...) does not match variable name '{}'"
-            self.fail(msg.format(cast(StrExpr, call.args[0]).value, name), context)
+            self.fail(msg.format(call.args[0].value, name), context)
             return False
         return True
 
@@ -1497,9 +1565,9 @@ class SemanticAnalyzer(NodeVisitor):
         if not isinstance(s.rvalue, CallExpr):
             return None
         call = s.rvalue
-        if not isinstance(call.callee, RefExpr):
-            return None
         callee = call.callee
+        if not isinstance(callee, RefExpr):
+            return None
         if callee.fullname != 'typing.TypeVar':
             return None
         return call
@@ -1578,10 +1646,9 @@ class SemanticAnalyzer(NodeVisitor):
         # Yes, it's a valid namedtuple definition. Add it to the symbol table.
         node = self.lookup(name, s)
         node.kind = GDEF   # TODO locally defined namedtuple
-        # TODO call.analyzed
         node.node = named_tuple
 
-    def check_namedtuple(self, node: Expression, var_name: str = None) -> TypeInfo:
+    def check_namedtuple(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
         """Check if a call defines a namedtuple.
 
         The optional var_name argument is the name of the variable to
@@ -1595,9 +1662,9 @@ class SemanticAnalyzer(NodeVisitor):
         if not isinstance(node, CallExpr):
             return None
         call = node
-        if not isinstance(call.callee, RefExpr):
-            return None
         callee = call.callee
+        if not isinstance(callee, RefExpr):
+            return None
         fullname = callee.fullname
         if fullname not in ('collections.namedtuple', 'typing.NamedTuple'):
             return None
@@ -1606,9 +1673,9 @@ class SemanticAnalyzer(NodeVisitor):
             # Error. Construct dummy return value.
             return self.build_namedtuple_typeinfo('namedtuple', [], [])
         else:
-            # Give it a unique name derived from the line number.
             name = cast(StrExpr, call.args[0]).value
             if name != var_name:
+                # Give it a unique name derived from the line number.
                 name += '@' + str(call.line)
             info = self.build_namedtuple_typeinfo(name, items, types)
             # Store it as a global just in case it would remain anonymous.
@@ -1619,7 +1686,7 @@ class SemanticAnalyzer(NodeVisitor):
 
     def parse_namedtuple_args(self, call: CallExpr,
                               fullname: str) -> Tuple[List[str], List[Type], bool]:
-        # TODO Share code with check_argument_count in checkexpr.py?
+        # TODO: Share code with check_argument_count in checkexpr.py?
         args = call.args
         if len(args) < 2:
             return self.fail_namedtuple_arg("Too few arguments for namedtuple()", call)
@@ -1633,14 +1700,14 @@ class SemanticAnalyzer(NodeVisitor):
                 "namedtuple() expects a string literal as the first argument", call)
         types = []  # type: List[Type]
         ok = True
-        if not isinstance(args[1], ListExpr):
+        if not isinstance(args[1], (ListExpr, TupleExpr)):
             if (fullname == 'collections.namedtuple'
                     and isinstance(args[1], (StrExpr, BytesExpr, UnicodeExpr))):
                 str_expr = cast(StrExpr, args[1])
                 items = str_expr.value.replace(',', ' ').split()
             else:
                 return self.fail_namedtuple_arg(
-                    "List literal expected as the second argument to namedtuple()", call)
+                    "List or tuple literal expected as the second argument to namedtuple()", call)
         else:
             listexpr = args[1]
             if fullname == 'collections.namedtuple':
@@ -1733,31 +1800,41 @@ class SemanticAnalyzer(NodeVisitor):
         add_field(Var('_field_types', dictype), is_initialized_in_class=True)
         add_field(Var('_source', strtype), is_initialized_in_class=True)
 
-        # TODO: SelfType should be bind to actual 'self'
-        this_type = self_type(info)
+        tvd = TypeVarDef('NT', 1, [], info.tuple_type)
+        selftype = TypeVarType(tvd)
 
         def add_method(funcname: str, ret: Type, args: List[Argument], name=None,
                        is_classmethod=False) -> None:
-            if not is_classmethod:
-                args = [Argument(Var('self'), this_type, None, ARG_POS)] + args
+            if is_classmethod:
+                first = [Argument(Var('cls'), TypeType(selftype), None, ARG_POS)]
+            else:
+                first = [Argument(Var('self'), selftype, None, ARG_POS)]
+            args = first + args
+
             types = [arg.type_annotation for arg in args]
             items = [arg.variable.name() for arg in args]
             arg_kinds = [arg.kind for arg in args]
             signature = CallableType(types, arg_kinds, items, ret, function_type,
                                      name=name or info.name() + '.' + funcname)
-            signature.is_classmethod_class = is_classmethod
+            signature.variables = [tvd]
             func = FuncDef(funcname, args, Block([]), typ=signature)
             func.info = info
             func.is_class = is_classmethod
-            info.names[funcname] = SymbolTableNode(MDEF, func)
+            if is_classmethod:
+                v = Var(funcname, signature)
+                v.is_classmethod = True
+                v.info = info
+                dec = Decorator(func, [NameExpr('classmethod')], v)
+                info.names[funcname] = SymbolTableNode(MDEF, dec)
+            else:
+                info.names[funcname] = SymbolTableNode(MDEF, func)
 
-        add_method('_replace', ret=this_type,
+        add_method('_replace', ret=selftype,
                    args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED) for var in vars])
         add_method('__init__', ret=NoneTyp(), name=info.name(),
                    args=[Argument(var, var.type, None, ARG_POS) for var in vars])
         add_method('_asdict', args=[], ret=ordereddictype)
-        # FIX: make it actual class method
-        add_method('_make', ret=this_type, is_classmethod=True,
+        add_method('_make', ret=selftype, is_classmethod=True,
                    args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS),
                          Argument(Var('new'), AnyType(), EllipsisExpr(), ARG_NAMED),
                          Argument(Var('len'), AnyType(), EllipsisExpr(), ARG_NAMED)])
@@ -1776,6 +1853,114 @@ class SemanticAnalyzer(NodeVisitor):
                 result.append(AnyType())
         return result
 
+    def process_typeddict_definition(self, s: AssignmentStmt) -> None:
+        """Check if s defines a TypedDict; if yes, store the definition in symbol table."""
+        if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
+            return
+        lvalue = s.lvalues[0]
+        name = lvalue.name
+        typed_dict = self.check_typeddict(s.rvalue, name)
+        if typed_dict is None:
+            return
+        # Yes, it's a valid TypedDict definition. Add it to the symbol table.
+        node = self.lookup(name, s)
+        node.kind = GDEF   # TODO locally defined TypedDict
+        node.node = typed_dict
+
+    def check_typeddict(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
+        """Check if a call defines a TypedDict.
+
+        The optional var_name argument is the name of the variable to
+        which this is assigned, if any.
+
+        If it does, return the corresponding TypeInfo. Return None otherwise.
+
+        If the definition is invalid but looks like a TypedDict,
+        report errors but return (some) TypeInfo.
+        """
+        if not isinstance(node, CallExpr):
+            return None
+        call = node
+        callee = call.callee
+        if not isinstance(callee, RefExpr):
+            return None
+        fullname = callee.fullname
+        if fullname != 'mypy_extensions.TypedDict':
+            return None
+        items, types, ok = self.parse_typeddict_args(call, fullname)
+        if not ok:
+            # Error. Construct dummy return value.
+            return self.build_typeddict_typeinfo('TypedDict', [], [])
+        else:
+            name = cast(StrExpr, call.args[0]).value
+            if name != var_name:
+                # Give it a unique name derived from the line number.
+                name += '@' + str(call.line)
+            info = self.build_typeddict_typeinfo(name, items, types)
+            # Store it as a global just in case it would remain anonymous.
+            self.globals[name] = SymbolTableNode(GDEF, info, self.cur_mod_id)
+        call.analyzed = TypedDictExpr(info)
+        call.analyzed.set_line(call.line, call.column)
+        return info
+
+    def parse_typeddict_args(self, call: CallExpr,
+                             fullname: str) -> Tuple[List[str], List[Type], bool]:
+        # TODO: Share code with check_argument_count in checkexpr.py?
+        args = call.args
+        if len(args) < 2:
+            return self.fail_typeddict_arg("Too few arguments for TypedDict()", call)
+        if len(args) > 2:
+            return self.fail_typeddict_arg("Too many arguments for TypedDict()", call)
+        # TODO: Support keyword arguments
+        if call.arg_kinds != [ARG_POS, ARG_POS]:
+            return self.fail_typeddict_arg("Unexpected arguments to TypedDict()", call)
+        if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)):
+            return self.fail_typeddict_arg(
+                "TypedDict() expects a string literal as the first argument", call)
+        if not isinstance(args[1], DictExpr):
+            return self.fail_typeddict_arg(
+                "TypedDict() expects a dictionary literal as the second argument", call)
+        dictexpr = args[1]
+        items, types, ok = self.parse_typeddict_fields_with_types(dictexpr.items, call)
+        return items, types, ok
+
+    def parse_typeddict_fields_with_types(self, dict_items: List[Tuple[Expression, Expression]],
+                                          context: Context) -> Tuple[List[str], List[Type], bool]:
+        items = []  # type: List[str]
+        types = []  # type: List[Type]
+        for (field_name_expr, field_type_expr) in dict_items:
+            if isinstance(field_name_expr, (StrExpr, BytesExpr, UnicodeExpr)):
+                items.append(field_name_expr.value)
+            else:
+                return self.fail_typeddict_arg("Invalid TypedDict() field name", field_name_expr)
+            try:
+                type = expr_to_unanalyzed_type(field_type_expr)
+            except TypeTranslationError:
+                return self.fail_typeddict_arg('Invalid field type', field_type_expr)
+            types.append(self.anal_type(type))
+        return items, types, True
+
+    def fail_typeddict_arg(self, message: str,
+                           context: Context) -> Tuple[List[str], List[Type], bool]:
+        self.fail(message, context)
+        return [], [], False
+
+    def build_typeddict_typeinfo(self, name: str, items: List[str],
+                                 types: List[Type]) -> TypeInfo:
+        strtype = self.named_type('__builtins__.str')  # type: Type
+        dictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
+                   or self.object_type())
+        fallback = dictype
+
+        info = self.basic_new_typeinfo(name, fallback)
+        info.is_typed_dict = True
+
+        # (TODO: Store {items, types} inside "info" somewhere for use later.
+        #        Probably inside a new "info.keys" field which
+        #        would be analogous to "info.names".)
+
+        return info
+
     def visit_decorator(self, dec: Decorator) -> None:
         for d in dec.decorators:
             d.accept(self)
@@ -1932,6 +2117,8 @@ class SemanticAnalyzer(NodeVisitor):
             return True
         elif isinstance(s, TupleExpr):
             return all(self.is_valid_del_target(item) for item in s.items)
+        else:
+            return False
 
     def visit_global_decl(self, g: GlobalDecl) -> None:
         for name in g.names:
@@ -2061,6 +2248,7 @@ class SemanticAnalyzer(NodeVisitor):
                 return
             expr.analyzed = RevealTypeExpr(expr.args[0])
             expr.analyzed.line = expr.line
+            expr.analyzed.column = expr.column
             expr.analyzed.accept(self)
         elif refers_to_fullname(expr.callee, 'typing.Any'):
             # Special form Any(...).
@@ -2145,7 +2333,8 @@ class SemanticAnalyzer(NodeVisitor):
             # This branch handles the case foo.bar where foo is a module.
             # In this case base.node is the module's MypyFile and we look up
             # bar in its namespace.  This must be done for all types of bar.
-            file = cast(MypyFile, base.node)
+            file = base.node
+            assert isinstance(file, MypyFile)
             n = file.names.get(expr.name, None) if file is not None else None
             if n:
                 n = self.normalize_type_alias(n, expr)
@@ -2196,7 +2385,16 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_index_expr(self, expr: IndexExpr) -> None:
         expr.base.accept(self)
-        if refers_to_class_or_function(expr.base):
+        if isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS:
+            # Special form -- subscripting a generic type alias.
+            # Perform the type substitution and create a new alias.
+            res = analyze_type_alias(expr,
+                                     self.lookup_qualified,
+                                     self.lookup_fully_qualified,
+                                     self.fail)
+            expr.analyzed = TypeAliasExpr(res, fallback=self.alias_fallback(res),
+                                          in_runtime=True)
+        elif refers_to_class_or_function(expr.base):
             # Special form -- type application.
             # Translate index to an unanalyzed type.
             types = []  # type: List[Type]
@@ -2210,7 +2408,7 @@ class SemanticAnalyzer(NodeVisitor):
                 except TypeTranslationError:
                     self.fail('Type expected within [...]', expr)
                     return
-                typearg = self.anal_type(typearg)
+                typearg = self.anal_type(typearg, aliasing=True)
                 types.append(typearg)
             expr.analyzed = TypeApplication(expr.base, types)
             expr.analyzed.line = expr.line
@@ -2350,7 +2548,8 @@ class SemanticAnalyzer(NodeVisitor):
         # 5. Builtins
         b = self.globals.get('__builtins__', None)
         if b:
-            table = cast(MypyFile, b.node).names
+            assert isinstance(b.node, MypyFile)
+            table = b.node.names
             if name in table:
                 if name[0] == "_" and name[1] != "_":
                     self.name_not_defined(name, ctx)
@@ -2405,8 +2604,8 @@ class SemanticAnalyzer(NodeVisitor):
 
     def builtin_type(self, fully_qualified_name: str) -> Instance:
         node = self.lookup_fully_qualified(fully_qualified_name)
-        info = cast(TypeInfo, node.node)
-        return Instance(info, [])
+        assert isinstance(node.node, TypeInfo)
+        return Instance(node.node, [])
 
     def lookup_fully_qualified(self, name: str) -> SymbolTableNode:
         """Lookup a fully qualified name.
@@ -2418,10 +2617,12 @@ class SemanticAnalyzer(NodeVisitor):
         parts = name.split('.')
         n = self.modules[parts[0]]
         for i in range(1, len(parts) - 1):
-            n = cast(MypyFile, n.names[parts[i]].node)
-        return n.names[parts[-1]]
+            next_sym = n.names[parts[i]]
+            assert isinstance(next_sym.node, MypyFile)
+            n = next_sym.node
+        return n.names.get(parts[-1])
 
-    def lookup_fully_qualified_or_none(self, name: str) -> SymbolTableNode:
+    def lookup_fully_qualified_or_none(self, name: str) -> Optional[SymbolTableNode]:
         """Lookup a fully qualified name.
 
         Assume that the name is defined. This happens in the global namespace -- the local
@@ -2434,11 +2635,16 @@ class SemanticAnalyzer(NodeVisitor):
             next_sym = n.names.get(parts[i])
             if not next_sym:
                 return None
-            n = cast(MypyFile, next_sym.node)
+            assert isinstance(next_sym.node, MypyFile)
+            n = next_sym.node
         return n.names.get(parts[-1])
 
     def qualified_name(self, n: str) -> str:
-        return self.cur_mod_id + '.' + n
+        if self.type is not None:
+            base = self.type._fullname
+        else:
+            base = self.cur_mod_id
+        return base + '.' + n
 
     def enter(self) -> None:
         self.locals.append(SymbolTable())
@@ -2459,6 +2665,9 @@ class SemanticAnalyzer(NodeVisitor):
     def is_class_scope(self) -> bool:
         return self.type is not None and not self.is_func_scope()
 
+    def is_module_scope(self) -> bool:
+        return not (self.is_class_scope() or self.is_func_scope())
+
     def add_symbol(self, name: str, node: SymbolTableNode,
                    context: Context) -> None:
         if self.is_func_scope():
@@ -2484,14 +2693,7 @@ class SemanticAnalyzer(NodeVisitor):
                     self.name_already_defined(name, context)
             self.globals[name] = node
 
-    def add_var(self, v: Var, ctx: Context) -> None:
-        if self.is_func_scope():
-            self.add_local(v, ctx)
-        else:
-            self.globals[v.name()] = SymbolTableNode(GDEF, v, self.cur_mod_id)
-            v._fullname = self.qualified_name(v.name())
-
-    def add_local(self, node: Union[Var, FuncBase], ctx: Context) -> None:
+    def add_local(self, node: Union[Var, FuncDef, OverloadedFuncDef], ctx: Context) -> None:
         name = node.name()
         if name in self.locals[-1]:
             self.name_already_defined(name, ctx)
@@ -2641,47 +2843,61 @@ class FirstPass(NodeVisitor):
         self.sem.block_depth[-1] -= 1
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
-        for lval in s.lvalues:
-            self.analyze_lvalue(lval, explicit_type=s.type is not None)
+        if self.sem.is_module_scope():
+            for lval in s.lvalues:
+                self.analyze_lvalue(lval, explicit_type=s.type is not None)
 
     def visit_func_def(self, func: FuncDef) -> None:
         sem = self.sem
         func.is_conditional = sem.block_depth[-1] > 0
         func._fullname = sem.qualified_name(func.name())
-        if func.name() in sem.globals:
+        at_module = sem.is_module_scope()
+        if at_module and func.name() in sem.globals:
             # Already defined in this module.
             original_sym = sem.globals[func.name()]
             if original_sym.kind == UNBOUND_IMPORTED:
                 # Ah this is an imported name. We can't resolve them now, so we'll postpone
                 # this until the main phase of semantic analysis.
                 return
-            original_def = original_sym.node
-            if sem.is_conditional_func(original_def, func):
-                # Conditional function definition -- multiple defs are ok.
-                func.original_def = cast(FuncDef, original_def)
-            else:
+            if not sem.set_original_def(original_sym.node, func):
                 # Report error.
                 sem.check_no_global(func.name(), func)
         else:
-            sem.globals[func.name()] = SymbolTableNode(GDEF, func, sem.cur_mod_id)
+            if at_module:
+                sem.globals[func.name()] = SymbolTableNode(GDEF, func, sem.cur_mod_id)
+            # Also analyze the function body (in case there are conditional imports).
+            sem.function_stack.append(func)
+            sem.errors.push_function(func.name())
+            sem.enter()
+            func.body.accept(self)
+            sem.leave()
+            sem.errors.pop_function()
+            sem.function_stack.pop()
 
     def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None:
-        self.sem.check_no_global(func.name(), func, True)
+        kind = self.kind_by_scope()
+        if kind == GDEF:
+            self.sem.check_no_global(func.name(), func, True)
         func._fullname = self.sem.qualified_name(func.name())
-        self.sem.globals[func.name()] = SymbolTableNode(GDEF, func,
-                                                        self.sem.cur_mod_id)
+        if kind == GDEF:
+            self.sem.globals[func.name()] = SymbolTableNode(kind, func, self.sem.cur_mod_id)
 
     def visit_class_def(self, cdef: ClassDef) -> None:
-        self.sem.check_no_global(cdef.name, cdef)
+        kind = self.kind_by_scope()
+        if kind == LDEF:
+            return
+        elif kind == GDEF:
+            self.sem.check_no_global(cdef.name, cdef)
         cdef.fullname = self.sem.qualified_name(cdef.name)
         info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
         info.set_line(cdef.line, cdef.column)
         cdef.info = info
-        self.sem.globals[cdef.name] = SymbolTableNode(GDEF, info,
-                                                      self.sem.cur_mod_id)
+        if kind == GDEF:
+            self.sem.globals[cdef.name] = SymbolTableNode(kind, info, self.sem.cur_mod_id)
         self.process_nested_classes(cdef)
 
     def process_nested_classes(self, outer_def: ClassDef) -> None:
+        self.sem.enter_class(outer_def)
         for node in outer_def.defs.body:
             if isinstance(node, ClassDef):
                 node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
@@ -2693,20 +2909,28 @@ class FirstPass(NodeVisitor):
                 symbol = SymbolTableNode(MDEF, node.info)
                 outer_def.info.names[node.name] = symbol
                 self.process_nested_classes(node)
+            elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)):
+                node.accept(self)
+        self.sem.leave_class()
 
     def visit_import_from(self, node: ImportFrom) -> None:
         # We can't bind module names during the first pass, as the target module might be
         # unprocessed. However, we add dummy unbound imported names to the symbol table so
         # that we at least know that the name refers to a module.
-        node.is_top_level = True
+        at_module = self.sem.is_module_scope()
+        node.is_top_level = at_module
+        if not at_module:
+            return
         for name, as_name in node.names:
             imported_name = as_name or name
             if imported_name not in self.sem.globals:
                 self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
 
     def visit_import(self, node: Import) -> None:
-        node.is_top_level = True
+        node.is_top_level = self.sem.is_module_scope()
         # This is similar to visit_import_from -- see the comment there.
+        if not self.sem.is_module_scope():
+            return
         for id, as_id in node.ids:
             imported_id = as_id or id
             if imported_id not in self.sem.globals:
@@ -2716,28 +2940,31 @@ class FirstPass(NodeVisitor):
                 self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
 
     def visit_import_all(self, node: ImportAll) -> None:
-        node.is_top_level = True
+        node.is_top_level = self.sem.is_module_scope()
 
     def visit_while_stmt(self, s: WhileStmt) -> None:
-        s.body.accept(self)
-        if s.else_body:
-            s.else_body.accept(self)
+        if self.sem.is_module_scope():
+            s.body.accept(self)
+            if s.else_body:
+                s.else_body.accept(self)
 
     def visit_for_stmt(self, s: ForStmt) -> None:
-        self.analyze_lvalue(s.index)
-        s.body.accept(self)
-        if s.else_body:
-            s.else_body.accept(self)
+        if self.sem.is_module_scope():
+            self.analyze_lvalue(s.index)
+            s.body.accept(self)
+            if s.else_body:
+                s.else_body.accept(self)
 
     def visit_with_stmt(self, s: WithStmt) -> None:
-        for n in s.target:
-            if n:
-                self.analyze_lvalue(n)
-        s.body.accept(self)
+        if self.sem.is_module_scope():
+            for n in s.target:
+                if n:
+                    self.analyze_lvalue(n)
+            s.body.accept(self)
 
     def visit_decorator(self, d: Decorator) -> None:
         d.var._fullname = self.sem.qualified_name(d.var.name())
-        self.sem.add_symbol(d.var.name(), SymbolTableNode(GDEF, d.var), d)
+        self.sem.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d.var), d)
 
     def visit_if_stmt(self, s: IfStmt) -> None:
         infer_reachability_of_if_statement(s, pyversion=self.pyversion, platform=self.platform)
@@ -2747,10 +2974,22 @@ class FirstPass(NodeVisitor):
             s.else_body.accept(self)
 
     def visit_try_stmt(self, s: TryStmt) -> None:
-        self.sem.analyze_try_stmt(s, self, add_global=True)
+        if self.sem.is_module_scope():
+            self.sem.analyze_try_stmt(s, self, add_global=self.sem.is_module_scope())
 
     def analyze_lvalue(self, lvalue: Lvalue, explicit_type: bool = False) -> None:
-        self.sem.analyze_lvalue(lvalue, add_global=True, explicit_type=explicit_type)
+        self.sem.analyze_lvalue(lvalue, add_global=self.sem.is_module_scope(),
+                                explicit_type=explicit_type)
+
+    def kind_by_scope(self) -> int:
+        if self.sem.is_module_scope():
+            return GDEF
+        elif self.sem.is_class_scope():
+            return MDEF
+        elif self.sem.is_func_scope():
+            return LDEF
+        else:
+            assert False, "Couldn't determine scope"
 
 
 class ThirdPass(TraverserVisitor):
@@ -2849,6 +3088,8 @@ class ThirdPass(TraverserVisitor):
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
         self.analyze(s.type)
+        if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
+            self.analyze(s.rvalue.analyzed.type)
         super().visit_assignment_stmt(s)
 
     def visit_cast_expr(self, e: CastExpr) -> None:
@@ -2883,7 +3124,7 @@ class ThirdPass(TraverserVisitor):
         return Instance(sym.node, args or [])
 
 
-def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
+def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]:
     """For a non-generic type, return instance type representing the type.
     For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn].
     """
@@ -2898,11 +3139,12 @@ def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
 
 def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
     if isinstance(sig, CallableType):
-        return replace_leading_arg_type(sig, new)
-    else:
-        sig = cast(Overloaded, sig)
+        return sig.copy_modified(arg_types=[new] + sig.arg_types[1:])
+    elif isinstance(sig, Overloaded):
         return Overloaded([cast(CallableType, replace_implicit_first_type(i, new))
                            for i in sig.items()])
+    else:
+        assert False
 
 
 def set_callable_name(sig: Type, fdef: FuncDef) -> Type:
@@ -2981,12 +3223,16 @@ def infer_reachability_of_if_statement(s: IfStmt,
                                        platform: str) -> None:
     for i in range(len(s.expr)):
         result = infer_if_condition_value(s.expr[i], pyversion, platform)
-        if result == ALWAYS_FALSE:
-            # The condition is always false, so we skip the if/elif body.
+        if result in (ALWAYS_FALSE, MYPY_FALSE):
+            # The condition is considered always false, so we skip the if/elif body.
             mark_block_unreachable(s.body[i])
-        elif result == ALWAYS_TRUE:
-            # This condition is always true, so all of the remaining
-            # elif/else bodies will never be executed.
+        elif result in (ALWAYS_TRUE, MYPY_TRUE):
+            # This condition is considered always true, so all of the remaining
+            # elif/else bodies should not be checked.
+            if result == MYPY_TRUE:
+                # This condition is false at runtime; this will affect
+                # import priorities.
+                mark_block_mypy_only(s.body[i])
             for body in s.body[i + 1:]:
                 mark_block_unreachable(body)
             if s.else_body:
@@ -2998,7 +3244,8 @@ def infer_if_condition_value(expr: Expression, pyversion: Tuple[int, int], platf
     """Infer whether if condition is always true/false.
 
     Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false,
-    and TRUTH_VALUE_UNKNOWN otherwise.
+    MYPY_TRUE if true under mypy and false at runtime, MYPY_FALSE if
+    false under mypy and true at runtime, else TRUTH_VALUE_UNKNOWN.
     """
     name = ''
     negated = False
@@ -3022,12 +3269,9 @@ def infer_if_condition_value(expr: Expression, pyversion: Tuple[int, int], platf
         elif name == 'PY3':
             result = ALWAYS_TRUE if pyversion[0] == 3 else ALWAYS_FALSE
         elif name == 'MYPY' or name == 'TYPE_CHECKING':
-            result = ALWAYS_TRUE
+            result = MYPY_TRUE
     if negated:
-        if result == ALWAYS_TRUE:
-            result = ALWAYS_FALSE
-        elif result == ALWAYS_FALSE:
-            result = ALWAYS_TRUE
+        result = inverted_truth_mapping[result]
     return result
 
 
@@ -3202,6 +3446,23 @@ class MarkImportsUnreachableVisitor(TraverserVisitor):
         node.is_unreachable = True
 
 
+def mark_block_mypy_only(block: Block) -> None:
+    block.accept(MarkImportsMypyOnlyVisitor())
+
+
+class MarkImportsMypyOnlyVisitor(TraverserVisitor):
+    """Visitor that sets is_mypy_only (which affects priority)."""
+
+    def visit_import(self, node: Import) -> None:
+        node.is_mypy_only = True
+
+    def visit_import_from(self, node: ImportFrom) -> None:
+        node.is_mypy_only = True
+
+    def visit_import_all(self, node: ImportAll) -> None:
+        node.is_mypy_only = True
+
+
 def is_identity_signature(sig: Type) -> bool:
     """Is type a callable of form T -> T (where T is a type variable)?"""
     if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]:
diff --git a/mypy/solve.py b/mypy/solve.py
index 1ebeb92..07346f0 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -1,6 +1,7 @@
 """Type inference constraint solving"""
 
 from typing import List, Dict
+from collections import defaultdict
 
 from mypy.types import Type, Void, NoneTyp, AnyType, ErrorType, UninhabitedType, TypeVarId
 from mypy.constraints import Constraint, SUPERTYPE_OF
@@ -23,11 +24,9 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
     pick AnyType.
     """
     # Collect a list of constraints for each type variable.
-    cmap = {}  # type: Dict[TypeVarId, List[Constraint]]
+    cmap = defaultdict(list)  # type: Dict[TypeVarId, List[Constraint]]
     for con in constraints:
-        a = cmap.get(con.type_var, [])  # type: List[Constraint]
-        a.append(con)
-        cmap[con.type_var] = a
+        cmap[con.type_var].append(con)
 
     res = []  # type: List[Type]
 
diff --git a/mypy/stats.py b/mypy/stats.py
index e6c6111..5d6df35 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -12,8 +12,8 @@ from mypy.types import (
 )
 from mypy import nodes
 from mypy.nodes import (
-    Node, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr, MypyFile,
-    MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr
+    Expression, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr, MypyFile,
+    MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr, RefExpr
 )
 
 
@@ -31,7 +31,7 @@ precision_names = [
 
 
 class StatisticsVisitor(TraverserVisitor):
-    def __init__(self, inferred: bool, typemap: Dict[Node, Type] = None,
+    def __init__(self, inferred: bool, typemap: Dict[Expression, Type] = None,
                  all_nodes: bool = False) -> None:
         self.inferred = inferred
         self.typemap = typemap
@@ -101,7 +101,7 @@ class StatisticsVisitor(TraverserVisitor):
                 else:
                     items = [lvalue]
                 for item in items:
-                    if hasattr(item, 'is_def') and cast(Any, item).is_def:
+                    if isinstance(item, RefExpr) and item.is_def:
                         t = self.typemap.get(item)
                         if t:
                             self.type(t)
@@ -148,7 +148,7 @@ class StatisticsVisitor(TraverserVisitor):
         self.process_node(o)
         super().visit_unary_expr(o)
 
-    def process_node(self, node: Node) -> None:
+    def process_node(self, node: Expression) -> None:
         if self.all_nodes:
             typ = self.typemap.get(node)
             if typ:
@@ -198,7 +198,7 @@ class StatisticsVisitor(TraverserVisitor):
 
 
 def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False,
-                    typemap: Dict[Node, Type] = None) -> None:
+                    typemap: Dict[Expression, Type] = None) -> None:
     if is_special_module(path):
         return
     print(path)
@@ -265,7 +265,7 @@ def is_complex(t: Type) -> bool:
 html_files = []  # type: List[Tuple[str, str, int, int]]
 
 
-def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Node, Type],
+def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Expression, Type],
                          output_dir: str) -> None:
     if is_special_module(path):
         return
diff --git a/mypy/strconv.py b/mypy/strconv.py
index 5c8c37b..ba01ea8 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -3,7 +3,7 @@
 import re
 import os
 
-from typing import Any, List, Tuple, Optional, Union
+from typing import Any, List, Tuple, Optional, Union, Sequence
 
 from mypy.util import dump_tagged, short_type
 import mypy.nodes
@@ -11,7 +11,7 @@ from mypy.visitor import NodeVisitor
 
 
 class StrConv(NodeVisitor[str]):
-    """Visitor for converting a Node to a human-readable string.
+    """Visitor for converting a node to a human-readable string.
 
     For example, an MypyFile node from program '1' is converted into
     something like this:
@@ -21,16 +21,16 @@ class StrConv(NodeVisitor[str]):
         ExpressionStmt:1(
           IntExpr(1)))
     """
-    def dump(self, nodes: List[Any], obj: 'mypy.nodes.Node') -> str:
+    def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str:
         """Convert a list of items to a multiline pretty-printed string.
 
         The tag is produced from the type name of obj and its line
         number. See mypy.util.dump_tagged for a description of the nodes
         argument.
         """
-        return dump_tagged(nodes, short_type(obj) + ':' + str(obj.line))
+        return dump_tagged(nodes, short_type(obj) + ':' + str(obj.get_line()))
 
-    def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[Any]:
+    def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]:
         """Return a list in a format suitable for dump() that represents the
         arguments and the body of a function. The caller can then decorate the
         array with information specific to methods, global functions or
@@ -141,8 +141,6 @@ class StrConv(NodeVisitor[str]):
             a.insert(1, 'Metaclass({})'.format(o.metaclass))
         if o.decorators:
             a.insert(1, ('Decorators', o.decorators))
-        if o.is_builtinclass:
-            a.insert(1, 'Builtinclass')
         if o.info and o.info._promote:
             a.insert(1, 'Promote({})'.format(o.info._promote))
         if o.info and o.info.tuple_type:
@@ -347,7 +345,7 @@ class StrConv(NodeVisitor[str]):
     def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> str:
         if o.analyzed:
             return o.analyzed.accept(self)
-        args = []  # type: List[mypy.nodes.Node]
+        args = []  # type: List[mypy.nodes.Expression]
         extra = []  # type: List[Union[str, Tuple[str, List[Any]]]]
         for i, kind in enumerate(o.arg_kinds):
             if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]:
@@ -422,6 +420,10 @@ class StrConv(NodeVisitor[str]):
                                                   o.info.name(),
                                                   o.info.tuple_type)
 
+    def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> str:
+        return 'TypedDictExpr:{}({})'.format(o.line,
+                                             o.info.name())
+
     def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> str:
         return 'PromoteExpr:{}({})'.format(o.line, o.type)
 
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 2bf7965..d302adf 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -70,6 +70,7 @@ Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
                                  ('modules', List[str]),
                                  ('ignore_errors', bool),
                                  ('recursive', bool),
+                                 ('fast_parser', bool),
                                  ])
 
 
@@ -77,6 +78,7 @@ def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
                              add_header: bool = False, sigs: Dict[str, str] = {},
                              class_sigs: Dict[str, str] = {},
                              pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+                             fast_parser: bool = False,
                              no_import: bool = False,
                              search_path: List[str] = [],
                              interpreter: str = sys.executable) -> None:
@@ -103,7 +105,8 @@ def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
             target += '.pyi'
         target = os.path.join(output_dir, target)
         generate_stub(module_path, output_dir, module_all,
-                      target=target, add_header=add_header, module=module, pyversion=pyversion)
+                      target=target, add_header=add_header, module=module,
+                      pyversion=pyversion, fast_parser=fast_parser)
     if not quiet:
         print('Created %s' % target)
 
@@ -168,10 +171,12 @@ def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optiona
 
 def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None,
                   target: str = None, add_header: bool = False, module: str = None,
-                  pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+                  pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+                  fast_parser: bool = False) -> None:
     source = open(path, 'rb').read()
     options = MypyOptions()
     options.python_version = pyversion
+    options.fast_parser = fast_parser
     try:
         ast = mypy.parse.parse(source, fnam=path, errors=None, options=options)
     except mypy.errors.CompileError as e:
@@ -612,6 +617,7 @@ def main() -> None:
                                      sigs=sigs,
                                      class_sigs=class_sigs,
                                      pyversion=options.pyversion,
+                                     fast_parser=options.fast_parser,
                                      no_import=options.no_import,
                                      search_path=options.search_path,
                                      interpreter=options.interpreter)
@@ -631,6 +637,7 @@ def parse_options() -> Options:
     doc_dir = ''
     search_path = []  # type: List[str]
     interpreter = ''
+    fast_parser = False
     while args and args[0].startswith('-'):
         if args[0] == '--doc-dir':
             doc_dir = args[1]
@@ -645,6 +652,8 @@ def parse_options() -> Options:
             args = args[1:]
         elif args[0] == '--recursive':
             recursive = True
+        elif args[0] == '--fast-parser':
+            fast_parser = True
         elif args[0] == '--ignore-errors':
             ignore_errors = True
         elif args[0] == '--py2':
@@ -667,7 +676,8 @@ def parse_options() -> Options:
                    interpreter=interpreter,
                    modules=args,
                    ignore_errors=ignore_errors,
-                   recursive=recursive)
+                   recursive=recursive,
+                   fast_parser=fast_parser)
 
 
 def default_python2_interpreter() -> str:
@@ -695,6 +705,7 @@ def usage() -> None:
         Options:
           --py2           run in Python 2 mode (default: Python 3 mode)
           --recursive     traverse listed modules to generate inner package modules as well
+          --fast-parser   enable experimental fast parser
           --ignore-errors ignore errors when trying to generate stubs for modules
           --no-import     don't import the modules, just parse and analyze them
                           (doesn't work with C extension modules and doesn't
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index fff5df0..bfb3c4b 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1,4 +1,4 @@
-from typing import cast, List, Dict, Callable
+from typing import List, Dict, Callable
 
 from mypy.types import (
     Type, AnyType, UnboundType, TypeVisitor, ErrorType, Void, NoneTyp,
@@ -120,6 +120,8 @@ class SubtypeVisitor(TypeVisitor[bool]):
         if left.type.fallback_to_any:
             return True
         right = self.right
+        if isinstance(right, TupleType) and right.fallback.type.is_enum:
+            return is_subtype(left, right.fallback)
         if isinstance(right, Instance):
             if left.type._promote and is_subtype(left.type._promote,
                                                  self.right,
@@ -140,10 +142,9 @@ class SubtypeVisitor(TypeVisitor[bool]):
 
     def visit_type_var(self, left: TypeVarType) -> bool:
         right = self.right
-        if isinstance(right, TypeVarType):
-            return left.id == right.id
-        else:
-            return is_subtype(left.upper_bound, self.right)
+        if isinstance(right, TypeVarType) and left.id == right.id:
+            return True
+        return is_subtype(left.upper_bound, self.right)
 
     def visit_callable_type(self, left: CallableType) -> bool:
         right = self.right
@@ -333,7 +334,7 @@ def unify_generic_callable(type: CallableType, target: CallableType,
         return None
     msg = messages.temp_message_builder()
     applied = mypy.applytype.apply_generic_arguments(type, inferred_vars, msg, context=target)
-    if msg.is_errors() or not isinstance(applied, CallableType):
+    if msg.is_errors():
         return None
     return applied
 
@@ -344,7 +345,8 @@ def restrict_subtype_away(t: Type, s: Type) -> Type:
     Currently just remove elements of a union type.
     """
     if isinstance(t, UnionType):
-        new_items = [item for item in t.items if not is_subtype(item, s)]
+        new_items = [item for item in t.items if (not is_subtype(item, s)
+                                                  or isinstance(item, AnyType))]
         return UnionType.make_union(new_items)
     else:
         return t
diff --git a/mypy/traverser.py b/mypy/traverser.py
index d77b003..18b0d57 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -51,6 +51,10 @@ class TraverserVisitor(NodeVisitor[None]):
             item.accept(self)
 
     def visit_class_def(self, o: ClassDef) -> None:
+        for d in o.decorators:
+            d.accept(self)
+        for base in o.base_type_exprs:
+            base.accept(self)
         o.defs.accept(self)
 
     def visit_decorator(self, o: Decorator) -> None:
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index 100ff78..1384d13 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -17,7 +17,7 @@ from mypy.nodes import (
     SliceExpr, OpExpr, UnaryExpr, FuncExpr, TypeApplication, PrintStmt,
     SymbolTable, RefExpr, TypeVarExpr, NewTypeExpr, PromoteExpr,
     ComparisonExpr, TempNode, StarExpr, Statement, Expression,
-    YieldFromExpr, NamedTupleExpr, NonlocalDecl, SetComprehension,
+    YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SetComprehension,
     DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr,
     YieldExpr, ExecStmt, Argument, BackquoteExpr, AwaitExpr,
 )
@@ -159,16 +159,6 @@ class TransformVisitor(NodeVisitor[Node]):
         new.is_generator = original.is_generator
         new.line = original.line
 
-    def duplicate_inits(self,
-                        inits: List[AssignmentStmt]) -> List[AssignmentStmt]:
-        result = []  # type: List[AssignmentStmt]
-        for init in inits:
-            if init:
-                result.append(self.duplicate_assignment(init))
-            else:
-                result.append(None)
-        return result
-
     def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef:
         items = [self.visit_decorator(decorator)
                  for decorator in node.items]
@@ -190,7 +180,6 @@ class TransformVisitor(NodeVisitor[Node]):
         new.info = node.info
         new.decorators = [self.expr(decorator)
                           for decorator in node.decorators]
-        new.is_builtinclass = node.is_builtinclass
         return new
 
     def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl:
@@ -492,6 +481,9 @@ class TransformVisitor(NodeVisitor[Node]):
     def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr:
         return NamedTupleExpr(node.info)
 
+    def visit_typeddict_expr(self, node: TypedDictExpr) -> Node:
+        return TypedDictExpr(node.info)
+
     def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr:
         return PromoteExpr(node.type)
 
@@ -579,9 +571,6 @@ class TransformVisitor(NodeVisitor[Node]):
     def types(self, types: List[Type]) -> List[Type]:
         return [self.type(type) for type in types]
 
-    def optional_types(self, types: List[Type]) -> List[Type]:
-        return [self.optional_type(type) for type in types]
-
 
 class FuncMapInitializer(TraverserVisitor):
     """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs.
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 931cf7c..fc0d897 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1,14 +1,14 @@
 """Semantic analysis of types"""
 
-from typing import Callable, cast, List
+from typing import Callable, cast, List, Optional
 
 from mypy.types import (
     Type, UnboundType, TypeVarType, TupleType, UnionType, Instance,
     AnyType, CallableType, Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor,
-    StarType, PartialType, EllipsisType, UninhabitedType, TypeType
+    StarType, PartialType, EllipsisType, UninhabitedType, TypeType, get_typ_args, set_typ_args,
 )
 from mypy.nodes import (
-    BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
+    BOUND_TVAR, UNBOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
     TypeInfo, Context, SymbolTableNode, Var, Expression,
     IndexExpr, RefExpr
 )
@@ -40,6 +40,10 @@ def analyze_type_alias(node: Expression,
     # that we don't support straight string literals as type aliases
     # (only string literals within index expressions).
     if isinstance(node, RefExpr):
+        if node.kind == UNBOUND_TVAR or node.kind == BOUND_TVAR:
+            fail_func('Type variable "{}" is invalid as target for type alias'.format(
+                node.fullname), node)
+            return None
         if not (isinstance(node.node, TypeInfo) or
                 node.fullname == 'typing.Any' or
                 node.kind == TYPE_ALIAS):
@@ -48,7 +52,8 @@ def analyze_type_alias(node: Expression,
         base = node.base
         if isinstance(base, RefExpr):
             if not (isinstance(base.node, TypeInfo) or
-                    base.fullname in type_constructors):
+                    base.fullname in type_constructors or
+                    base.kind == TYPE_ALIAS):
                 return None
         else:
             return None
@@ -61,7 +66,7 @@ def analyze_type_alias(node: Expression,
     except TypeTranslationError:
         fail_func('Invalid type alias', node)
         return None
-    analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, fail_func)
+    analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, fail_func, aliasing=True)
     return type.accept(analyzer)
 
 
@@ -74,10 +79,12 @@ class TypeAnalyser(TypeVisitor[Type]):
     def __init__(self,
                  lookup_func: Callable[[str, Context], SymbolTableNode],
                  lookup_fqn_func: Callable[[str], SymbolTableNode],
-                 fail_func: Callable[[str, Context], None]) -> None:
+                 fail_func: Callable[[str, Context], None], *,
+                 aliasing: bool = False) -> None:
         self.lookup = lookup_func
         self.lookup_fqn_func = lookup_fqn_func
         self.fail = fail_func
+        self.aliasing = aliasing
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
         if t.optional:
@@ -112,9 +119,9 @@ class TypeAnalyser(TypeVisitor[Type]):
                     return self.builtin_type('builtins.tuple')
                 if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                     # Tuple[T, ...] (uniform, variable-length tuple)
-                    node = self.lookup_fqn_func('builtins.tuple')
-                    tuple_info = cast(TypeInfo, node.node)
-                    return Instance(tuple_info, [t.args[0].accept(self)], t.line)
+                    instance = self.builtin_type('builtins.tuple', [t.args[0].accept(self)])
+                    instance.line = t.line
+                    return instance
                 return self.tuple_type(self.anal_array(t.args))
             elif fullname == 'typing.Union':
                 items = self.anal_array(t.args)
@@ -141,8 +148,22 @@ class TypeAnalyser(TypeVisitor[Type]):
                 item = items[0]
                 return TypeType(item, line=t.line)
             elif sym.kind == TYPE_ALIAS:
-                # TODO: Generic type aliases.
-                return sym.type_override
+                override = sym.type_override
+                an_args = self.anal_array(t.args)
+                all_vars = self.get_type_var_names(override)
+                exp_len = len(all_vars)
+                act_len = len(an_args)
+                if exp_len > 0 and act_len == 0:
+                    # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...]
+                    return self.replace_alias_tvars(override, all_vars, [AnyType()] * exp_len,
+                                                    t.line, t.column)
+                if exp_len == 0 and act_len == 0:
+                    return override
+                if act_len != exp_len:
+                    self.fail('Bad number of arguments for type alias, expected: %s, given: %s'
+                              % (exp_len, act_len), t)
+                    return t
+                return self.replace_alias_tvars(override, all_vars, an_args, t.line, t.column)
             elif not isinstance(sym.node, TypeInfo):
                 name = sym.fullname
                 if name is None:
@@ -153,7 +174,9 @@ class TypeAnalyser(TypeVisitor[Type]):
                     # as a base class -- however, this will fail soon at runtime so the problem
                     # is pretty minor.
                     return AnyType()
-                self.fail('Invalid type "{}"'.format(name), t)
+                # Allow unbound type variables when defining an alias
+                if not (self.aliasing and sym.kind == UNBOUND_TVAR):
+                    self.fail('Invalid type "{}"'.format(name), t)
                 return t
             info = sym.node  # type: TypeInfo
             if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
@@ -166,7 +189,7 @@ class TypeAnalyser(TypeVisitor[Type]):
                 # checked only later, since we do not always know the
                 # valid count at this point. Thus we may construct an
                 # Instance with an invalid number of type arguments.
-                instance = Instance(info, self.anal_array(t.args), t.line)
+                instance = Instance(info, self.anal_array(t.args), t.line, t.column)
                 tup = info.tuple_type
                 if tup is None:
                     return instance
@@ -181,6 +204,54 @@ class TypeAnalyser(TypeVisitor[Type]):
         else:
             return AnyType()
 
+    def get_type_var_names(self, tp: Type) -> List[str]:
+        """Get all type variable names that are present in a generic type alias
+        in order of textual appearance (recursively, if needed).
+        """
+        tvars = []  # type: List[str]
+        typ_args = get_typ_args(tp)
+        for arg in typ_args:
+            tvar = self.get_tvar_name(arg)
+            if tvar:
+                tvars.append(tvar)
+            else:
+                subvars = self.get_type_var_names(arg)
+                if subvars:
+                    tvars.extend(subvars)
+        # Get unique type variables in order of appearance
+        all_tvars = set(tvars)
+        new_tvars = []
+        for t in tvars:
+            if t in all_tvars:
+                new_tvars.append(t)
+                all_tvars.remove(t)
+        return new_tvars
+
+    def get_tvar_name(self, t: Type) -> Optional[str]:
+        if not isinstance(t, UnboundType):
+            return None
+        sym = self.lookup(t.name, t)
+        if sym is not None and (sym.kind == UNBOUND_TVAR or sym.kind == BOUND_TVAR):
+            return t.name
+        return None
+
+    def replace_alias_tvars(self, tp: Type, vars: List[str], subs: List[Type],
+                            newline: int, newcolumn: int) -> Type:
+        """Replace type variables in a generic type alias tp with substitutions subs
+        resetting context. Length of subs should be already checked.
+        """
+        typ_args = get_typ_args(tp)
+        new_args = typ_args[:]
+        for i, arg in enumerate(typ_args):
+            tvar = self.get_tvar_name(arg)
+            if tvar and tvar in vars:
+                # Perform actual substitution...
+                new_args[i] = subs[vars.index(tvar)]
+            else:
+                # ...recursively, if needed.
+                new_args[i] = self.replace_alias_tvars(arg, vars, subs, newline, newcolumn)
+        return set_typ_args(tp, new_args, newline, newcolumn)
+
     def visit_any(self, t: AnyType) -> Type:
         return t
 
@@ -291,8 +362,8 @@ class TypeAnalyser(TypeVisitor[Type]):
 
     def builtin_type(self, fully_qualified_name: str, args: List[Type] = None) -> Instance:
         node = self.lookup_fqn_func(fully_qualified_name)
-        info = cast(TypeInfo, node.node)
-        return Instance(info, args or [])
+        assert isinstance(node.node, TypeInfo)
+        return Instance(node.node, args or [])
 
     def tuple_type(self, items: List[Type]) -> TupleType:
         return TupleType(items, fallback=self.builtin_type('builtins.tuple', [AnyType()]))
@@ -345,9 +416,10 @@ class TypeAnalyserPass3(TypeVisitor[None]):
             # otherwise the type checker may crash as it expects
             # things to be right.
             t.args = [AnyType() for _ in info.type_vars]
+            t.invalid = True
         elif info.defn.type_vars:
             # Check type argument values.
-            for arg, TypeVar in zip(t.args, info.defn.type_vars):
+            for (i, arg), TypeVar in zip(enumerate(t.args), info.defn.type_vars):
                 if TypeVar.values:
                     if isinstance(arg, TypeVarType):
                         arg_values = arg.values
@@ -359,7 +431,7 @@ class TypeAnalyserPass3(TypeVisitor[None]):
                     else:
                         arg_values = [arg]
                     self.check_type_var_values(info, arg_values,
-                                               TypeVar.values, t)
+                                               TypeVar.values, i + 1, t)
                 if not satisfies_upper_bound(arg, TypeVar.upper_bound):
                     self.fail('Type argument "{}" of "{}" must be '
                               'a subtype of "{}"'.format(
@@ -368,12 +440,16 @@ class TypeAnalyserPass3(TypeVisitor[None]):
             arg.accept(self)
 
     def check_type_var_values(self, type: TypeInfo, actuals: List[Type],
-                              valids: List[Type], context: Context) -> None:
+                              valids: List[Type], arg_number: int, context: Context) -> None:
         for actual in actuals:
             if (not isinstance(actual, AnyType) and
                     not any(is_same_type(actual, value) for value in valids)):
-                self.fail('Invalid type argument value for "{}"'.format(
-                    type.name()), context)
+                if len(actuals) > 1 or not isinstance(actual, Instance):
+                    self.fail('Invalid type argument value for "{}"'.format(
+                        type.name()), context)
+                else:
+                    self.fail('Type argument {} of "{}" has incompatible value "{}"'.format(
+                        arg_number, type.name(), actual.type.name()), context)
 
     def visit_callable_type(self, t: CallableType) -> None:
         t.ret_type.accept(self)
diff --git a/mypy/types.py b/mypy/types.py
index 09e473d..34c1ff2 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -181,6 +181,7 @@ class UnboundType(Type):
     optional = False
     # is this type a return type?
     is_ret_type = False
+
     # special case for X[()]
     empty_tuple_index = False
 
@@ -211,7 +212,7 @@ class UnboundType(Type):
                 }
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'UnboundType':
+    def deserialize(cls, data: JsonDict) -> 'UnboundType':
         assert data['.class'] == 'UnboundType'
         return UnboundType(data['name'],
                            [Type.deserialize(a) for a in data['args']])
@@ -247,7 +248,7 @@ class TypeList(Type):
                 }
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'TypeList':
+    def deserialize(cls, data: JsonDict) -> 'TypeList':
         assert data['.class'] == 'TypeList'
         return TypeList([Type.deserialize(t) for t in data['items']])
 
@@ -365,7 +366,7 @@ class NoneTyp(Type):
                 }
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'NoneTyp':
+    def deserialize(cls, data: JsonDict) -> 'NoneTyp':
         assert data['.class'] == 'NoneTyp'
         return NoneTyp(is_ret_type=data['is_ret_type'])
 
@@ -401,7 +402,7 @@ class DeletedType(Type):
                 'source': self.source}
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'DeletedType':
+    def deserialize(cls, data: JsonDict) -> 'DeletedType':
         assert data['.class'] == 'DeletedType'
         return DeletedType(data['source'])
 
@@ -414,7 +415,8 @@ class Instance(Type):
 
     type = None  # type: mypy.nodes.TypeInfo
     args = None  # type: List[Type]
-    erased = False      # True if result of type variable substitution
+    erased = False  # True if result of type variable substitution
+    invalid = False  # True if recovered after incorrect number of type arguments error
 
     def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type],
                  line: int = -1, column: int = -1, erased: bool = False) -> None:
@@ -575,6 +577,7 @@ class CallableType(FunctionLike):
                  ) -> None:
         if variables is None:
             variables = []
+        assert len(arg_types) == len(arg_kinds)
         self.arg_types = arg_types
         self.arg_kinds = arg_kinds
         self.arg_names = arg_names
@@ -746,7 +749,7 @@ class Overloaded(FunctionLike):
                 }
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'Overloaded':
+    def deserialize(cls, data: JsonDict) -> 'Overloaded':
         assert data['.class'] == 'Overloaded'
         return Overloaded([CallableType.deserialize(t) for t in data['items']])
 
@@ -879,7 +882,7 @@ class UnionType(Type):
                 items[i] = true_or_false(ti)
 
         simplified_set = [items[i] for i in range(len(items)) if i not in removed]
-        return UnionType.make_union(simplified_set)
+        return UnionType.make_union(simplified_set, line, column)
 
     def length(self) -> int:
         return len(self.items)
@@ -955,7 +958,7 @@ class EllipsisType(Type):
         return {'.class': 'EllipsisType'}
 
     @classmethod
-    def deserialize(self, data: JsonDict) -> 'EllipsisType':
+    def deserialize(cls, data: JsonDict) -> 'EllipsisType':
         assert data['.class'] == 'EllipsisType'
         return EllipsisType()
 
@@ -1438,14 +1441,6 @@ def strip_type(typ: Type) -> Type:
         return typ
 
 
-def replace_leading_arg_type(t: CallableType, self_type: Type) -> CallableType:
-    """Return a copy of a callable type with a different self argument type.
-
-    Assume that the callable is the signature of a method.
-    """
-    return t.copy_modified(arg_types=[self_type] + t.arg_types[1:])
-
-
 def is_named_instance(t: Type, fullname: str) -> bool:
     return (isinstance(t, Instance) and
             t.type is not None and
@@ -1507,3 +1502,51 @@ def true_or_false(t: Type) -> Type:
     new_t.can_be_true = type(new_t).can_be_true
     new_t.can_be_false = type(new_t).can_be_false
     return new_t
+
+
+def function_type(func: mypy.nodes.FuncBase, fallback: Instance) -> FunctionLike:
+    if func.type:
+        assert isinstance(func.type, FunctionLike)
+        return func.type
+    else:
+        # Implicit type signature with dynamic types.
+        # Overloaded functions always have a signature, so func must be an ordinary function.
+        assert isinstance(func, mypy.nodes.FuncItem), str(func)
+        fdef = cast(mypy.nodes.FuncItem, func)
+        name = func.name()
+        if name:
+            name = '"{}"'.format(name)
+
+        return CallableType(
+            [AnyType()] * len(fdef.arg_names),
+            fdef.arg_kinds,
+            fdef.arg_names,
+            AnyType(),
+            fallback,
+            name,
+            implicit=True,
+        )
+
+
+def get_typ_args(tp: Type) -> List[Type]:
+    """Get all type arguments from a parameterizable Type."""
+    if not isinstance(tp, (Instance, UnionType, TupleType, CallableType)):
+        return []
+    typ_args = (tp.args if isinstance(tp, Instance) else
+                tp.items if not isinstance(tp, CallableType) else
+                tp.arg_types + [tp.ret_type])
+    return typ_args
+
+
+def set_typ_args(tp: Type, new_args: List[Type], line: int = -1, column: int = -1) -> Type:
+    """Return a copy of a parameterizable Type with arguments set to new_args."""
+    if isinstance(tp, Instance):
+        return Instance(tp.type, new_args, line, column)
+    if isinstance(tp, TupleType):
+        return tp.copy_modified(items=new_args)
+    if isinstance(tp, UnionType):
+        return UnionType.make_simplified_union(new_args, line, column)
+    if isinstance(tp, CallableType):
+        return tp.copy_modified(arg_types=new_args[:-1], ret_type=new_args[-1],
+                                line=line, column=column)
+    return tp
diff --git a/mypy/util.py b/mypy/util.py
index c5b635e..e5c9e5e 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -2,7 +2,8 @@
 
 import re
 import subprocess
-from typing import TypeVar, List, Any, Tuple, Optional
+from xml.sax.saxutils import escape
+from typing import TypeVar, List, Tuple, Optional, Sequence
 
 
 T = TypeVar('T')
@@ -51,7 +52,7 @@ def array_repr(a: List[T]) -> List[str]:
     return aa
 
 
-def dump_tagged(nodes: List[Any], tag: str) -> str:
+def dump_tagged(nodes: Sequence[object], tag: str) -> str:
     """Convert an array into a pretty-printed multiline string representation.
 
     The format is
@@ -113,3 +114,39 @@ def try_find_python2_interpreter() -> Optional[str]:
         except OSError:
             pass
     return None
+
+
+PASS_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="0" failures="0" name="mypy" skips="0" tests="1" time="{time:.3f}">
+  <testcase classname="mypy" file="mypy" line="1" name="mypy" time="{time:.3f}">
+  </testcase>
+</testsuite>
+"""
+
+FAIL_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="0" failures="1" name="mypy" skips="0" tests="1" time="{time:.3f}">
+  <testcase classname="mypy" file="mypy" line="1" name="mypy" time="{time:.3f}">
+    <failure message="mypy produced messages">{text}</failure>
+  </testcase>
+</testsuite>
+"""
+
+ERROR_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="1" failures="0" name="mypy" skips="0" tests="1" time="{time:.3f}">
+  <testcase classname="mypy" file="mypy" line="1" name="mypy" time="{time:.3f}">
+    <error message="mypy produced errors">{text}</error>
+  </testcase>
+</testsuite>
+"""
+
+
+def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str) -> None:
+    """XXX"""
+    if not messages and not serious:
+        xml = PASS_TEMPLATE.format(time=dt)
+    elif not serious:
+        xml = FAIL_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt)
+    else:
+        xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt)
+    with open(path, 'wb') as f:
+        f.write(xml.encode('utf-8'))
diff --git a/mypy/version.py b/mypy/version.py
index 68eb9b6..551168e 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1 +1,12 @@
-__version__ = '0.4.5'
+import os
+from mypy import git
+
+__version__ = '0.4.6'
+base_version = __version__
+
+mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+if git.is_git_repo(mypy_dir) and git.have_git():
+    __version__ += '-' + git.git_revision(mypy_dir).decode('utf-8')
+    if git.is_dirty(mypy_dir):
+        __version__ += '-dirty'
+del mypy_dir
diff --git a/mypy/visitor.py b/mypy/visitor.py
index b4c2cc8..33f287b 100644
--- a/mypy/visitor.py
+++ b/mypy/visitor.py
@@ -225,6 +225,9 @@ class NodeVisitor(Generic[T]):
     def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T:
         pass
 
+    def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T:
+        pass
+
     def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T:
         pass
 
diff --git a/setup.cfg b/setup.cfg
index ea994ef..dfa2885 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -11,3 +11,11 @@ exclude = mypy/codec/*
 #   E704: multiple statements on one line (def)
 #   E402: module level import not at top of file
 ignore = E251,E128,F401,W601,E701,W503,E704,E402
+
+[coverage:run]
+branch = true
+source = mypy
+parallel = true
+
+[coverage:report]
+show_missing = true
diff --git a/setup.py b/setup.py
index 3733f72..374f76b 100644
--- a/setup.py
+++ b/setup.py
@@ -11,12 +11,12 @@ if sys.version_info < (3, 2, 0):
 
 from distutils.core import setup
 from distutils.command.build_py import build_py
-from mypy.version import __version__
+from mypy.version import base_version
 from mypy import git
 
 git.verify_git_integrity_or_abort(".")
 
-version = __version__
+version = base_version
 description = 'Optional static typing for Python'
 long_description = '''
 Mypy -- Optional Static Typing for Python
@@ -31,19 +31,6 @@ types.
 '''.lstrip()
 
 
-def cache_version_id():
-    """Returns the version id to use for the incremental hash.
-
-    If setup.py is run from a git repo, the git commit hash will be
-    included if possible. If not, then this function will fall back to
-    using the default version id from mypy/version.py."""
-    if git.is_git_repo('.') and git.have_git():
-        return __version__ + '-' + git.git_revision('.').decode('utf-8')
-    else:
-        # Default fallback
-        return __version__
-
-
 def find_data_files(base, globs):
     """Find all interesting data files, for setup(data_files=)
 
@@ -71,7 +58,7 @@ class CustomPythonBuild(build_py):
         path = os.path.join(self.build_lib, 'mypy')
         self.mkpath(path)
         with open(os.path.join(path, 'version.py'), 'w') as stream:
-            stream.write('__version__ = "{}"\n'.format(cache_version_id()))
+            stream.write('__version__ = "{}"\n'.format(version))
 
     def run(self):
         self.execute(self.pin_version, ())
@@ -97,6 +84,7 @@ classifiers = [
     'Topic :: Software Development',
 ]
 
+
 package_dir = {'mypy': 'mypy'}
 if sys.version_info < (3, 5, 0):
     package_dir[''] = 'lib-typing/3.2'
diff --git a/typeshed/stdlib/2.7/xml/sax/__init__.pyi b/typeshed/stdlib/2.7/xml/sax/__init__.pyi
deleted file mode 100644
index 9e3d009..0000000
--- a/typeshed/stdlib/2.7/xml/sax/__init__.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-# Stubs for xml.sax (Python 2.7)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-class SAXException(Exception):
-    def __init__(self, msg, exception=None): ...
-    def getMessage(self): ...
-    def getException(self): ...
-    def __getitem__(self, ix): ...
-
-class SAXParseException(SAXException):
-    def __init__(self, msg, exception, locator): ...
-    def getColumnNumber(self): ...
-    def getLineNumber(self): ...
-    def getPublicId(self): ...
-    def getSystemId(self): ...
-
-class SAXNotRecognizedException(SAXException): ...
-class SAXNotSupportedException(SAXException): ...
-class SAXReaderNotAvailable(SAXNotSupportedException): ...
diff --git a/typeshed/stdlib/2.7/BaseHTTPServer.pyi b/typeshed/stdlib/2/BaseHTTPServer.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/BaseHTTPServer.pyi
rename to typeshed/stdlib/2/BaseHTTPServer.pyi
diff --git a/typeshed/stdlib/2.7/ConfigParser.pyi b/typeshed/stdlib/2/ConfigParser.pyi
similarity index 93%
rename from typeshed/stdlib/2.7/ConfigParser.pyi
rename to typeshed/stdlib/2/ConfigParser.pyi
index 5fdf380..ae6915e 100644
--- a/typeshed/stdlib/2.7/ConfigParser.pyi
+++ b/typeshed/stdlib/2/ConfigParser.pyi
@@ -14,30 +14,25 @@ class Error(Exception):
 
 class NoSectionError(Error):
     section = ... # type: str
-    args = ... # type: Tuple[str]
     def __init__(self, section: str) -> None: ...
 
 class DuplicateSectionError(Error):
     section = ... # type: str
-    args = ... # type: Tuple[str]
     def __init__(self, section: str) -> None: ...
 
 class NoOptionError(Error):
     section = ... # type: str
     option = ... # type: str
-    args = ... # type: Tuple[str,str]
     def __init__(self, option: str, section: str) -> None: ...
 
 class InterpolationError(Error):
     section = ... # type: str
     option = ... # type: str
     msg = ... # type: str
-    args = ... # type: Tuple[str,str,str]
     def __init__(self, option: str, section: str, msg: str) -> None: ...
 
 class InterpolationMissingOptionError(InterpolationError):
     reference = ... # type: str
-    args = ... # type: Tuple[str,str,str,str]
     def __init__(self, option: str, section: str, rawval: str, reference: str) -> None: ...
 
 class InterpolationSyntaxError(InterpolationError): ...
@@ -48,14 +43,12 @@ class InterpolationDepthError(InterpolationError):
 class ParsingError(Error):
     filename = ... # type: str
     errors = ... # type: list[Tuple[Any,Any]]
-    args = ... # type: Tuple[str]
     def __init__(self, filename: str) -> None: ...
     def append(self, lineno: Any, line: Any) -> None: ...
 
 class MissingSectionHeaderError(ParsingError):
     lineno = ... # type: Any
     line = ... # type: Any
-    args = ... # type: Tuple[str,Any,Any]
     def __init__(self, filename: str, lineno: Any, line: Any) -> None: ...
 
 
diff --git a/typeshed/stdlib/2.7/Cookie.pyi b/typeshed/stdlib/2/Cookie.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/Cookie.pyi
rename to typeshed/stdlib/2/Cookie.pyi
diff --git a/typeshed/stdlib/2.7/HTMLParser.pyi b/typeshed/stdlib/2/HTMLParser.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/HTMLParser.pyi
rename to typeshed/stdlib/2/HTMLParser.pyi
diff --git a/typeshed/stdlib/2.7/Queue.pyi b/typeshed/stdlib/2/Queue.pyi
similarity index 75%
rename from typeshed/stdlib/2.7/Queue.pyi
rename to typeshed/stdlib/2/Queue.pyi
index 61ba510..e1e41fb 100644
--- a/typeshed/stdlib/2.7/Queue.pyi
+++ b/typeshed/stdlib/2/Queue.pyi
@@ -1,6 +1,6 @@
 # Stubs for Queue (Python 2)
 
-from typing import Any, TypeVar, Generic
+from typing import Any, TypeVar, Generic, Optional
 
 _T = TypeVar('_T')
 
@@ -20,9 +20,9 @@ class Queue(Generic[_T]):
     def qsize(self) -> int: ...
     def empty(self) -> bool: ...
     def full(self) -> bool: ...
-    def put(self, item: _T, block: bool = ..., timeout: float = ...) -> None: ...
+    def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ...
     def put_nowait(self, item: _T) -> None: ...
-    def get(self, block: bool = ..., timeout: float = ...) -> _T: ...
+    def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ...
     def get_nowait(self) -> _T: ...
 
 class PriorityQueue(Queue): ...
diff --git a/typeshed/stdlib/2.7/SocketServer.pyi b/typeshed/stdlib/2/SocketServer.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/SocketServer.pyi
rename to typeshed/stdlib/2/SocketServer.pyi
diff --git a/typeshed/stdlib/2.7/StringIO.pyi b/typeshed/stdlib/2/StringIO.pyi
similarity index 97%
rename from typeshed/stdlib/2.7/StringIO.pyi
rename to typeshed/stdlib/2/StringIO.pyi
index 3c90129..17fe7a1 100644
--- a/typeshed/stdlib/2.7/StringIO.pyi
+++ b/typeshed/stdlib/2/StringIO.pyi
@@ -6,6 +6,7 @@ class StringIO(IO[AnyStr], Generic[AnyStr]):
     closed = ... # type: bool
     softspace = ... # type: int
     len = ... # type: int
+    name = ... # type: str
     def __init__(self, buf: AnyStr = ...) -> None: ...
     def __iter__(self) -> Iterator[AnyStr]: ...
     def next(self) -> AnyStr: ...
diff --git a/typeshed/stdlib/2.7/UserDict.pyi b/typeshed/stdlib/2/UserDict.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/UserDict.pyi
rename to typeshed/stdlib/2/UserDict.pyi
diff --git a/typeshed/stdlib/2.7/UserList.pyi b/typeshed/stdlib/2/UserList.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/UserList.pyi
rename to typeshed/stdlib/2/UserList.pyi
diff --git a/typeshed/stdlib/2.7/UserString.pyi b/typeshed/stdlib/2/UserString.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/UserString.pyi
rename to typeshed/stdlib/2/UserString.pyi
diff --git a/typeshed/stdlib/2.7/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
similarity index 98%
rename from typeshed/stdlib/2.7/__builtin__.pyi
rename to typeshed/stdlib/2/__builtin__.pyi
index 9e3dd33..bedc1c4 100644
--- a/typeshed/stdlib/2.7/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -21,12 +21,13 @@ _T1 = TypeVar('_T1')
 _T2 = TypeVar('_T2')
 _T3 = TypeVar('_T3')
 _T4 = TypeVar('_T4')
+_TT = TypeVar('_TT', bound='type')
 
-staticmethod = object()  # Special, only valid as a decorator.
-classmethod = object()  # Special, only valid as a decorator.
+class staticmethod: pass   # Special, only valid as a decorator.
+class classmethod: pass  # Special, only valid as a decorator.
 
 class object:
-    __doc__ = ...  # type: str
+    __doc__ = ...  # type: Optional[str]
     __class__ = ...  # type: type
 
     def __init__(self) -> None: ...
@@ -60,7 +61,7 @@ class type:
     # Note: the documentation doesnt specify what the return type is, the standard
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
-    def __subclasses__(self) -> List[type]: ...
+    def __subclasses__(self: _TT) -> List[_TT]: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     @overload
@@ -552,7 +553,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def viewitems(self) -> ItemsView[_KT, _VT]: ...
     @staticmethod
     @overload
-    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method (mypy/issues#328)
     @staticmethod
     @overload
     def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
@@ -832,7 +833,7 @@ class memoryview(Sized, Container[bytes]):
     def tolist(self) -> List[int]: ...
 
 class BaseException:
-    args = ...  # type: Any
+    args = ...  # type: Tuple[Any, ...]
     message = ...  # type: str
     def __init__(self, *args: Any) -> None: ...
     def with_traceback(self, tb: Any) -> BaseException: ...
@@ -912,7 +913,7 @@ class file(BinaryIO):
     @overload
     def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
     @overload
-    def __init__(file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def read(self, n: int = ...) -> str: ...
     def __enter__(self) -> BinaryIO: ...
diff --git a/typeshed/stdlib/2.7/__future__.pyi b/typeshed/stdlib/2/__future__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/__future__.pyi
rename to typeshed/stdlib/2/__future__.pyi
diff --git a/typeshed/stdlib/2.7/_ast.pyi b/typeshed/stdlib/2/_ast.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_ast.pyi
rename to typeshed/stdlib/2/_ast.pyi
diff --git a/typeshed/stdlib/2.7/_codecs.pyi b/typeshed/stdlib/2/_codecs.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_codecs.pyi
rename to typeshed/stdlib/2/_codecs.pyi
diff --git a/typeshed/stdlib/2.7/_collections.pyi b/typeshed/stdlib/2/_collections.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_collections.pyi
rename to typeshed/stdlib/2/_collections.pyi
diff --git a/typeshed/stdlib/2.7/_functools.pyi b/typeshed/stdlib/2/_functools.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_functools.pyi
rename to typeshed/stdlib/2/_functools.pyi
diff --git a/typeshed/stdlib/2.7/_hotshot.pyi b/typeshed/stdlib/2/_hotshot.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_hotshot.pyi
rename to typeshed/stdlib/2/_hotshot.pyi
diff --git a/typeshed/stdlib/2.7/_io.pyi b/typeshed/stdlib/2/_io.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_io.pyi
rename to typeshed/stdlib/2/_io.pyi
diff --git a/typeshed/stdlib/2.7/_json.pyi b/typeshed/stdlib/2/_json.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_json.pyi
rename to typeshed/stdlib/2/_json.pyi
diff --git a/typeshed/stdlib/2.7/_md5.pyi b/typeshed/stdlib/2/_md5.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_md5.pyi
rename to typeshed/stdlib/2/_md5.pyi
diff --git a/typeshed/stdlib/2.7/_random.pyi b/typeshed/stdlib/2/_random.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_random.pyi
rename to typeshed/stdlib/2/_random.pyi
diff --git a/typeshed/stdlib/2.7/_sha.pyi b/typeshed/stdlib/2/_sha.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_sha.pyi
rename to typeshed/stdlib/2/_sha.pyi
diff --git a/typeshed/stdlib/2.7/_sha256.pyi b/typeshed/stdlib/2/_sha256.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_sha256.pyi
rename to typeshed/stdlib/2/_sha256.pyi
diff --git a/typeshed/stdlib/2.7/_sha512.pyi b/typeshed/stdlib/2/_sha512.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_sha512.pyi
rename to typeshed/stdlib/2/_sha512.pyi
diff --git a/typeshed/stdlib/2.7/_socket.pyi b/typeshed/stdlib/2/_socket.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_socket.pyi
rename to typeshed/stdlib/2/_socket.pyi
diff --git a/typeshed/stdlib/2.7/_sre.pyi b/typeshed/stdlib/2/_sre.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_sre.pyi
rename to typeshed/stdlib/2/_sre.pyi
diff --git a/typeshed/stdlib/2.7/_struct.pyi b/typeshed/stdlib/2/_struct.pyi
similarity index 90%
rename from typeshed/stdlib/2.7/_struct.pyi
rename to typeshed/stdlib/2/_struct.pyi
index a9048b7..9aea0a9 100644
--- a/typeshed/stdlib/2.7/_struct.pyi
+++ b/typeshed/stdlib/2/_struct.pyi
@@ -9,7 +9,7 @@ class Struct(object):
     format = ...  # type: str
 
     def __init__(self, fmt: str) -> None: ...
-    def pack_into(buffer: bytearray, offset: int, obj: Any) -> None: ...
+    def pack_into(self, buffer: bytearray, offset: int, obj: Any) -> None: ...
     def pack(self, *args) -> str: ...
     def unpack(self, s:str) -> Tuple[Any]: ...
     def unpack_from(self, buffer: bytearray, offset:int = ...) -> Tuple[Any]: ...
diff --git a/typeshed/stdlib/2.7/_symtable.pyi b/typeshed/stdlib/2/_symtable.pyi
similarity index 93%
rename from typeshed/stdlib/2.7/_symtable.pyi
rename to typeshed/stdlib/2/_symtable.pyi
index b2c9290..fd596d2 100644
--- a/typeshed/stdlib/2.7/_symtable.pyi
+++ b/typeshed/stdlib/2/_symtable.pyi
@@ -36,6 +36,6 @@ class symtable(object):
     type = ...  # type: int
     varnames = ...  # type: List[str]
 
-    def __init__(src: str, filename: str, startstr: str) -> None: ...
+    def __init__(self, src: str, filename: str, startstr: str) -> None: ...
 
 
diff --git a/typeshed/stdlib/2.7/_warnings.pyi b/typeshed/stdlib/2/_warnings.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_warnings.pyi
rename to typeshed/stdlib/2/_warnings.pyi
diff --git a/typeshed/stdlib/2.7/_weakref.pyi b/typeshed/stdlib/2/_weakref.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_weakref.pyi
rename to typeshed/stdlib/2/_weakref.pyi
diff --git a/typeshed/stdlib/2.7/_weakrefset.pyi b/typeshed/stdlib/2/_weakrefset.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/_weakrefset.pyi
rename to typeshed/stdlib/2/_weakrefset.pyi
diff --git a/typeshed/stdlib/2.7/abc.pyi b/typeshed/stdlib/2/abc.pyi
similarity index 95%
rename from typeshed/stdlib/2.7/abc.pyi
rename to typeshed/stdlib/2/abc.pyi
index 0eb26f0..3ae3b52 100644
--- a/typeshed/stdlib/2.7/abc.pyi
+++ b/typeshed/stdlib/2/abc.pyi
@@ -12,7 +12,6 @@ def abstractmethod(funcobj: Any) -> Any: ...
 class ABCMeta(type):
     # TODO: FrozenSet
     __abstractmethods__ = ...  # type: Set[Any]
-    __doc__ = ...  # type: str
     _abc_cache = ...  # type: _weakrefset.WeakSet
     _abc_invalidation_counter = ...  # type: int
     _abc_negative_cache = ...  # type: _weakrefset.WeakSet
@@ -31,7 +30,6 @@ class _C:
 # TODO: The real abc.abstractproperty inherits from "property".
 class abstractproperty(object):
     def __new__(cls, func: Any) -> Any: ...
-    __doc__ = ...  # type: str
     __isabstractmethod__ = ...  # type: bool
     doc = ...  # type: Any
     fdel = ...  # type: Any
diff --git a/typeshed/stdlib/2.7/array.pyi b/typeshed/stdlib/2/array.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/array.pyi
rename to typeshed/stdlib/2/array.pyi
diff --git a/typeshed/stdlib/2.7/ast.pyi b/typeshed/stdlib/2/ast.pyi
similarity index 96%
rename from typeshed/stdlib/2.7/ast.pyi
rename to typeshed/stdlib/2/ast.pyi
index 6e11445..5bad0bc 100644
--- a/typeshed/stdlib/2.7/ast.pyi
+++ b/typeshed/stdlib/2/ast.pyi
@@ -33,12 +33,10 @@ def literal_eval(node_or_string: Union[str, AST]) -> Any: ...
 def walk(node: AST) -> Iterator[AST]: ...
 
 class NodeVisitor():
-    __doc__ = ...  # type: str
     def visit(self, node: AST) -> Any: ...
     def generic_visit(self, node: AST) -> None: ...
 
 class NodeTransformer(NodeVisitor):
-    __doc__ = ...  # type: str
     def generic_visit(self, node: AST) -> None: ...
 
 
diff --git a/typeshed/stdlib/2.7/atexit.pyi b/typeshed/stdlib/2/atexit.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/atexit.pyi
rename to typeshed/stdlib/2/atexit.pyi
diff --git a/typeshed/stdlib/2.7/base64.pyi b/typeshed/stdlib/2/base64.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/base64.pyi
rename to typeshed/stdlib/2/base64.pyi
diff --git a/typeshed/stdlib/2.7/binascii.pyi b/typeshed/stdlib/2/binascii.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/binascii.pyi
rename to typeshed/stdlib/2/binascii.pyi
diff --git a/typeshed/stdlib/2.7/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
similarity index 98%
rename from typeshed/stdlib/2.7/builtins.pyi
rename to typeshed/stdlib/2/builtins.pyi
index 9e3dd33..bedc1c4 100644
--- a/typeshed/stdlib/2.7/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -21,12 +21,13 @@ _T1 = TypeVar('_T1')
 _T2 = TypeVar('_T2')
 _T3 = TypeVar('_T3')
 _T4 = TypeVar('_T4')
+_TT = TypeVar('_TT', bound='type')
 
-staticmethod = object()  # Special, only valid as a decorator.
-classmethod = object()  # Special, only valid as a decorator.
+class staticmethod: pass   # Special, only valid as a decorator.
+class classmethod: pass  # Special, only valid as a decorator.
 
 class object:
-    __doc__ = ...  # type: str
+    __doc__ = ...  # type: Optional[str]
     __class__ = ...  # type: type
 
     def __init__(self) -> None: ...
@@ -60,7 +61,7 @@ class type:
     # Note: the documentation doesnt specify what the return type is, the standard
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
-    def __subclasses__(self) -> List[type]: ...
+    def __subclasses__(self: _TT) -> List[_TT]: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     @overload
@@ -552,7 +553,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def viewitems(self) -> ItemsView[_KT, _VT]: ...
     @staticmethod
     @overload
-    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method (mypy/issues#328)
     @staticmethod
     @overload
     def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
@@ -832,7 +833,7 @@ class memoryview(Sized, Container[bytes]):
     def tolist(self) -> List[int]: ...
 
 class BaseException:
-    args = ...  # type: Any
+    args = ...  # type: Tuple[Any, ...]
     message = ...  # type: str
     def __init__(self, *args: Any) -> None: ...
     def with_traceback(self, tb: Any) -> BaseException: ...
@@ -912,7 +913,7 @@ class file(BinaryIO):
     @overload
     def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
     @overload
-    def __init__(file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def read(self, n: int = ...) -> str: ...
     def __enter__(self) -> BinaryIO: ...
diff --git a/typeshed/stdlib/2.7/cPickle.pyi b/typeshed/stdlib/2/cPickle.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/cPickle.pyi
rename to typeshed/stdlib/2/cPickle.pyi
diff --git a/typeshed/stdlib/2.7/cStringIO.pyi b/typeshed/stdlib/2/cStringIO.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/cStringIO.pyi
rename to typeshed/stdlib/2/cStringIO.pyi
diff --git a/typeshed/stdlib/2.7/calendar.pyi b/typeshed/stdlib/2/calendar.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/calendar.pyi
rename to typeshed/stdlib/2/calendar.pyi
diff --git a/typeshed/stdlib/2.7/codecs.pyi b/typeshed/stdlib/2/codecs.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/codecs.pyi
rename to typeshed/stdlib/2/codecs.pyi
diff --git a/typeshed/stdlib/2.7/collections.pyi b/typeshed/stdlib/2/collections.pyi
similarity index 98%
rename from typeshed/stdlib/2.7/collections.pyi
rename to typeshed/stdlib/2/collections.pyi
index 4d004fd..398d753 100644
--- a/typeshed/stdlib/2.7/collections.pyi
+++ b/typeshed/stdlib/2/collections.pyi
@@ -8,7 +8,8 @@
 
 from typing import (
     Any, Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator, Type,
-    Sized, Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence
+    Sized, Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence,
+    Container
 )
 import typing
 
diff --git a/typeshed/stdlib/2.7/compileall.pyi b/typeshed/stdlib/2/compileall.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/compileall.pyi
rename to typeshed/stdlib/2/compileall.pyi
diff --git a/typeshed/stdlib/2/cookielib.pyi b/typeshed/stdlib/2/cookielib.pyi
new file mode 100644
index 0000000..f7389df
--- /dev/null
+++ b/typeshed/stdlib/2/cookielib.pyi
@@ -0,0 +1,110 @@
+# Stubs for cookielib (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class Cookie:
+    version = ...  # type: Any
+    name = ...  # type: Any
+    value = ...  # type: Any
+    port = ...  # type: Any
+    port_specified = ...  # type: Any
+    domain = ...  # type: Any
+    domain_specified = ...  # type: Any
+    domain_initial_dot = ...  # type: Any
+    path = ...  # type: Any
+    path_specified = ...  # type: Any
+    secure = ...  # type: Any
+    expires = ...  # type: Any
+    discard = ...  # type: Any
+    comment = ...  # type: Any
+    comment_url = ...  # type: Any
+    rfc2109 = ...  # type: Any
+    def __init__(self, version, name, value, port, port_specified, domain, domain_specified, domain_initial_dot, path, path_specified, secure, expires, discard, comment, comment_url, rest, rfc2109=False): ...
+    def has_nonstandard_attr(self, name): ...
+    def get_nonstandard_attr(self, name, default=None): ...
+    def set_nonstandard_attr(self, name, value): ...
+    def is_expired(self, now=None): ...
+
+class CookiePolicy:
+    def set_ok(self, cookie, request): ...
+    def return_ok(self, cookie, request): ...
+    def domain_return_ok(self, domain, request): ...
+    def path_return_ok(self, path, request): ...
+
+class DefaultCookiePolicy(CookiePolicy):
+    DomainStrictNoDots = ...  # type: Any
+    DomainStrictNonDomain = ...  # type: Any
+    DomainRFC2965Match = ...  # type: Any
+    DomainLiberal = ...  # type: Any
+    DomainStrict = ...  # type: Any
+    netscape = ...  # type: Any
+    rfc2965 = ...  # type: Any
+    rfc2109_as_netscape = ...  # type: Any
+    hide_cookie2 = ...  # type: Any
+    strict_domain = ...  # type: Any
+    strict_rfc2965_unverifiable = ...  # type: Any
+    strict_ns_unverifiable = ...  # type: Any
+    strict_ns_domain = ...  # type: Any
+    strict_ns_set_initial_dollar = ...  # type: Any
+    strict_ns_set_path = ...  # type: Any
+    def __init__(self, blocked_domains=None, allowed_domains=None, netscape=True, rfc2965=False, rfc2109_as_netscape=None, hide_cookie2=False, strict_domain=False, strict_rfc2965_unverifiable=True, strict_ns_unverifiable=False, strict_ns_domain=..., strict_ns_set_initial_dollar=False, strict_ns_set_path=False): ...
+    def blocked_domains(self): ...
+    def set_blocked_domains(self, blocked_domains): ...
+    def is_blocked(self, domain): ...
+    def allowed_domains(self): ...
+    def set_allowed_domains(self, allowed_domains): ...
+    def is_not_allowed(self, domain): ...
+    def set_ok(self, cookie, request): ...
+    def set_ok_version(self, cookie, request): ...
+    def set_ok_verifiability(self, cookie, request): ...
+    def set_ok_name(self, cookie, request): ...
+    def set_ok_path(self, cookie, request): ...
+    def set_ok_domain(self, cookie, request): ...
+    def set_ok_port(self, cookie, request): ...
+    def return_ok(self, cookie, request): ...
+    def return_ok_version(self, cookie, request): ...
+    def return_ok_verifiability(self, cookie, request): ...
+    def return_ok_secure(self, cookie, request): ...
+    def return_ok_expires(self, cookie, request): ...
+    def return_ok_port(self, cookie, request): ...
+    def return_ok_domain(self, cookie, request): ...
+    def domain_return_ok(self, domain, request): ...
+    def path_return_ok(self, path, request): ...
+
+class Absent: ...
+
+class CookieJar:
+    non_word_re = ...  # type: Any
+    quote_re = ...  # type: Any
+    strict_domain_re = ...  # type: Any
+    domain_re = ...  # type: Any
+    dots_re = ...  # type: Any
+    magic_re = ...  # type: Any
+    def __init__(self, policy=None): ...
+    def set_policy(self, policy): ...
+    def add_cookie_header(self, request): ...
+    def make_cookies(self, response, request): ...
+    def set_cookie_if_ok(self, cookie, request): ...
+    def set_cookie(self, cookie): ...
+    def extract_cookies(self, response, request): ...
+    def clear(self, domain=None, path=None, name=None): ...
+    def clear_session_cookies(self): ...
+    def clear_expired_cookies(self): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+
+class LoadError(IOError): ...
+
+class FileCookieJar(CookieJar):
+    filename = ...  # type: Any
+    delayload = ...  # type: Any
+    def __init__(self, filename=None, delayload=False, policy=None): ...
+    def save(self, filename=None, ignore_discard=False, ignore_expires=False): ...
+    def load(self, filename=None, ignore_discard=False, ignore_expires=False): ...
+    def revert(self, filename=None, ignore_discard=False, ignore_expires=False): ...
+
+MozillaCookieJar = FileCookieJar
+LWPCookieJar = FileCookieJar
+def lwp_cookie_str(cookie: Cookie) -> str: ...
diff --git a/typeshed/stdlib/2.7/copy.pyi b/typeshed/stdlib/2/copy.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/copy.pyi
rename to typeshed/stdlib/2/copy.pyi
diff --git a/typeshed/stdlib/2.7/csv.pyi b/typeshed/stdlib/2/csv.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/csv.pyi
rename to typeshed/stdlib/2/csv.pyi
diff --git a/typeshed/stdlib/2.7/datetime.pyi b/typeshed/stdlib/2/datetime.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/datetime.pyi
rename to typeshed/stdlib/2/datetime.pyi
diff --git a/typeshed/stdlib/2.7/decimal.pyi b/typeshed/stdlib/2/decimal.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/decimal.pyi
rename to typeshed/stdlib/2/decimal.pyi
diff --git a/typeshed/stdlib/2.7/difflib.pyi b/typeshed/stdlib/2/difflib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/difflib.pyi
rename to typeshed/stdlib/2/difflib.pyi
diff --git a/typeshed/stdlib/2.7/distutils/__init__.pyi b/typeshed/stdlib/2/distutils/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/distutils/__init__.pyi
rename to typeshed/stdlib/2/distutils/__init__.pyi
diff --git a/typeshed/stdlib/2.7/distutils/emxccompiler.pyi b/typeshed/stdlib/2/distutils/emxccompiler.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/distutils/emxccompiler.pyi
rename to typeshed/stdlib/2/distutils/emxccompiler.pyi
diff --git a/typeshed/stdlib/2.7/doctest.pyi b/typeshed/stdlib/2/doctest.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/doctest.pyi
rename to typeshed/stdlib/2/doctest.pyi
diff --git a/typeshed/stdlib/2.7/email/MIMEText.pyi b/typeshed/stdlib/2/email/MIMEText.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/MIMEText.pyi
rename to typeshed/stdlib/2/email/MIMEText.pyi
diff --git a/typeshed/stdlib/2.7/email/__init__.pyi b/typeshed/stdlib/2/email/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/__init__.pyi
rename to typeshed/stdlib/2/email/__init__.pyi
diff --git a/typeshed/stdlib/2.7/email/_parseaddr.pyi b/typeshed/stdlib/2/email/_parseaddr.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/_parseaddr.pyi
rename to typeshed/stdlib/2/email/_parseaddr.pyi
diff --git a/typeshed/stdlib/2.7/email/mime/__init__.pyi b/typeshed/stdlib/2/email/mime/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/mime/__init__.pyi
rename to typeshed/stdlib/2/email/mime/__init__.pyi
diff --git a/typeshed/stdlib/2.7/email/mime/base.pyi b/typeshed/stdlib/2/email/mime/base.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/mime/base.pyi
rename to typeshed/stdlib/2/email/mime/base.pyi
diff --git a/typeshed/stdlib/2.7/email/mime/multipart.pyi b/typeshed/stdlib/2/email/mime/multipart.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/mime/multipart.pyi
rename to typeshed/stdlib/2/email/mime/multipart.pyi
diff --git a/typeshed/stdlib/2.7/email/mime/nonmultipart.pyi b/typeshed/stdlib/2/email/mime/nonmultipart.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/mime/nonmultipart.pyi
rename to typeshed/stdlib/2/email/mime/nonmultipart.pyi
diff --git a/typeshed/stdlib/2.7/email/mime/text.pyi b/typeshed/stdlib/2/email/mime/text.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/mime/text.pyi
rename to typeshed/stdlib/2/email/mime/text.pyi
diff --git a/typeshed/stdlib/2.7/email/utils.pyi b/typeshed/stdlib/2/email/utils.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/email/utils.pyi
rename to typeshed/stdlib/2/email/utils.pyi
diff --git a/typeshed/stdlib/2.7/encodings/__init__.pyi b/typeshed/stdlib/2/encodings/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/encodings/__init__.pyi
rename to typeshed/stdlib/2/encodings/__init__.pyi
diff --git a/typeshed/stdlib/2.7/encodings/utf_8.pyi b/typeshed/stdlib/2/encodings/utf_8.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/encodings/utf_8.pyi
rename to typeshed/stdlib/2/encodings/utf_8.pyi
diff --git a/typeshed/stdlib/2.7/exceptions.pyi b/typeshed/stdlib/2/exceptions.pyi
similarity index 97%
rename from typeshed/stdlib/2.7/exceptions.pyi
rename to typeshed/stdlib/2/exceptions.pyi
index 1f29e90..9b00e34 100644
--- a/typeshed/stdlib/2.7/exceptions.pyi
+++ b/typeshed/stdlib/2/exceptions.pyi
@@ -1,11 +1,11 @@
-from typing import Any, List, Optional
+from typing import Any, Tuple, Optional
 
 class StandardError(Exception): ...
 class ArithmeticError(StandardError): ...
 class AssertionError(StandardError): ...
 class AttributeError(StandardError): ...
 class BaseException(object):
-    args = ...  # type: List[Any]
+    args = ...  # type: Tuple[Any, ...]
     message = ...  # type: str
     def __getslice__(self, start, end) -> Any: ...
     def __getitem__(self, start, end) -> Any: ...
diff --git a/typeshed/stdlib/2.7/fcntl.pyi b/typeshed/stdlib/2/fcntl.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/fcntl.pyi
rename to typeshed/stdlib/2/fcntl.pyi
diff --git a/typeshed/stdlib/2.7/fileinput.pyi b/typeshed/stdlib/2/fileinput.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/fileinput.pyi
rename to typeshed/stdlib/2/fileinput.pyi
diff --git a/typeshed/stdlib/2.7/fnmatch.pyi b/typeshed/stdlib/2/fnmatch.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/fnmatch.pyi
rename to typeshed/stdlib/2/fnmatch.pyi
diff --git a/typeshed/stdlib/2.7/functools.pyi b/typeshed/stdlib/2/functools.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/functools.pyi
rename to typeshed/stdlib/2/functools.pyi
diff --git a/typeshed/stdlib/2/future_builtins.pyi b/typeshed/stdlib/2/future_builtins.pyi
new file mode 100644
index 0000000..a9b25b2
--- /dev/null
+++ b/typeshed/stdlib/2/future_builtins.pyi
@@ -0,0 +1,14 @@
+from typing import Any
+
+from itertools import ifilter as filter
+from itertools import imap as map
+from itertools import izip as zip
+
+
+def ascii(obj: Any) -> str: ...
+
+
+def hex(x: int) -> str: ...
+
+
+def oct(x: int) -> str: ...
diff --git a/typeshed/stdlib/2.7/gc.pyi b/typeshed/stdlib/2/gc.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/gc.pyi
rename to typeshed/stdlib/2/gc.pyi
diff --git a/typeshed/stdlib/2.7/genericpath.pyi b/typeshed/stdlib/2/genericpath.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/genericpath.pyi
rename to typeshed/stdlib/2/genericpath.pyi
diff --git a/typeshed/stdlib/2.7/getopt.pyi b/typeshed/stdlib/2/getopt.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/getopt.pyi
rename to typeshed/stdlib/2/getopt.pyi
diff --git a/typeshed/stdlib/2.7/getpass.pyi b/typeshed/stdlib/2/getpass.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/getpass.pyi
rename to typeshed/stdlib/2/getpass.pyi
diff --git a/typeshed/stdlib/2.7/gettext.pyi b/typeshed/stdlib/2/gettext.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/gettext.pyi
rename to typeshed/stdlib/2/gettext.pyi
diff --git a/typeshed/stdlib/2.7/glob.pyi b/typeshed/stdlib/2/glob.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/glob.pyi
rename to typeshed/stdlib/2/glob.pyi
diff --git a/typeshed/stdlib/2.7/grp.pyi b/typeshed/stdlib/2/grp.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/grp.pyi
rename to typeshed/stdlib/2/grp.pyi
diff --git a/typeshed/stdlib/2.7/gzip.pyi b/typeshed/stdlib/2/gzip.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/gzip.pyi
rename to typeshed/stdlib/2/gzip.pyi
diff --git a/typeshed/stdlib/2.7/hashlib.pyi b/typeshed/stdlib/2/hashlib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/hashlib.pyi
rename to typeshed/stdlib/2/hashlib.pyi
diff --git a/typeshed/stdlib/2.7/heapq.pyi b/typeshed/stdlib/2/heapq.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/heapq.pyi
rename to typeshed/stdlib/2/heapq.pyi
diff --git a/typeshed/stdlib/2.7/htmlentitydefs.pyi b/typeshed/stdlib/2/htmlentitydefs.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/htmlentitydefs.pyi
rename to typeshed/stdlib/2/htmlentitydefs.pyi
diff --git a/typeshed/stdlib/2.7/httplib.pyi b/typeshed/stdlib/2/httplib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/httplib.pyi
rename to typeshed/stdlib/2/httplib.pyi
diff --git a/typeshed/stdlib/2.7/imp.pyi b/typeshed/stdlib/2/imp.pyi
similarity index 95%
rename from typeshed/stdlib/2.7/imp.pyi
rename to typeshed/stdlib/2/imp.pyi
index d8a9ae1..ffb1ad3 100644
--- a/typeshed/stdlib/2.7/imp.pyi
+++ b/typeshed/stdlib/2/imp.pyi
@@ -32,4 +32,4 @@ def release_lock() -> None: ...
 
 class NullImporter:
     def __init__(self, path_string: str) -> None: ...
-    def find_module(fullname: str, path: str = ...) -> None: ...
+    def find_module(self, fullname: str, path: str = ...) -> None: ...
diff --git a/typeshed/stdlib/2.7/importlib.pyi b/typeshed/stdlib/2/importlib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/importlib.pyi
rename to typeshed/stdlib/2/importlib.pyi
diff --git a/typeshed/stdlib/2.7/inspect.pyi b/typeshed/stdlib/2/inspect.pyi
similarity index 82%
rename from typeshed/stdlib/2.7/inspect.pyi
rename to typeshed/stdlib/2/inspect.pyi
index 8abb2a0..3c10712 100644
--- a/typeshed/stdlib/2.7/inspect.pyi
+++ b/typeshed/stdlib/2/inspect.pyi
@@ -1,6 +1,6 @@
 # TODO incomplete
 from types import TracebackType, FrameType, ModuleType
-from typing import Any, Callable, List, Optional, Tuple, NamedTuple
+from typing import Any, Callable, List, Optional, Tuple, Union, NamedTuple
 
 # Types and members
 ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
@@ -64,13 +64,20 @@ def getmro(cls: type) -> Tuple[type, ...]: ...
 # TODO getcallargs
 
 # The interpreter stack
-# TODO getframeinfo
-# TODO getouterframes
-def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameType]:
-    ...
+
+Traceback = NamedTuple('Traceback', [('filename', str),
+                                     ('lineno', int),
+                                     ('function', str),
+                                     ('code_context', List[str]),
+                                     ('index', int),
+                                    ])
 
 _FrameRecord = Tuple[FrameType, str, int, str, List[str], int]
 
+def getouterframes(frame: FrameType, context: int = ...) -> List[FrameType]: ...
+def getframeinfo(frame: Union[FrameType, TracebackType] , context: int = ...) -> Traceback: ...
+def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameType]: ...
+
 def currentframe() -> FrameType: ...
 def stack(context: int = ...) -> List[_FrameRecord]: ...
 def trace(context: int = ...) -> List[_FrameRecord]: ...
diff --git a/typeshed/stdlib/2.7/io.pyi b/typeshed/stdlib/2/io.pyi
similarity index 99%
rename from typeshed/stdlib/2.7/io.pyi
rename to typeshed/stdlib/2/io.pyi
index 3763463..add0bfb 100644
--- a/typeshed/stdlib/2.7/io.pyi
+++ b/typeshed/stdlib/2/io.pyi
@@ -49,6 +49,7 @@ class StringIO(TextIO):
     def __init__(self, initial_value: unicode = ...,
                  newline: unicode = ...) -> None: ...
     # TODO see comments in BinaryIO for missing functionality
+    name = ... # type: str
     def close(self) -> None: ...
     def closed(self) -> bool: ...
     def fileno(self) -> int: ...
diff --git a/typeshed/stdlib/2.7/itertools.pyi b/typeshed/stdlib/2/itertools.pyi
similarity index 96%
rename from typeshed/stdlib/2.7/itertools.pyi
rename to typeshed/stdlib/2/itertools.pyi
index 97623cb..2d275e7 100644
--- a/typeshed/stdlib/2.7/itertools.pyi
+++ b/typeshed/stdlib/2/itertools.pyi
@@ -3,7 +3,7 @@
 # Based on https://docs.python.org/2/library/itertools.html
 
 from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
-                    Union, Sequence, Generic)
+                    Union, Sequence, Generic, Optional)
 
 _T = TypeVar('_T')
 _S = TypeVar('_S')
@@ -40,7 +40,7 @@ def groupby(iterable: Iterable[_T],
 @overload
 def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
 @overload
-def islice(iterable: Iterable[_T], start: int, stop: int,
+def islice(iterable: Iterable[_T], start: int, stop: Optional[int],
            step: int = ...) -> Iterator[_T]: ...
 
 _T1 = TypeVar('_T1')
diff --git a/typeshed/stdlib/2.7/json.pyi b/typeshed/stdlib/2/json.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/json.pyi
rename to typeshed/stdlib/2/json.pyi
diff --git a/typeshed/stdlib/2.7/linecache.pyi b/typeshed/stdlib/2/linecache.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/linecache.pyi
rename to typeshed/stdlib/2/linecache.pyi
diff --git a/typeshed/stdlib/2.7/markupbase.pyi b/typeshed/stdlib/2/markupbase.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/markupbase.pyi
rename to typeshed/stdlib/2/markupbase.pyi
diff --git a/typeshed/stdlib/2.7/md5.pyi b/typeshed/stdlib/2/md5.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/md5.pyi
rename to typeshed/stdlib/2/md5.pyi
diff --git a/typeshed/stdlib/2.7/mimetools.pyi b/typeshed/stdlib/2/mimetools.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/mimetools.pyi
rename to typeshed/stdlib/2/mimetools.pyi
diff --git a/typeshed/stdlib/2.7/multiprocessing/__init__.pyi b/typeshed/stdlib/2/multiprocessing/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/multiprocessing/__init__.pyi
rename to typeshed/stdlib/2/multiprocessing/__init__.pyi
diff --git a/typeshed/stdlib/2.7/multiprocessing/process.pyi b/typeshed/stdlib/2/multiprocessing/process.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/multiprocessing/process.pyi
rename to typeshed/stdlib/2/multiprocessing/process.pyi
diff --git a/typeshed/stdlib/2.7/multiprocessing/util.pyi b/typeshed/stdlib/2/multiprocessing/util.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/multiprocessing/util.pyi
rename to typeshed/stdlib/2/multiprocessing/util.pyi
diff --git a/typeshed/stdlib/2.7/optparse.pyi b/typeshed/stdlib/2/optparse.pyi
similarity index 98%
rename from typeshed/stdlib/2.7/optparse.pyi
rename to typeshed/stdlib/2/optparse.pyi
index e315ac4..eb753b2 100644
--- a/typeshed/stdlib/2.7/optparse.pyi
+++ b/typeshed/stdlib/2/optparse.pyi
@@ -18,7 +18,6 @@ class OptParseError(Exception):
     def __init__(self, msg) -> None: ...
 
 class BadOptionError(OptParseError):
-    __doc__ = ...  # type: str
     opt_str = ...  # type: Any
     def __init__(self, opt_str) -> None: ...
 
@@ -65,7 +64,6 @@ class HelpFormatter:
     def store_option_strings(self, parser) -> None: ...
 
 class IndentedHelpFormatter(HelpFormatter):
-    __doc__ = ...  # type: str
     _long_opt_fmt = ...  # type: str
     _short_opt_fmt = ...  # type: str
     current_indent = ...  # type: int
@@ -93,7 +91,6 @@ class Option:
     TYPED_ACTIONS = ...  # type: Tuple[str, ...]
     TYPES = ...  # type: Tuple[str, ...]
     TYPE_CHECKER = ...  # type: Dict[str, Callable]
-    __doc__ = ...  # type: str
     _long_opts = ...  # type: List[Text]
     _short_opts = ...  # type: List[Text]
     action = ...  # type: str
@@ -162,7 +159,6 @@ class OptionGroup(OptionContainer):
     def set_title(self, title) -> None: ...
 
 class OptionParser(OptionContainer):
-    __doc__ = ...  # type: str
     _long_opt = ...  # type: Dict[Text, Any]
     _short_opt = ...  # type: Dict[Any, Any]
     allow_interspersed_args = ...  # type: bool
@@ -220,11 +216,9 @@ class OptionParser(OptionContainer):
     def set_usage(self, usage: Text) -> None: ...
 
 class OptionValueError(OptParseError):
-    __doc__ = ...  # type: str
     msg = ...  # type: Any
 
 class TitledHelpFormatter(HelpFormatter):
-    __doc__ = ...  # type: str
     _long_opt_fmt = ...  # type: str
     _short_opt_fmt = ...  # type: str
     current_indent = ...  # type: int
diff --git a/typeshed/stdlib/2.7/os/__init__.pyi b/typeshed/stdlib/2/os/__init__.pyi
similarity index 88%
rename from typeshed/stdlib/2.7/os/__init__.pyi
rename to typeshed/stdlib/2/os/__init__.pyi
index 40f5ed0..8b81870 100644
--- a/typeshed/stdlib/2.7/os/__init__.pyi
+++ b/typeshed/stdlib/2/os/__init__.pyi
@@ -2,9 +2,9 @@
 
 from typing import (
     List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, Iterator,
-    MutableMapping, NamedTuple
+    MutableMapping, NamedTuple, overload
 )
-from os import path
+from . import path
 
 error = OSError
 name = ... # type: str
@@ -50,8 +50,7 @@ def getsid(pid: int) -> int: ...
 def setsid() -> None: ...
 def setuid(pid: int) -> None: ...
 
-def strerror(code: int) -> str:
-    raise ValueError()
+def strerror(code: int) -> str: ...
 
 def umask(mask: int) -> int: ...
 def uname() -> Tuple[str, str, str, str, str]: ...
@@ -62,6 +61,9 @@ def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ...
 def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ...
 def tmpfile() -> IO[Any]: ...
 
+def tmpnam() -> str: ...
+def tempnam(dir: str = ..., prefix: str = ...) -> str: ...
+
 def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
 def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ...
 def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
@@ -131,8 +133,7 @@ pathconf_names = ... # type: Mapping[str, int]
 
 def readlink(path: AnyStr) -> AnyStr: ...
 def remove(path: unicode) -> None: ...
-def removedirs(path: unicode) -> None:
-    raise OSError()
+def removedirs(path: unicode) -> None: ...
 def rename(src: unicode, dst: unicode) -> None: ...
 def renames(old: unicode, new: unicode) -> None: ...
 def rmdir(path: unicode) -> None: ...
@@ -148,7 +149,6 @@ _StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_bloc
 def fstatvfs(fd: int) -> _StatVFS: ...
 def statvfs(path: unicode) -> _StatVFS: ...
 
-# TODO: stat_float_times, tempnam, tmpnam, TMP_MAX
 def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
          followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
                                                     List[AnyStr]]]: ...
@@ -187,11 +187,8 @@ def execve(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[
 
 def _exit(n: int) -> None: ...
 
-def fork() -> int:
-    raise OSError()
-
-def forkpty() -> Tuple[int, int]:
-    raise OSError()
+def fork() -> int: ...
+def forkpty() -> Tuple[int, int]: ...
 
 def kill(pid: int, sig: int) -> None: ...
 def killpg(pgid: int, sig: int) -> None: ...
@@ -217,14 +214,14 @@ def startfile(path: unicode, operation: str = ...) -> None: ... # Windows only
 def system(command: unicode) -> int: ...
 def times() -> Tuple[float, float, float, float, float]: ...
 def wait() -> Tuple[int, int]: ... # Unix only
-def waitpid(pid: int, options: int) -> Tuple[int, int]:
-    raise OSError()
-# TODO: wait3, wait4, W...
+def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only
+def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only
+def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
+
 def confstr(name: Union[str, int]) -> Optional[str]: ...
 confstr_names = ... # type: Mapping[str, int]
 
-def getloadavg() -> Tuple[float, float, float]:
-    raise OSError()
+def getloadavg() -> Tuple[float, float, float]: ...
 
 def sysconf(name: Union[str, int]) -> int: ...
 sysconf_names = ... # type: Mapping[str, int]
@@ -270,6 +267,7 @@ O_DIRECT = 0     # Gnu extension if in C library
 O_DIRECTORY = 0  # Gnu extension if in C library
 O_NOFOLLOW = 0   # Gnu extension if in C library
 O_NOATIME = 0    # Gnu extension if in C library
+O_LARGEFILE = 0  # Gnu extension if in C library
 
 P_NOWAIT = 0
 P_NOWAITO = 0
@@ -279,9 +277,26 @@ P_WAIT = 0
 
 # wait()/waitpid() options
 WNOHANG = 0  # Unix only
-#WCONTINUED = 0  # some Unix systems
-#WUNTRACED = 0  # Unix only
+WCONTINUED = 0  # some Unix systems
+WUNTRACED = 0  # Unix only
 
 P_ALL = 0
 WEXITED = 0
 WNOWAIT = 0
+
+TMP_MAX = 0
+
+# Below are Unix-only
+def WCOREDUMP(status: int) -> bool: ...
+def WEXITSTATUS(status: int) -> int: ...
+def WIFCONTINUED(status: int) -> bool: ...
+def WIFEXITED(status: int) -> bool: ...
+def WIFSIGNALED(status: int) -> bool: ...
+def WIFSTOPPED(status: int) -> bool: ...
+def WSTOPSIG(status: int) -> int: ...
+def WTERMSIG(status: int) -> int: ...
+
+@overload
+def stat_float_times(newvalue: bool = ...) -> None: ...
+@overload
+def stat_float_times() -> bool: ...
diff --git a/typeshed/stdlib/2.7/os/path.pyi b/typeshed/stdlib/2/os/path.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/os/path.pyi
rename to typeshed/stdlib/2/os/path.pyi
diff --git a/typeshed/stdlib/2.7/pdb.pyi b/typeshed/stdlib/2/pdb.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/pdb.pyi
rename to typeshed/stdlib/2/pdb.pyi
diff --git a/typeshed/stdlib/2.7/pickle.pyi b/typeshed/stdlib/2/pickle.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/pickle.pyi
rename to typeshed/stdlib/2/pickle.pyi
diff --git a/typeshed/stdlib/2.7/pipes.pyi b/typeshed/stdlib/2/pipes.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/pipes.pyi
rename to typeshed/stdlib/2/pipes.pyi
diff --git a/typeshed/stdlib/2.7/platform.pyi b/typeshed/stdlib/2/platform.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/platform.pyi
rename to typeshed/stdlib/2/platform.pyi
diff --git a/typeshed/stdlib/2.7/posix.pyi b/typeshed/stdlib/2/posix.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/posix.pyi
rename to typeshed/stdlib/2/posix.pyi
diff --git a/typeshed/stdlib/2.7/posixpath.pyi b/typeshed/stdlib/2/posixpath.pyi
similarity index 68%
rename from typeshed/stdlib/2.7/posixpath.pyi
rename to typeshed/stdlib/2/posixpath.pyi
index 5b6d64d..933f504 100644
--- a/typeshed/stdlib/2.7/posixpath.pyi
+++ b/typeshed/stdlib/2/posixpath.pyi
@@ -2,7 +2,7 @@
 #
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
-from typing import Any
+from typing import Any, AnyStr, List
 from genericpath import *
 
 curdir = ... # type: Any
@@ -39,12 +39,12 @@ supports_unicode_filenames = ... # type: Any
 
 def relpath(path, start=...): ...
 
-# Names in __all__ with no definition:
-#   commonprefix
-#   exists
-#   getatime
-#   getctime
-#   getmtime
-#   getsize
-#   isdir
-#   isfile
+# posixpath imports these from genericpath.py:
+def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+def exists(path: unicode) -> bool: ...
+def getatime(path: unicode) -> float: ...
+def getmtime(path: unicode) -> float: ...
+def getctime(path: unicode) -> float: ...
+def getsize(path: unicode) -> int: ...
+def isfile(path: unicode) -> bool: ...
+def isdir(path: unicode) -> bool: ...
diff --git a/typeshed/stdlib/2.7/pprint.pyi b/typeshed/stdlib/2/pprint.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/pprint.pyi
rename to typeshed/stdlib/2/pprint.pyi
diff --git a/typeshed/stdlib/2.7/pwd.pyi b/typeshed/stdlib/2/pwd.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/pwd.pyi
rename to typeshed/stdlib/2/pwd.pyi
diff --git a/typeshed/stdlib/2.7/quopri.pyi b/typeshed/stdlib/2/quopri.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/quopri.pyi
rename to typeshed/stdlib/2/quopri.pyi
diff --git a/typeshed/stdlib/2.7/random.pyi b/typeshed/stdlib/2/random.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/random.pyi
rename to typeshed/stdlib/2/random.pyi
diff --git a/typeshed/stdlib/2.7/re.pyi b/typeshed/stdlib/2/re.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/re.pyi
rename to typeshed/stdlib/2/re.pyi
diff --git a/typeshed/stdlib/2.7/resource.pyi b/typeshed/stdlib/2/resource.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/resource.pyi
rename to typeshed/stdlib/2/resource.pyi
diff --git a/typeshed/stdlib/2.7/rfc822.pyi b/typeshed/stdlib/2/rfc822.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/rfc822.pyi
rename to typeshed/stdlib/2/rfc822.pyi
diff --git a/typeshed/stdlib/2.7/robotparser.pyi b/typeshed/stdlib/2/robotparser.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/robotparser.pyi
rename to typeshed/stdlib/2/robotparser.pyi
diff --git a/typeshed/stdlib/2.7/runpy.pyi b/typeshed/stdlib/2/runpy.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/runpy.pyi
rename to typeshed/stdlib/2/runpy.pyi
diff --git a/typeshed/stdlib/2.7/select.pyi b/typeshed/stdlib/2/select.pyi
similarity index 94%
rename from typeshed/stdlib/2.7/select.pyi
rename to typeshed/stdlib/2/select.pyi
index 6af6846..32c3cae 100644
--- a/typeshed/stdlib/2.7/select.pyi
+++ b/typeshed/stdlib/2/select.pyi
@@ -94,7 +94,7 @@ class epoll(object):
     def fileno(self) -> int: ...
     def register(self, fd: int, eventmask: int = ...) -> None: ...
     def modify(self, fd: int, eventmask: int) -> None: ...
-    def unregister(fd: int) -> None: ...
-    def poll(timeout: float = ..., maxevents: int = ...) -> Any: ...
+    def unregister(self, fd: int) -> None: ...
+    def poll(self, timeout: float = ..., maxevents: int = ...) -> Any: ...
     @classmethod
-    def fromfd(self, fd: int) -> epoll: ...
+    def fromfd(cls, fd: int) -> epoll: ...
diff --git a/typeshed/stdlib/2.7/sha.pyi b/typeshed/stdlib/2/sha.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/sha.pyi
rename to typeshed/stdlib/2/sha.pyi
diff --git a/typeshed/stdlib/2.7/shelve.pyi b/typeshed/stdlib/2/shelve.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/shelve.pyi
rename to typeshed/stdlib/2/shelve.pyi
diff --git a/typeshed/stdlib/2.7/shlex.pyi b/typeshed/stdlib/2/shlex.pyi
similarity index 93%
rename from typeshed/stdlib/2.7/shlex.pyi
rename to typeshed/stdlib/2/shlex.pyi
index de37acf..8130090 100644
--- a/typeshed/stdlib/2.7/shlex.pyi
+++ b/typeshed/stdlib/2/shlex.pyi
@@ -10,7 +10,7 @@ class shlex:
     def sourcehook(self, filename: str) -> None: ...
     def push_source(self, stream: IO[Any], filename: str = ...) -> None: ...
     def pop_source(self) -> IO[Any]: ...
-    def error_leader(file: str = ..., line: int = ...) -> str: ...
+    def error_leader(self, file: str = ..., line: int = ...) -> str: ...
 
     commenters = ... # type: str
     wordchars = ... # type: str
diff --git a/typeshed/stdlib/2.7/shutil.pyi b/typeshed/stdlib/2/shutil.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/shutil.pyi
rename to typeshed/stdlib/2/shutil.pyi
diff --git a/typeshed/stdlib/2.7/signal.pyi b/typeshed/stdlib/2/signal.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/signal.pyi
rename to typeshed/stdlib/2/signal.pyi
diff --git a/typeshed/stdlib/2.7/simplejson/__init__.pyi b/typeshed/stdlib/2/simplejson/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/simplejson/__init__.pyi
rename to typeshed/stdlib/2/simplejson/__init__.pyi
diff --git a/typeshed/stdlib/2.7/simplejson/decoder.pyi b/typeshed/stdlib/2/simplejson/decoder.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/simplejson/decoder.pyi
rename to typeshed/stdlib/2/simplejson/decoder.pyi
diff --git a/typeshed/stdlib/2.7/simplejson/encoder.pyi b/typeshed/stdlib/2/simplejson/encoder.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/simplejson/encoder.pyi
rename to typeshed/stdlib/2/simplejson/encoder.pyi
diff --git a/typeshed/stdlib/2.7/simplejson/scanner.pyi b/typeshed/stdlib/2/simplejson/scanner.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/simplejson/scanner.pyi
rename to typeshed/stdlib/2/simplejson/scanner.pyi
diff --git a/typeshed/stdlib/2.7/smtplib.pyi b/typeshed/stdlib/2/smtplib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/smtplib.pyi
rename to typeshed/stdlib/2/smtplib.pyi
diff --git a/typeshed/stdlib/2.7/socket.pyi b/typeshed/stdlib/2/socket.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/socket.pyi
rename to typeshed/stdlib/2/socket.pyi
diff --git a/typeshed/stdlib/2.7/spwd.pyi b/typeshed/stdlib/2/spwd.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/spwd.pyi
rename to typeshed/stdlib/2/spwd.pyi
diff --git a/typeshed/stdlib/2.7/sqlite3/__init__.pyi b/typeshed/stdlib/2/sqlite3/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/sqlite3/__init__.pyi
rename to typeshed/stdlib/2/sqlite3/__init__.pyi
diff --git a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi b/typeshed/stdlib/2/sqlite3/dbapi2.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
rename to typeshed/stdlib/2/sqlite3/dbapi2.pyi
diff --git a/typeshed/stdlib/2.7/ssl.pyi b/typeshed/stdlib/2/ssl.pyi
similarity index 98%
rename from typeshed/stdlib/2.7/ssl.pyi
rename to typeshed/stdlib/2/ssl.pyi
index 1225d9a..c9fafba 100644
--- a/typeshed/stdlib/2.7/ssl.pyi
+++ b/typeshed/stdlib/2/ssl.pyi
@@ -169,8 +169,8 @@ class SSLContext:
                      binary_form: bool = ...) -> Union[List[_PeerCertRetDictType], List[bytes]]: ...
     def set_default_verify_paths(self) -> None: ...
     def set_ciphers(self, ciphers: str) -> None: ...
-    def set_alpn_protocols(protocols: List[str]) -> None: ...
-    def set_npn_protocols(protocols: List[str]) -> None: ...
+    def set_alpn_protocols(self, protocols: List[str]) -> None: ...
+    def set_npn_protocols(self, protocols: List[str]) -> None: ...
     def set_servername_callback(self,
                                 server_name_callback: Optional[_SrvnmeCbType]) -> None: ...
     def load_dh_params(self, dhfile: str) -> None: ...
diff --git a/typeshed/stdlib/2.7/stat.pyi b/typeshed/stdlib/2/stat.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/stat.pyi
rename to typeshed/stdlib/2/stat.pyi
diff --git a/typeshed/stdlib/2.7/string.pyi b/typeshed/stdlib/2/string.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/string.pyi
rename to typeshed/stdlib/2/string.pyi
diff --git a/typeshed/stdlib/2.7/strop.pyi b/typeshed/stdlib/2/strop.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/strop.pyi
rename to typeshed/stdlib/2/strop.pyi
diff --git a/typeshed/stdlib/2.7/struct.pyi b/typeshed/stdlib/2/struct.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/struct.pyi
rename to typeshed/stdlib/2/struct.pyi
diff --git a/typeshed/stdlib/2.7/subprocess.pyi b/typeshed/stdlib/2/subprocess.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/subprocess.pyi
rename to typeshed/stdlib/2/subprocess.pyi
diff --git a/typeshed/stdlib/2.7/sys.pyi b/typeshed/stdlib/2/sys.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/sys.pyi
rename to typeshed/stdlib/2/sys.pyi
diff --git a/typeshed/stdlib/2.7/syslog.pyi b/typeshed/stdlib/2/syslog.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/syslog.pyi
rename to typeshed/stdlib/2/syslog.pyi
diff --git a/typeshed/stdlib/2.7/tempfile.pyi b/typeshed/stdlib/2/tempfile.pyi
similarity index 59%
rename from typeshed/stdlib/2.7/tempfile.pyi
rename to typeshed/stdlib/2/tempfile.pyi
index 7925392..5f252e7 100644
--- a/typeshed/stdlib/2.7/tempfile.pyi
+++ b/typeshed/stdlib/2/tempfile.pyi
@@ -8,10 +8,39 @@
 #       Avoid using Union[str, bytes] for return values, as it implies that
 #       an isinstance() check will often be required, which is inconvenient.
 
-from typing import Tuple, IO, Union, AnyStr, Any, overload
+from typing import Tuple, IO, Union, AnyStr, Any, overload, Iterator, List, Type, Optional
 
+import thread
+import random
+
+TMP_MAX = ...  # type: int
 tempdir = ...  # type: str
 template = ...  # type: str
+_name_sequence = ...  # type: Optional[_RandomNameSequence]
+
+class _RandomNameSequence:
+    _rng = ...  # type: random.Random
+    _rng_pid = ...  # type: int
+    characters = ...  # type: str
+    mutex = ...  # type: thread.LockType
+    rng = ...  # type: random.Random
+    def __iter__(self) -> "_RandomNameSequence": ...
+    def next(self) -> str: ...
+    # from os.path:
+    def normcase(self, path: AnyStr) -> AnyStr: ...
+
+class _TemporaryFileWrapper(IO[str]):
+    close_called = ...  # type: bool
+    delete = ...  # type: bool
+    file = ...  # type: IO
+    name = ...  # type: Any
+    def __init__(self, file: IO, name, delete:bool = ...) -> None: ...
+    def __del__(self) -> None: ...
+    def __enter__(self) -> "_TemporaryFileWrapper": ...
+    def __exit__(self, exc, value, tb) -> bool: ...
+    def __getattr__(self, name: unicode) -> Any: ...
+    def close(self) -> None: ...
+    def unlink(self, path: unicode) -> None: ...
 
 # TODO text files
 
@@ -20,7 +49,7 @@ def TemporaryFile(
         bufsize: int = ...,
         suffix: Union[bytes, unicode] = ...,
         prefix: Union[bytes, unicode] = ...,
-        dir: Union[bytes, unicode] = ...) -> IO[str]: ...
+        dir: Union[bytes, unicode] = ...) -> _TemporaryFileWrapper: ...
 def NamedTemporaryFile(
         mode: Union[bytes, unicode] = ...,
         bufsize: int = ...,
@@ -28,14 +57,14 @@ def NamedTemporaryFile(
         prefix: Union[bytes, unicode] = ...,
         dir: Union[bytes, unicode] = ...,
         delete: bool = ...
-        ) -> IO[str]: ...
+        ) -> _TemporaryFileWrapper: ...
 def SpooledTemporaryFile(
         max_size: int = ...,
         mode: Union[bytes, unicode] = ...,
         buffering: int = ...,
         suffix: Union[bytes, unicode] = ...,
         prefix: Union[bytes, unicode] = ...,
-        dir: Union[bytes, unicode] = ...) -> IO[str]:
+        dir: Union[bytes, unicode] = ...) -> _TemporaryFileWrapper:
     ...
 
 class TemporaryDirectory:
@@ -63,3 +92,7 @@ def mktemp() -> str: ...
 def mktemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: AnyStr = ...) -> AnyStr: ...
 def gettempdir() -> str: ...
 def gettempprefix() -> str: ...
+
+def _candidate_tempdir_list() -> List[str]: ...
+def _get_candidate_names() -> Optional[_RandomNameSequence]: ...
+def _get_default_tempdir() -> str: ...
diff --git a/typeshed/stdlib/2.7/textwrap.pyi b/typeshed/stdlib/2/textwrap.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/textwrap.pyi
rename to typeshed/stdlib/2/textwrap.pyi
diff --git a/typeshed/stdlib/2.7/thread.pyi b/typeshed/stdlib/2/thread.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/thread.pyi
rename to typeshed/stdlib/2/thread.pyi
diff --git a/typeshed/stdlib/2.7/time.pyi b/typeshed/stdlib/2/time.pyi
similarity index 84%
rename from typeshed/stdlib/2.7/time.pyi
rename to typeshed/stdlib/2/time.pyi
index 79f2526..6f234f2 100644
--- a/typeshed/stdlib/2.7/time.pyi
+++ b/typeshed/stdlib/2/time.pyi
@@ -1,7 +1,7 @@
 """Stub file for the 'time' module."""
 # See https://docs.python.org/2/library/time.html
 
-from typing import NamedTuple, Tuple, Union, Any
+from typing import NamedTuple, Tuple, Union, Any, Optional
 
 # ----- variables and constants -----
 accept2dyear = False
@@ -25,12 +25,12 @@ def asctime(t: struct_time = ...) -> str:
 
 def clock() -> float: ...
 
-def ctime(secs: float = ...) -> str:
+def ctime(secs: Optional[float] = ...) -> str:
     raise ValueError()
 
-def gmtime(secs: float = ...) -> struct_time: ...
+def gmtime(secs: Optional[float] = ...) -> struct_time: ...
 
-def localtime(secs: float = ...) -> struct_time: ...
+def localtime(secs: Optional[float] = ...) -> struct_time: ...
 
 def mktime(t: struct_time) -> float:
     raise OverflowError()
diff --git a/typeshed/stdlib/2.7/token.pyi b/typeshed/stdlib/2/token.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/token.pyi
rename to typeshed/stdlib/2/token.pyi
diff --git a/typeshed/stdlib/2.7/tokenize.pyi b/typeshed/stdlib/2/tokenize.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/tokenize.pyi
rename to typeshed/stdlib/2/tokenize.pyi
diff --git a/typeshed/stdlib/2.7/types.pyi b/typeshed/stdlib/2/types.pyi
similarity index 99%
rename from typeshed/stdlib/2.7/types.pyi
rename to typeshed/stdlib/2/types.pyi
index 607d9fa..c3b66f4 100644
--- a/typeshed/stdlib/2.7/types.pyi
+++ b/typeshed/stdlib/2/types.pyi
@@ -40,7 +40,6 @@ class FunctionType:
     __code__ = func_code
     __defaults__ = func_defaults
     __dict__ = func_dict
-    __doc__ = func_doc
     __globals__ = func_globals
     __name__ = func_name
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
diff --git a/typeshed/stdlib/2.7/typing.pyi b/typeshed/stdlib/2/typing.pyi
similarity index 99%
rename from typeshed/stdlib/2.7/typing.pyi
rename to typeshed/stdlib/2/typing.pyi
index 90a2229..e3ad6bd 100644
--- a/typeshed/stdlib/2.7/typing.pyi
+++ b/typeshed/stdlib/2/typing.pyi
@@ -12,7 +12,6 @@ Generic = object()
 Tuple = object()
 Callable = object()
 Type = object()
-builtinclass = object()
 _promote = object()
 
 # Type aliases
@@ -276,7 +275,7 @@ class TextIO(IO[unicode]):
     @property
     def encoding(self) -> str: ...
     @property
-    def errors(self) -> str: ...
+    def errors(self) -> Optional[str]: ...
     @property
     def line_buffering(self) -> bool: ...
     @property
diff --git a/typeshed/stdlib/2.7/unicodedata.pyi b/typeshed/stdlib/2/unicodedata.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/unicodedata.pyi
rename to typeshed/stdlib/2/unicodedata.pyi
diff --git a/typeshed/stdlib/2.7/unittest.pyi b/typeshed/stdlib/2/unittest.pyi
similarity index 95%
rename from typeshed/stdlib/2.7/unittest.pyi
rename to typeshed/stdlib/2/unittest.pyi
index 318405a..7de3be2 100644
--- a/typeshed/stdlib/2.7/unittest.pyi
+++ b/typeshed/stdlib/2/unittest.pyi
@@ -50,13 +50,6 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
     def __enter__(self) -> _AssertRaisesContext: ...
     def __exit__(self, exc_type, exc_value, tb) -> bool: ...
 
-class _AssertWarnsContext(_AssertRaisesBaseContext):
-    warning = ... # type: Any # TODO precise type
-    filename = ...  # type: str
-    lineno = 0
-    def __enter__(self) -> _AssertWarnsContext: ...
-    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
-
 class TestCase(Testable):
     def __init__(self, methodName: str = ...) -> None: ...
     # TODO failureException
@@ -134,8 +127,6 @@ class TestCase(Testable):
                          msg: object = ...) -> None: ...
     def assertNotIsInstance(self, obj: Any, cls: type,
                             msg: object = ...) -> None: ...
-    def assertWarns(self, expected_warning: type, callable_obj: Any = ...,
-                    *args: Any, **kwargs: Any) -> _AssertWarnsContext: ...
     def fail(self, msg: object = ...) -> None: ...
     def countTestCases(self) -> int: ...
     def defaultTestResult(self) -> TestResult: ...
diff --git a/typeshed/stdlib/2.7/urllib.pyi b/typeshed/stdlib/2/urllib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/urllib.pyi
rename to typeshed/stdlib/2/urllib.pyi
diff --git a/typeshed/stdlib/2.7/urllib2.pyi b/typeshed/stdlib/2/urllib2.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/urllib2.pyi
rename to typeshed/stdlib/2/urllib2.pyi
diff --git a/typeshed/stdlib/2.7/urlparse.pyi b/typeshed/stdlib/2/urlparse.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/urlparse.pyi
rename to typeshed/stdlib/2/urlparse.pyi
diff --git a/typeshed/stdlib/2.7/uuid.pyi b/typeshed/stdlib/2/uuid.pyi
similarity index 89%
rename from typeshed/stdlib/2.7/uuid.pyi
rename to typeshed/stdlib/2/uuid.pyi
index 485c720..ed3db1c 100644
--- a/typeshed/stdlib/2.7/uuid.pyi
+++ b/typeshed/stdlib/2/uuid.pyi
@@ -30,7 +30,7 @@ def uuid3(namespace: UUID, name: str) -> UUID: ...
 def uuid4() -> UUID: ...
 def uuid5(namespace: UUID, name: str) -> UUID: ...
 
-NAMESPACE_DNS = ... # type: str
-NAMESPACE_URL = ... # type: str
-NAMESPACE_OID = ... # type: str
-NAMESPACE_X500 = ... # type: str
+NAMESPACE_DNS = ... # type: UUID
+NAMESPACE_URL = ... # type: UUID
+NAMESPACE_OID = ... # type: UUID
+NAMESPACE_X500 = ... # type: UUID
diff --git a/typeshed/stdlib/2.7/weakref.pyi b/typeshed/stdlib/2/weakref.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/weakref.pyi
rename to typeshed/stdlib/2/weakref.pyi
diff --git a/typeshed/stdlib/2.7/wsgiref/__init__.pyi b/typeshed/stdlib/2/wsgiref/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/wsgiref/__init__.pyi
rename to typeshed/stdlib/2/wsgiref/__init__.pyi
diff --git a/typeshed/stdlib/2.7/wsgiref/validate.pyi b/typeshed/stdlib/2/wsgiref/validate.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/wsgiref/validate.pyi
rename to typeshed/stdlib/2/wsgiref/validate.pyi
diff --git a/typeshed/stdlib/2.7/xml/__init__.pyi b/typeshed/stdlib/2/xml/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/__init__.pyi
rename to typeshed/stdlib/2/xml/__init__.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi b/typeshed/stdlib/2/xml/etree/ElementInclude.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi
rename to typeshed/stdlib/2/xml/etree/ElementInclude.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementPath.pyi b/typeshed/stdlib/2/xml/etree/ElementPath.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/etree/ElementPath.pyi
rename to typeshed/stdlib/2/xml/etree/ElementPath.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi b/typeshed/stdlib/2/xml/etree/ElementTree.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
rename to typeshed/stdlib/2/xml/etree/ElementTree.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/__init__.pyi b/typeshed/stdlib/2/xml/etree/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/etree/__init__.pyi
rename to typeshed/stdlib/2/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/cElementTree.pyi b/typeshed/stdlib/2/xml/etree/cElementTree.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/etree/cElementTree.pyi
rename to typeshed/stdlib/2/xml/etree/cElementTree.pyi
diff --git a/typeshed/stdlib/2.7/xxsubtype.pyi b/typeshed/stdlib/2/xxsubtype.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xxsubtype.pyi
rename to typeshed/stdlib/2/xxsubtype.pyi
diff --git a/typeshed/stdlib/2.7/zlib.pyi b/typeshed/stdlib/2/zlib.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/zlib.pyi
rename to typeshed/stdlib/2/zlib.pyi
diff --git a/typeshed/stdlib/2and3/distutils/extension.pyi b/typeshed/stdlib/2and3/distutils/extension.pyi
index 81dae10..5aa070e 100644
--- a/typeshed/stdlib/2and3/distutils/extension.pyi
+++ b/typeshed/stdlib/2and3/distutils/extension.pyi
@@ -6,8 +6,8 @@ import sys
 class Extension:
     if sys.version_info >= (3,):
         def __init__(self,
-                     *, name: str = ...,
-                     sources: List[str] = ...,
+                     name: str,
+                     sources: List[str],
                      include_dirs: List[str] = ...,
                      define_macros: List[Tuple[str, Optional[str]]] = ...,
                      undef_macros: List[str] = ...,
@@ -23,8 +23,8 @@ class Extension:
                      optional: bool = ...) -> None: ...
     else:
         def __init__(self,
-                     *, name: str = ...,
-                     sources: List[str] = ...,
+                     name: str,
+                     sources: List[str],
                      include_dirs: List[str] = ...,
                      define_macros: List[Tuple[str, Optional[str]]] = ...,
                      undef_macros: List[str] = ...,
diff --git a/typeshed/stdlib/2and3/pkgutil.pyi b/typeshed/stdlib/2and3/pkgutil.pyi
index d7767eb..42619db 100644
--- a/typeshed/stdlib/2and3/pkgutil.pyi
+++ b/typeshed/stdlib/2and3/pkgutil.pyi
@@ -24,7 +24,7 @@ def find_loader(fullname: str) -> Loader: ...
 def get_importer(path_item: str) -> Any: ...  # TODO precise type
 def get_loader(module_or_name: str) -> Loader: ...
 def iter_importers(fullname: str = ...) -> Generator[Any, None, None]: ...  # TODO precise type
-def iter_modules(path: Optional[str] = ...,
+def iter_modules(path: Optional[List[str]] = ...,
                  prefix: str = ...) -> _YMFNI: ...  # TODO precise type
 def walk_packages(path: Optional[str] = ..., prefix: str = ...,
                   onerror: Optional[Callable[[str], None]] = ...) -> _YMFNI: ...
diff --git a/typeshed/stdlib/2and3/threading.pyi b/typeshed/stdlib/2and3/threading.pyi
index f81b708..6d0e4a6 100644
--- a/typeshed/stdlib/2and3/threading.pyi
+++ b/typeshed/stdlib/2and3/threading.pyi
@@ -76,6 +76,10 @@ class Thread:
     def setDaemon(self, daemonic: bool) -> None: ...
 
 
+class _DummyThread(Thread):
+    pass
+
+
 class Lock:
     def __init__(self) -> None: ...
     def __enter__(self) -> bool: ...
@@ -87,9 +91,10 @@ class Lock:
     else:
         def acquire(self, blocking: bool = ...) -> bool: ...
     def release(self) -> None: ...
+    def locked(self) -> bool: ...
 
 
-class RLock:
+class _RLock:
     def __init__(self) -> None: ...
     def __enter__(self) -> bool: ...
     def __exit__(self, exc_type: Optional[Type[BaseException]],
@@ -102,8 +107,11 @@ class RLock:
     def release(self) -> None: ...
 
 
+RLock = _RLock
+
+
 class Condition:
-    def __init__(self, lock: Union[Lock, RLock, None] = ...) -> None: ...
+    def __init__(self, lock: Union[Lock, _RLock, None] = ...) -> None: ...
     def __enter__(self) -> bool: ...
     def __exit__(self, exc_type: Optional[Type[BaseException]],
                  exc_val: Optional[Exception],
@@ -119,6 +127,7 @@ class Condition:
                      timeout: Optional[float]) -> _T: ...
     def notify(self, n: int = ...) -> None: ...
     def notify_all(self) -> None: ...
+    def notifyAll(self) -> None: ...
 
 
 class Semaphore:
diff --git a/typeshed/stdlib/2and3/webbrowser.pyi b/typeshed/stdlib/2and3/webbrowser.pyi
index 95b340e..93cbaca 100644
--- a/typeshed/stdlib/2and3/webbrowser.pyi
+++ b/typeshed/stdlib/2and3/webbrowser.pyi
@@ -23,8 +23,8 @@ class BaseBrowser:
     def open_new_tab(self, url: str) -> bool: ...
 
 class GenericBrowser(BaseBrowser):
-    name = ... # type: List[str]
-    args = ... # type: str
+    args = ... # type: List[str]
+    name = ... # type: str
     basename = ... # type: str
     def __init__(self, name: str) -> None: ...
     def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
diff --git a/typeshed/stdlib/3/xml/sax/__init__.pyi b/typeshed/stdlib/2and3/xml/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3/xml/sax/__init__.pyi
rename to typeshed/stdlib/2and3/xml/__init__.pyi
diff --git a/typeshed/stdlib/2and3/xml/sax/__init__.pyi b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
new file mode 100644
index 0000000..8d93a24
--- /dev/null
+++ b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
@@ -0,0 +1,32 @@
+import xml.sax
+from xml.sax.xmlreader import InputSource
+from xml.sax.handler import ContentHandler, ErrorHandler
+
+class SAXException(Exception):
+    def __init__(self, msg, exception=None): ...
+    def getMessage(self): ...
+    def getException(self): ...
+    def __getitem__(self, ix): ...
+
+class SAXParseException(SAXException):
+    def __init__(self, msg, exception, locator): ...
+    def getColumnNumber(self): ...
+    def getLineNumber(self): ...
+    def getPublicId(self): ...
+    def getSystemId(self): ...
+
+class SAXNotRecognizedException(SAXException): ...
+class SAXNotSupportedException(SAXException): ...
+class SAXReaderNotAvailable(SAXNotSupportedException): ...
+
+default_parser_list = ...  # type: List[str]
+
+def make_parser(parser_list: List[str]) -> xml.sax.xmlreader.XMLReader: ...
+
+def parse(source: str, handler: xml.sax.handler.ContentHandler,
+          errorHandler: xml.sax.handler.ErrorHandler=...): ...
+
+def parseString(string: str, handler: xml.sax.handler.ContentHandler,
+                errorHandler: xml.sax.handler.ErrorHandler=...): ...
+
+def _create_parser(parser_name: str) -> xml.sax.xmlreader.XMLReader: ...
diff --git a/typeshed/stdlib/2.7/xml/sax/handler.pyi b/typeshed/stdlib/2and3/xml/sax/handler.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/sax/handler.pyi
rename to typeshed/stdlib/2and3/xml/sax/handler.pyi
diff --git a/typeshed/stdlib/2.7/xml/sax/saxutils.pyi b/typeshed/stdlib/2and3/xml/sax/saxutils.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/sax/saxutils.pyi
rename to typeshed/stdlib/2and3/xml/sax/saxutils.pyi
diff --git a/typeshed/stdlib/2.7/xml/sax/xmlreader.pyi b/typeshed/stdlib/2and3/xml/sax/xmlreader.pyi
similarity index 100%
rename from typeshed/stdlib/2.7/xml/sax/xmlreader.pyi
rename to typeshed/stdlib/2and3/xml/sax/xmlreader.pyi
diff --git a/typeshed/stdlib/2and3/zipfile.pyi b/typeshed/stdlib/2and3/zipfile.pyi
index 455ca0d..693aad3 100644
--- a/typeshed/stdlib/2and3/zipfile.pyi
+++ b/typeshed/stdlib/2and3/zipfile.pyi
@@ -27,7 +27,7 @@ class ZipFile:
                  exc_val: Optional[Exception],
                  exc_tb: Optional[TracebackType]) -> bool: ...
     def close(self) -> None: ...
-    def getinfo(self, name: str) -> None: ...
+    def getinfo(self, name: str) -> ZipInfo: ...
     def infolist(self) -> List[ZipInfo]: ...
     def namelist(self) -> List[str]: ...
     def open(self, name: _SZI, mode: str = ...,
diff --git a/typeshed/stdlib/3.4/asyncio/__init__.pyi b/typeshed/stdlib/3.4/asyncio/__init__.pyi
index b10a25e..7eb6e88 100644
--- a/typeshed/stdlib/3.4/asyncio/__init__.pyi
+++ b/typeshed/stdlib/3.4/asyncio/__init__.pyi
@@ -37,15 +37,23 @@ from asyncio.transports import (
 )
 from asyncio.futures import (
     Future as Future,
+    CancelledError as CancelledError,
+    TimeoutError as TimeoutError,
+    InvalidStateError as InvalidStateError,
 )
 from asyncio.tasks import (
-    sleep as sleep,
-    Task as Task,
     FIRST_COMPLETED as FIRST_COMPLETED,
     FIRST_EXCEPTION as FIRST_EXCEPTION,
     ALL_COMPLETED as ALL_COMPLETED,
+    as_completed as as_completed,
+    ensure_future as ensure_future,
+    gather as gather,
+    run_coroutine_threadsafe as run_coroutine_threadsafe,
+    shield as shield,
+    sleep as sleep,
     wait as wait,
     wait_for as wait_for,
+    Task as Task,
 )
 from asyncio.events import (
     AbstractEventLoopPolicy as AbstractEventLoopPolicy,
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index eab1d4c..22d97bd 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -5,6 +5,15 @@ __all__ = ... # type: str
 
 _T = TypeVar('_T')
 
+from concurrent.futures._base import (
+    Error as Error,
+)
+from concurrent.futures import (
+    CancelledError as CancelledError,
+    TimeoutError as TimeoutError,
+)
+class InvalidStateError(Error): ...
+
 class _TracebackLogger:
     __slots__ = ... # type: List[str]
     exc = ...  # type: BaseException
diff --git a/typeshed/stdlib/3.4/asyncio/locks.pyi b/typeshed/stdlib/3.4/asyncio/locks.pyi
index 4d665ae..2467ffe 100644
--- a/typeshed/stdlib/3.4/asyncio/locks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/locks.pyi
@@ -32,7 +32,7 @@ class Event:
     def is_set(self) -> bool: ...
     def set(self) -> None: ...
     def clear(self) -> None: ...
-    
+    @coroutine
     def wait(self) -> bool: ...
 
 class Condition(_ContextManagerMixin):
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index e5b101c..8d6d7fb 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -1,27 +1,37 @@
-from typing import Any, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Generator, Iterable, Awaitable, overload, Sequence, Iterator
+from typing import (Any, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable,
+                    Coroutine, Generator, Iterable, Awaitable, overload, Sequence, Iterator,
+                    Optional)
+import concurrent.futures
 
 __all__ = ... # type: str
 
 from .events import AbstractEventLoop
 from .futures import Future
 
+_T = TypeVar('_T')
+
 FIRST_EXCEPTION = 'FIRST_EXCEPTION'
 FIRST_COMPLETED = 'FIRST_COMPLETED'
 ALL_COMPLETED = 'ALL_COMPLETED'
-_T = TypeVar('_T')
-def as_completed(fs: Sequence[Future[_T]], *, loop: AbstractEventLoop = ..., timeout=None) -> Iterator[Generator[Any, None, _T]]: ...
-def ensure_future(coro_or_future: Union[Future[_T], Generator[Any, None, _T]], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
-def gather(*coros_or_futures: Sequence[Union[Future[_T], Generator[Any, None, _T]]], loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[_T]: ...
-def run_coroutine_threadsafe(coro: Generator[Any, None, _T], loop: AbstractEventLoop) -> Future[_T]: ...
-def shield(arg: Union[Future[_T], Generator[Any, None, _T]], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
+
+def as_completed(fs: Sequence[Future[_T]], *, loop: AbstractEventLoop = ...,
+                 timeout=None) -> Iterator[Generator[Any, None, _T]]: ...
+def ensure_future(coro_or_future: Union[Future[_T], Generator[Any, None, _T]],
+                  *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
+# TODO: gather() should use variadic type vars instead of _TAny.
+_TAny = Any
+def gather(*coros_or_futures: Union[Future[_TAny], Generator[Any, None, _TAny], Awaitable[_TAny]],
+           loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[List[_TAny]]: ...
+def run_coroutine_threadsafe(coro: Union[Generator[Any, None, _T], Coroutine[Any, None, _T], Awaitable[_T]],
+                             loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
+def shield(arg: Union[Future[_T], Generator[Any, None, _T]],
+           *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ...
 def wait(fs: List[Task[_T]], *, loop: AbstractEventLoop = ...,
-    timeout: float = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
-def wait_for(fut: Union[Future[_T], Generator[Any, None, _T]], timeout: float, *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
-
-class _GatheringFuture(Future[_T], Generic[_T]):
-    def __init__(self, children: Sequence[Union[Future[_T], Generator[Any, None, _T]]], *, loop: AbstractEventLoop = ...) -> None: ...
-    def cancel(self) -> bool: ...
+    timeout: float = ...,
+         return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
+def wait_for(fut: Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]], timeout: Optional[float],
+             *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 
 class Task(Future[_T], Generic[_T]):
     _all_tasks = ...  # type: Set[Task]
@@ -33,7 +43,8 @@ class Task(Future[_T], Generic[_T]):
 
     # Can't use a union, see mypy issue #1873.
     @overload
-    def __init__(self, coro: Generator[Any, None, _T], *, loop: AbstractEventLoop = ...) -> None: ...
+    def __init__(self, coro: Generator[Any, None, _T],
+                 *, loop: AbstractEventLoop = ...) -> None: ...
     @overload
     def __init__(self, coro: Awaitable[_T], *, loop: AbstractEventLoop = ...) -> None: ...
 
diff --git a/typeshed/stdlib/3.5/pathlib.pyi b/typeshed/stdlib/3.5/pathlib.pyi
index 34b2669..493692b 100644
--- a/typeshed/stdlib/3.5/pathlib.pyi
+++ b/typeshed/stdlib/3.5/pathlib.pyi
@@ -1,51 +1,53 @@
 # Stubs for pathlib (Python 3.5)
 
-from typing import Any, Generator, IO, Optional, Sequence, Tuple, Union
+from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union
 import os
 
+_P = TypeVar('_P', 'PurePath')
+
 class PurePath:
     parts = ...  # type: Tuple[str, ...]
     drive = ...  # type: str
     root = ...  # type: str
     anchor = ...  # type: str
-    parents = ...  # type: Sequence[PurePath]
-    parent = ...  # type: PurePath
     name = ...  # type: str
     suffix = ...  # type: str
     suffixes = ...  # type: List[str]
     stem = ...  # type: str
-    def __new__(cls, *args: Union[str, PurePath]) -> PurePath: ...
+    def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ...
     def __lt__(self, other: PurePath) -> bool: ...
     def __le__(self, other: PurePath) -> bool: ...
     def __gt__(self, other: PurePath) -> bool: ...
     def __ge__(self, other: PurePath) -> bool: ...
-    def __truediv__(self, key: Union[str, PurePath]) -> PurePath: ...
-    def __rtruediv__(self, key: Union[str, PurePath]) -> PurePath: ...
+    def __truediv__(self: _P, key: Union[str, PurePath]) -> _P: ...
+    def __rtruediv__(self: _P, key: Union[str, PurePath]) -> _P: ...
     def __bytes__(self) -> bytes: ...
     def as_posix(self) -> str: ...
     def as_uri(self) -> str: ...
     def is_absolute(self) -> bool: ...
     def is_reserved(self) -> bool: ...
     def match(self, path_pattern: str) -> bool: ...
-    def relative_to(self, *other: Union[str, PurePath]) -> PurePath: ...
-    def with_name(self, name: str) -> PurePath: ...
-    def with_suffix(self, suffix: str) -> PurePath: ...
-    def joinpath(self, *other: Union[str, PurePath]) -> PurePath: ...
+    def relative_to(self: _P, *other: Union[str, PurePath]) -> _P: ...
+    def with_name(self: _P, name: str) -> _P: ...
+    def with_suffix(self: _P, suffix: str) -> _P: ...
+    def joinpath(self: _P, *other: Union[str, PurePath]) -> _P: ...
+    def parents(self: _P) -> Sequence[_P]: ...
+    def parent(self: _P) -> _P: ...
 
 class PurePosixPath(PurePath): ...
 class PureWindowsPath(PurePath): ...
 
 class Path(PurePath):
     @classmethod
-    def cwd(cls) -> Path: ...
+    def cwd(cls: Type[_P]) -> _P: ...
     @classmethod
-    def home(cls) -> Path: ...
-    def __new__(cls, *args: Union[str, PurePath], **kwargs: Any) -> Path: ...
-    def absolute(self) -> Path: ...
+    def home(cls: Type[_P]) -> _P: ...
+    def __new__(cls: Type[_P], *args: Union[str, PurePath], **kwargs: Any) -> _P: ...
+    def absolute(self: _P) -> _P: ...
     def stat(self) -> os.stat_result: ...
     def chmod(self, mode: int) -> None: ...
     def exists(self) -> bool: ...
-    def expanduser(self) -> Path: ...
+    def expanduser(self: _P) -> _P: ...
     def glob(self, pattern: str) -> Generator[Path, None, None]: ...
     def group(self) -> str: ...
     def is_dir(self) -> bool: ...
@@ -69,7 +71,7 @@ class Path(PurePath):
                   errors: Optional[str] = ...) -> str: ...
     def rename(self, target: Union[str, PurePath]) -> None: ...
     def replace(self, target: Union[str, PurePath]) -> None: ...
-    def resolve(self) -> Path: ...
+    def resolve(self: _P) -> _P: ...
     def rglob(self, pattern: str) -> Generator[Path, None, None]: ...
     def rmdir(self) -> None: ...
     def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ...
@@ -81,17 +83,6 @@ class Path(PurePath):
     def write_text(self, data: str, encoding: Optional[str] = ...,
                    errors: Optional[str] = ...) -> int: ...
 
-    # The following methods are re-stubbed here even though they only actually exist in the base
-    # class so that they return Path when called on a Path, rather than returning PurePath.
-    parents = ...  # type: Sequence[Path]
-    parent = ...  # type: Path
-    def __truediv__(self, key: Union[str, PurePath]) -> Path: ...
-    def __rtruediv__(self, key: Union[str, PurePath]) -> Path: ...
-    def relative_to(self, *other: Union[str, PurePath]) -> Path: ...
-    def with_name(self, name: str) -> Path: ...
-    def with_suffix(self, suffix: str) -> Path: ...
-    def joinpath(self, *args: Union[str, PurePath]) -> Path: ...
-
 
 class PosixPath(Path, PurePosixPath): ...
 class WindowsPath(Path, PureWindowsPath): ...
diff --git a/typeshed/stdlib/3/_importlib_modulespec.pyi b/typeshed/stdlib/3/_importlib_modulespec.pyi
index 20d8894..d0b4e26 100644
--- a/typeshed/stdlib/3/_importlib_modulespec.pyi
+++ b/typeshed/stdlib/3/_importlib_modulespec.pyi
@@ -5,7 +5,7 @@
 # - Loader in importlib.abc
 # - ModuleSpec in importlib.machinery (3.4 and later only)
 
-import abc
+from abc import ABCMeta
 import sys
 from typing import Any, Optional
 
@@ -26,14 +26,13 @@ if sys.version_info >= (3, 4):
 class ModuleType:
     __name__ = ... # type: str
     __file__ = ...  # type: str
-    __doc__ = ... # type: Optional[str]
     if sys.version_info >= (3, 4):
         __loader__ = ... # type: Optional[Loader]
         __package__ = ... # type: Optional[str]
         __spec__ = ... # type: Optional[ModuleSpec]
     def __init__(self, name: str, doc: str) -> None: ...
 
-class Loader(metaclass=abc.ABCMeta):
+class Loader(metaclass=ABCMeta):
     def load_module(self, fullname: str) -> ModuleType: ...
     if sys.version_info >= (3, 3):
         def module_repr(self, module: ModuleType) -> str: ...
diff --git a/typeshed/stdlib/3/abc.pyi b/typeshed/stdlib/3/abc.pyi
index 6b37f75..80287e1 100644
--- a/typeshed/stdlib/3/abc.pyi
+++ b/typeshed/stdlib/3/abc.pyi
@@ -3,7 +3,7 @@ import sys
 # Stubs for abc.
 
 # Thesee definitions have special processing in type checker.
-class ABCMeta:
+class ABCMeta(type):
     def register(cls: "ABCMeta", subclass: Any) -> None: ...
 abstractmethod = object()
 abstractproperty = object()
diff --git a/typeshed/stdlib/3/ast.pyi b/typeshed/stdlib/3/ast.pyi
index ac80368..86a8431 100644
--- a/typeshed/stdlib/3/ast.pyi
+++ b/typeshed/stdlib/3/ast.pyi
@@ -19,12 +19,10 @@ from _ast import (
 )
 
 class NodeVisitor():
-    __doc__ = ...  # type: str
     def visit(self, node: AST) -> Any: ...
     def generic_visit(self, node: AST) -> None: ...
 
 class NodeTransformer(NodeVisitor):
-    __doc__ = ...  # type: str
     def generic_visit(self, node: AST) -> None: ...
 
 def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 6f3e1f1..8c75706 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -1,7 +1,7 @@
 # Stubs for builtins (Python 3)
 
 from typing import (
-    TypeVar, Iterator, Iterable, overload,
+    TypeVar, Iterator, Iterable, overload, Container,
     Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic,
     Set, AbstractSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsBytes,
     SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView, ByteString, Optional
@@ -22,12 +22,13 @@ _T1 = TypeVar('_T1')
 _T2 = TypeVar('_T2')
 _T3 = TypeVar('_T3')
 _T4 = TypeVar('_T4')
+_TT = TypeVar('_TT', bound='type')
 
-staticmethod = object() # Only valid as a decorator.
-classmethod = object() # Only valid as a decorator.
+class staticmethod: pass   # Special, only valid as a decorator.
+class classmethod: pass  # Special, only valid as a decorator.
 
 class object:
-    __doc__ = ...  # type: str
+    __doc__ = ...  # type: Optional[str]
     __class__ = ...  # type: type
     __dict__ = ...  # type: Dict[str, Any]
 
@@ -57,7 +58,7 @@ class type:
     @overload
     def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
     def __call__(self, *args: Any, **kwds: Any) -> Any: ...
-    def __subclasses__(self) -> List[type]: ...
+    def __subclasses__(self: _TT) -> List[_TT]: ...
     # Note: the documentation doesnt specify what the return type is, the standard
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
@@ -421,12 +422,41 @@ class bytearray(MutableSequence[int], ByteString):
     def __gt__(self, x: bytes) -> bool: ...
     def __ge__(self, x: bytes) -> bool: ...
 
-class memoryview():
-    # TODO arg can be any obj supporting the buffer protocol
-    def __init__(self, b: bytearray) -> None: ...
+
+class memoryview(Sized, Container[bytes]):
+    format = ... # type: str
+    itemsize = ... # type: int
+    shape = ... # type: Optional[Tuple[int, ...]]
+    strides = ... # type: Optional[Tuple[int, ...]]
+    suboffsets = ... # type: Optional[Tuple[int, ...]]
+    readonly = ... # type: bool
+    ndim = ... # type: int
+
+    def __init__(self, obj: Union[str, bytes, bytearray, memoryview]) -> None: ...
+
+    @overload
+    def __getitem__(self, i: int) -> int: ...
+    @overload
+    def __getitem__(self, s: slice) -> memoryview: ...
+
+    def __contains__(self, x: object) -> bool: ...
+    def __iter__(self) -> Iterator[bytes]: ...
+    def __len__(self) -> int: ...
+
+    @overload
+    def __setitem__(self, i: int, o: bytes) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: memoryview) -> None: ...
+
+    def tobytes(self) -> bytes: ...
+    def tolist(self) -> List[int]: ...
+
     if sys.version_info >= (3, 5):
         def hex(self) -> str: ...
 
+
 class bool(int, SupportsInt, SupportsFloat):
     def __init__(self, o: object = ...) -> None: ...
 
@@ -531,7 +561,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def items(self) -> ItemsView[_KT, _VT]: ...
     @staticmethod
     @overload
-    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method
+    def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ...  # TODO: Actually a class method (mypy/issues#328)
     @staticmethod
     @overload
     def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ...
@@ -693,7 +723,6 @@ def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1],
 def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default:_T = ...) -> _T: ...
-# TODO memoryview
 @overload
 def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
@@ -703,8 +732,15 @@ def next(i: Iterator[_T]) -> _T: ...
 @overload
 def next(i: Iterator[_T], default: _T) -> _T: ...
 def oct(i: int) -> str: ...  # TODO __index__
-def open(file: Union[str, bytes, int], mode: str = 'r', buffering: int = -1, encoding: str = None,
-         errors: str = None, newline: str = None, closefd: bool = ...) -> IO[Any]: ...
+
+if sys.version_info >= (3, 6):
+    from pathlib import Path
+    def open(file: Union[str, bytes, int, Path], mode: str = 'r', buffering: int = -1, encoding: str = None,
+             errors: str = None, newline: str = None, closefd: bool = ...) -> IO[Any]: ...
+else:
+    def open(file: Union[str, bytes, int], mode: str = 'r', buffering: int = -1, encoding: str = None,
+             errors: str = None, newline: str = None, closefd: bool = ...) -> IO[Any]: ...
+
 def ord(c: Union[str, bytes, bytearray]) -> int: ...
 # TODO: in Python 3.2, print() does not support flush
 def print(*values: Any, sep: str = ' ', end: str = '\n', file: IO[str] = None, flush: bool = False) -> None: ...
@@ -759,7 +795,7 @@ Ellipsis = ...  # type: ellipsis
 # Exceptions
 
 class BaseException:
-    args = ...  # type: Any
+    args = ...  # type: Tuple[Any, ...]
     __cause__ = ... # type: BaseException
     __context__ = ... # type: BaseException
     __traceback__ = ... # type: TracebackType
@@ -818,6 +854,7 @@ class StopIteration(Exception):
 if sys.version_info >= (3, 5):
     class StopAsyncIteration(Exception):
         value = ...  # type: Any
+    class RecursionError(RuntimeError): ...
 class SyntaxError(Exception):
     msg = ...  # type: str
     lineno = ...  # type: int
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index f93990b..f9bd402 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -13,9 +13,11 @@ from typing import (
 # These are exported.
 # TODO reexport more.
 from typing import (
+    Container as Container,
     MutableMapping as MutableMapping,
     Sequence as Sequence,
     MutableSequence as MutableSequence,
+    MutableSet as MutableSet,
     AbstractSet as Set,
 )
 
diff --git a/typeshed/stdlib/3/collections/abc.pyi b/typeshed/stdlib/3/collections/abc.pyi
new file mode 100644
index 0000000..07b99ad
--- /dev/null
+++ b/typeshed/stdlib/3/collections/abc.pyi
@@ -0,0 +1,14 @@
+# Stubs for collections.abc (introduced from Python 3.3)
+#
+# https://docs.python.org/3.3/whatsnew/3.3.html#collections
+import sys
+
+if sys.version_info >= (3, 3):
+    from . import (
+        Container as Container,
+        MutableMapping as MutableMapping,
+        Sequence as Sequence,
+        MutableSequence as MutableSequence,
+        Set as Set,
+        MutableSet as MutableSet,
+    )
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
index 819e79c..461c56a 100644
--- a/typeshed/stdlib/3/concurrent/futures/_base.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -1,4 +1,4 @@
-from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional
+from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set
 from collections import namedtuple
 
 FIRST_COMPLETED = ... # type: Any
@@ -41,4 +41,4 @@ class Executor:
 
 def as_completed(fs: Iterable[Future], timeout: Optional[float] = ...) -> Iterator[Future]: ...
 
-def wait(fs: Iterable[Future], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Iterable[Future], Iterable[Future]]: ...
+def wait(fs: Iterable[Future], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Set[Future], Set[Future]]: ...
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index 3d2a58f..df7ed9f 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -7,6 +7,8 @@ from typing import Optional, SupportsAbs, Tuple, overload
 MINYEAR = 0
 MAXYEAR = 0
 
+TimeTuple = Tuple[int, int, int, int, int, int, int, int, int]
+
 class tzinfo:
     def tzname(self, dt: Optional[datetime]) -> str: ...
     def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
@@ -72,7 +74,7 @@ class time:
     resolution = ...  # type: timedelta
 
     def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ...,
-                 tzinfo: tzinfo = ...) -> None: ...
+                 tzinfo: Optional[tzinfo] = ...) -> None: ...
 
     @property
     def hour(self) -> int: ...
@@ -83,7 +85,7 @@ class time:
     @property
     def microsecond(self) -> int: ...
     @property
-    def tzinfo(self) -> _tzinfo: ...
+    def tzinfo(self) -> Optional[_tzinfo]: ...
 
     def __le__(self, other: time) -> bool: ...
     def __lt__(self, other: time) -> bool: ...
@@ -97,7 +99,7 @@ class time:
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
-                microsecond: int = ..., tzinfo: _tzinfo = None) -> time: ...
+                microsecond: int = ..., tzinfo: Optional[_tzinfo] = None) -> time: ...
 
 _date = date
 _time = time
@@ -153,7 +155,7 @@ class datetime:
 
     def __init__(self, year: int, month: int = ..., day: int = ..., hour: int = ...,
                  minute: int = ..., second: int = ..., microsecond: int = ...,
-                 tzinfo: tzinfo = ...) -> None: ...
+                 tzinfo: Optional[tzinfo] = ...) -> None: ...
 
     @property
     def year(self) -> int: ...
@@ -170,10 +172,10 @@ class datetime:
     @property
     def microsecond(self) -> int: ...
     @property
-    def tzinfo(self) -> _tzinfo: ...
+    def tzinfo(self) -> Optional[_tzinfo]: ...
 
     @classmethod
-    def fromtimestamp(cls, t: float, tz: timezone = ...) -> datetime: ...
+    def fromtimestamp(cls, t: float, tz: Optional[timezone] = ...) -> datetime: ...
     @classmethod
     def utcfromtimestamp(cls, t: float) -> datetime: ...
     @classmethod
@@ -181,7 +183,7 @@ class datetime:
     @classmethod
     def fromordinal(cls, n: int) -> datetime: ...
     @classmethod
-    def now(cls, tz: _tzinfo = ...) -> datetime: ...
+    def now(cls, tz: Optional[_tzinfo] = ...) -> datetime: ...
     @classmethod
     def utcnow(cls) -> datetime: ...
     @classmethod
@@ -189,16 +191,16 @@ class datetime:
     def strftime(self, fmt: str) -> str: ...
     def __format__(self, fmt: str) -> str: ...
     def toordinal(self) -> int: ...
-    def timetuple(self) -> tuple: ... # TODO return type
+    def timetuple(self) -> TimeTuple: ... # TODO return type
     def timestamp(self) -> float: ...
-    def utctimetuple(self) -> tuple: ... # TODO return type
+    def utctimetuple(self) -> TimeTuple: ... # TODO return type
     def date(self) -> _date: ...
     def time(self) -> _time: ...
     def timetz(self) -> _time: ...
     def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ...,
                 minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo:
-                _tzinfo = None) -> datetime: ...
-    def astimezone(self, tz: _tzinfo = ...) -> datetime: ...
+                Optional[_tzinfo] = None) -> datetime: ...
+    def astimezone(self, tz: Optional[_tzinfo] = ...) -> datetime: ...
     def ctime(self) -> str: ...
     def isoformat(self, sep: str = ...) -> str: ...
     @classmethod
diff --git a/typeshed/stdlib/3/functools.pyi b/typeshed/stdlib/3/functools.pyi
index 2bd79ef..c24faf8 100644
--- a/typeshed/stdlib/3/functools.pyi
+++ b/typeshed/stdlib/3/functools.pyi
@@ -28,7 +28,7 @@ class _lru_cache_wrapper(Generic[_T]):
     def cache_info(self) -> CacheInfo: ...
 
 class lru_cache():
-    def __init__(self, maxsize: int = ..., typed: bool = ...) -> None:
+    def __init__(self, maxsize: Optional[int] = ..., typed: bool = ...) -> None:
         pass
     def __call__(self, f: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ...
 
diff --git a/typeshed/stdlib/3/importlib/abc.pyi b/typeshed/stdlib/3/importlib/abc.pyi
index 96b3e28..3318417 100644
--- a/typeshed/stdlib/3/importlib/abc.pyi
+++ b/typeshed/stdlib/3/importlib/abc.pyi
@@ -1,4 +1,4 @@
-import abc
+from abc import ABCMeta, abstractmethod
 if sys.version_info >= (3, 4):
     from _importlib_modulespec import ModuleSpec
 import sys
@@ -11,27 +11,27 @@ _Path = Union[bytes, str]
 # exists in its own stub file (with ModuleSpec and ModuleType).
 from _importlib_modulespec import Loader as Loader  # Exported
 
-class Finder(metaclass=abc.ABCMeta): ...
+class Finder(metaclass=ABCMeta): ...
     # Technically this class defines the following method, but its subclasses
     # in this module violate its signature. Since this class is deprecated, it's
     # easier to simply ignore that this method exists.
-    #@abc.abstractmethod
+    #@abstractmethod
     #def find_module(self, fullname: str,
     #                path: Sequence[_Path] = None) -> Optional[Loader]: ...
 
 class ResourceLoader(Loader):
-    @abc.abstractmethod
+    @abstractmethod
     def get_data(self, path: _Path) -> bytes: ...
 
 class InspectLoader(Loader):
     def is_package(self, fullname: str) -> bool: ...
     def get_code(self, fullname: str) -> Optional[types.CodeType]: ...
     def load_module(self, fullname: str) -> types.ModuleType: ...
-    @abc.abstractmethod
+    @abstractmethod
     def get_source(self, fullname: str) -> Optional[str]: ...
     if sys.version_info >= (3, 4):
         def exec_module(self, module: types.ModuleType) -> None: ...
-    if sys.version_info == (3, 4):
+    if sys.version_info[:2] == (3, 4):
         def source_to_code(self, data: Union[bytes, str],
                            path: str = '<string>') -> types.CodeType: ...
     elif sys.version_info >= (3, 5):
@@ -40,7 +40,7 @@ class InspectLoader(Loader):
                            path: str = '<string>') -> types.CodeType: ...
 
 class ExecutionLoader(InspectLoader):
-    @abc.abstractmethod
+    @abstractmethod
     def get_filename(self, fullname: str) -> _Path: ...
     def get_code(self, fullname: str) -> Optional[types.CodeType]: ...
 
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index de2c7a4..b9c9833 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -246,6 +246,7 @@ class TextIOWrapper(TextIO):
 class StringIO(TextIOWrapper):
     def __init__(self, initial_value: str = ...,
                  newline: Optional[str] = ...) -> None: ...
+    name = ... # type: str
     def getvalue(self) -> str: ...
 
 class IncrementalNewlineDecoder(codecs.IncrementalDecoder): ...
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
index e24bf77..5432470 100644
--- a/typeshed/stdlib/3/itertools.pyi
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -3,7 +3,7 @@
 # Based on http://docs.python.org/3.2/library/itertools.html
 
 from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
-                    Union, Sequence, Generic)
+                    Union, Sequence, Generic, Optional)
 
 _T = TypeVar('_T')
 _S = TypeVar('_S')
@@ -41,7 +41,7 @@ def groupby(iterable: Iterable[_T],
 @overload
 def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
 @overload
-def islice(iterable: Iterable[_T], start: int, stop: int,
+def islice(iterable: Iterable[_T], start: int, stop: Optional[int],
            step: int = ...) -> Iterator[_T]: ...
 
 def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
index 2e142d5..bf30ab5 100644
--- a/typeshed/stdlib/3/multiprocessing/__init__.pyi
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -1,6 +1,6 @@
 # Stubs for multiprocessing
 
-from typing import Any, Callable, Iterable, Mapping
+from typing import Any, Callable, Iterable, Mapping, Optional, Dict, List
 
 from multiprocessing.process import current_process as current_process
 
@@ -8,6 +8,61 @@ class Lock():
     def acquire(self, block: bool = ..., timeout: int = ...) -> None: ...
     def release(self) -> None: ...
 
+class AsyncResult():
+    def get(self, timeout: float = -1) -> Any: ...
+    def wait(self, timeout: float = -1) -> None: ...
+    def ready(self) -> bool: ...
+    def successful(self) -> bool: ...
+
+class Pool():
+    def __init__(self, processes: Optional[int] = None,
+                 initializer: Optional[Callable[..., None]] = None,
+                 initargs: Iterable[Any] = (),
+                 maxtasksperchild: Optional[int] = None,
+                 context: Any = None) -> None: ...
+    def apply(self,
+              func: Callable[..., Any],
+              args: Iterable[Any]=(),
+              kwds: Dict[str, Any]={}) -> Any: ...
+    def apply_async(self,
+                func: Callable[..., Any],
+                args: Iterable[Any]=(),
+                kwds: Dict[str, Any]={},
+                callback: Callable[..., None] = None,
+                error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
+    def map(self,
+            func: Callable[..., Any],
+            iterable: Iterable[Any]=(),
+            chunksize: Optional[int] = None) -> List[Any]: ...
+    def map_async(self, func: Callable[..., Any],
+                  iterable: Iterable[Any] = (),
+                  chunksize: Optional[int] = None,
+                  callback: Callable[..., None] = None,
+                  error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
+    def imap(self,
+             func: Callable[..., Any],
+             iterable: Iterable[Any]=(),
+             chunksize: Optional[int] = None) -> Iterable[Any]: ...
+    def imap_unordered(self,
+                       func: Callable[..., Any],
+                       iterable: Iterable[Any]=(),
+                       chunksize: Optional[int] = None) -> Iterable[Any]: ...
+    def starmap(self,
+                func: Callable[..., Any],
+                iterable: Iterable[Iterable[Any]]=(),
+                chunksize: Optional[int] = None) -> List[Any]: ...
+    def starmap_async(self,
+                      func: Callable[..., Any],
+                      iterable: Iterable[Iterable[Any]] = (),
+                      chunksize: Optional[int] = None,
+                      callback: Callable[..., None] = None,
+                      error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
+    def close(self) -> None: ...
+    def terminate(self) -> None: ...
+    def join(self) -> None: ...
+    def __enter__(self) -> 'Pool': ...
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None: ...
+
 class Process():
     # TODO: set type of group to None
     def __init__(self,
diff --git a/typeshed/stdlib/3/multiprocessing/managers.pyi b/typeshed/stdlib/3/multiprocessing/managers.pyi
index bbdb563..c5d053e 100644
--- a/typeshed/stdlib/3/multiprocessing/managers.pyi
+++ b/typeshed/stdlib/3/multiprocessing/managers.pyi
@@ -4,7 +4,7 @@
 
 from typing import Any
 
-class BaseManager():
-    def register(typeid: str, callable: Any = ...) -> None: ...
+class BaseManager:
+    def register(self, typeid: str, callable: Any = ...) -> None: ...
 
 class RemoteError(Exception): ...
diff --git a/typeshed/stdlib/3/multiprocessing/pool.pyi b/typeshed/stdlib/3/multiprocessing/pool.pyi
index 32fe4f7..7d181c3 100644
--- a/typeshed/stdlib/3/multiprocessing/pool.pyi
+++ b/typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -2,7 +2,7 @@
 
 # NOTE: These are incomplete!
 
-from typing import Any, Callable, Iterable, List, Sequence
+from typing import Any, Callable, Iterable, Mapping, Optional, Dict, List
 
 class AsyncResult():
     def get(self, timeout: float = -1) -> Any: ...
@@ -11,26 +11,48 @@ class AsyncResult():
     def successful(self) -> bool: ...
 
 class ThreadPool():
-    def __init__(self, processes: int = ...) -> None: ...
-    def apply_async(self, func: Callable[..., Any],
-                    args: Sequence[Any]=(),
-                    kwds: Dict[str, Any]={},
-                    callback: Callable[..., None] = None) -> AsyncResult: ...
-    def apply(self, func: Callable[..., Any],
-              args: Sequence[Any]=(),
+    def __init__(self, processes: Optional[int] = None,
+                 initializer: Optional[Callable[..., None]] = None,
+                 initargs: Iterable[Any] = ()) -> None: ...
+    def apply(self,
+              func: Callable[..., Any],
+              args: Iterable[Any]=(),
               kwds: Dict[str, Any]={}) -> Any: ...
-    def map(self, func: Callable[..., Any],
-            iterable: Iterable[Any]=()) -> List[Any]: ...
+    def apply_async(self,
+                func: Callable[..., Any],
+                args: Iterable[Any]=(),
+                kwds: Dict[str, Any]={},
+                callback: Callable[..., None] = None,
+                error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
+    def map(self,
+            func: Callable[..., Any],
+            iterable: Iterable[Any]=(),
+            chunksize: Optional[int] = None) -> List[Any]: ...
     def map_async(self, func: Callable[..., Any],
                   iterable: Iterable[Any] = (),
-                  chunksize: int = -1,
-                  callback: Callable[..., None] = None) -> AsyncResult: ...
-    def imap(self, func: Callable[..., Any],
-             iterable: Iterable[Any]=()) -> Iterable[Any]: ...
-    def imap_async(self, func: Callable[..., Any],
-                   chunksize: int = -1,
-                   iterable: Iterable[Any]=(),
-                   callback: Callable[..., None] = None) -> AsyncResult: ...
+                  chunksize: Optional[int] = None,
+                  callback: Callable[..., None] = None,
+                  error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
+    def imap(self,
+             func: Callable[..., Any],
+             iterable: Iterable[Any]=(),
+             chunksize: Optional[int] = None) -> Iterable[Any]: ...
+    def imap_unordered(self,
+                       func: Callable[..., Any],
+                       iterable: Iterable[Any]=(),
+                       chunksize: Optional[int] = None) -> Iterable[Any]: ...
+    def starmap(self,
+                func: Callable[..., Any],
+                iterable: Iterable[Iterable[Any]]=(),
+                chunksize: Optional[int] = None) -> List[Any]: ...
+    def starmap_async(self,
+                      func: Callable[..., Any],
+                      iterable: Iterable[Iterable[Any]] = (),
+                      chunksize: Optional[int] = None,
+                      callback: Callable[..., None] = None,
+                      error_callback: Callable[[BaseException], None] = None) -> AsyncResult: ...
     def close(self) -> None: ...
     def terminate(self) -> None: ...
     def join(self) -> None: ...
+    def __enter__(self) -> 'ThreadPool': ...
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None: ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index e781842..bbfd044 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -9,7 +9,7 @@ from typing import (
 )
 import sys
 from builtins import OSError as error
-import os.path as path
+from . import path
 
 # ----- os variables -----
 
@@ -138,6 +138,12 @@ class stat_result:
     st_ctime = 0.0 # platform dependent (time of most recent metadata change
                    # on  Unix, or the time of creation on Windows)
 
+    if sys.version_info >= (3, 3):
+        st_atime_ns = 0 # time of most recent access, in nanoseconds
+        st_mtime_ns = 0 # time of most recent content modification in nanoseconds
+        st_ctime_ns = 0 # platform dependent (time of most recent metadata change
+                        # on  Unix, or the time of creation on Windows) in nanoseconds
+
     # not documented
     def __init__(self, tuple: Tuple[int, ...]) -> None: ...
 
@@ -371,3 +377,6 @@ if sys.version_info >= (3, 3):
               onerror: Callable = ..., *, follow_symlinks: bool = ...,
               dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
                                              List[AnyStr], int]]: ...  # Unix only
+
+if sys.version_info >= (3, 4):
+    def cpu_count() -> Optional[int]: ...
diff --git a/typeshed/stdlib/3/queue.pyi b/typeshed/stdlib/3/queue.pyi
index 28609a0..f85490d 100644
--- a/typeshed/stdlib/3/queue.pyi
+++ b/typeshed/stdlib/3/queue.pyi
@@ -2,7 +2,7 @@
 
 # NOTE: These are incomplete!
 
-from typing import Any, TypeVar, Generic
+from typing import Any, TypeVar, Generic, Optional
 
 _T = TypeVar('_T')
 
@@ -12,9 +12,9 @@ class Full(Exception): ...
 class Queue(Generic[_T]):
     def __init__(self, maxsize: int = ...) -> None: ...
     def full(self) -> bool: ...
-    def get(self, block: bool = ..., timeout: float = ...) -> _T: ...
+    def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ...
     def get_nowait(self) -> _T: ...
-    def put(self, item: _T, block: bool = ..., timeout: float = ...) -> None: ...
+    def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ...
     def put_nowait(self, item: _T) -> None: ...
     def join(self) -> None: ...
     def qsize(self) -> int: ...
diff --git a/typeshed/stdlib/3/resource.pyi b/typeshed/stdlib/3/resource.pyi
index bddc6df..bde5498 100644
--- a/typeshed/stdlib/3/resource.pyi
+++ b/typeshed/stdlib/3/resource.pyi
@@ -4,7 +4,26 @@
 
 from typing import Tuple
 
-RLIMIT_CORE = 0
+RLIMIT_AS = ... # type: int
+RLIMIT_CORE = ... # type: int
+RLIMIT_CPU = ... # type: int
+RLIMIT_DATA = ... # type: int
+RLIMIT_FSIZE = ... # type: int
+RLIMIT_MEMLOCK = ... # type: int
+RLIMIT_MSGQUEUE = ... # type: int
+RLIMIT_NICE = ... # type: int
+RLIMIT_NOFILE = ... # type: int
+RLIMIT_NPROC = ... # type: int
+RLIMIT_OFILE = ... # type: int
+RLIMIT_RSS = ... # type: int
+RLIMIT_RTPRIO = ... # type: int
+RLIMIT_RTTIME = ... # type: int
+RLIMIT_SIGPENDING = ... # type: int
+RLIMIT_STACK = ... # type: int
+RLIM_INFINITY = ... # type: int
+RUSAGE_CHILDREN = ... # type: int
+RUSAGE_SELF = ... # type: int
+RUSAGE_THREAD = ... # type: int
 
 def getrlimit(resource: int) -> Tuple[int, int]: ...
 def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ...
diff --git a/typeshed/stdlib/3/select.pyi b/typeshed/stdlib/3/select.pyi
index fcede9f..83446f0 100644
--- a/typeshed/stdlib/3/select.pyi
+++ b/typeshed/stdlib/3/select.pyi
@@ -22,6 +22,6 @@ class poll:
     def poll(self, timeout: int = ...) -> List[Tuple[int, int]]: ...
 
 def select(rlist: Sequence, wlist: Sequence, xlist: Sequence,
-           timeout: float = ...) -> Tuple[List[int],
-                                           List[int],
-                                           List[int]]: ...
+           timeout: float = ...) -> Tuple[List[Any],
+                                           List[Any],
+                                           List[Any]]: ...
diff --git a/typeshed/stdlib/3/shutil.pyi b/typeshed/stdlib/3/shutil.pyi
index 2ddd3d6..1307597 100644
--- a/typeshed/stdlib/3/shutil.pyi
+++ b/typeshed/stdlib/3/shutil.pyi
@@ -1,4 +1,5 @@
 # Stubs for shutil
+import sys
 
 # Based on http://docs.python.org/3.2/library/shutil.html
 
@@ -27,6 +28,8 @@ def rmtree(path: str, ignore_errors: bool = ...,
 def move(src: str, dst: str) -> None: ...
 
 class Error(Exception): ...
+if sys.version_info >= (3, 4):
+  class SameFileError(Error): ...
 
 def make_archive(base_name: str, format: str, root_dir: str = ...,
                  base_dir: str = ..., verbose: bool = ...,
@@ -44,3 +47,5 @@ def register_unpack_format(name: str, extensions: List[str], function: Any,
                            description: str = ...) -> None: ...
 def unregister_unpack_format(name: str) -> None: ...
 def get_unpack_formats() -> List[Tuple[str, List[str], str]]: ...
+
+def which(cmd: str, mode: int = ..., path: str = ...): ...
diff --git a/typeshed/stdlib/3/ssl.pyi b/typeshed/stdlib/3/ssl.pyi
index 046a2c4..ce77002 100644
--- a/typeshed/stdlib/3/ssl.pyi
+++ b/typeshed/stdlib/3/ssl.pyi
@@ -46,6 +46,17 @@ if sys.version_info >= (3, 4):
                                capath: Optional[str] = ...,
                                cadata: Optional[str] = ...) -> 'SSLContext': ...
 
+if sys.version_info >= (3, 4, 3):
+    def _create_unverified_context(protocol: int = ..., *,
+                                   cert_reqs: int = ...,
+                                   check_hostname: bool = ...,
+                                   purpose: Any = ...,
+                                   certfile: Optional[str] = ...,
+                                   keyfile: Optional[str] = ...,
+                                   cafile: Optional[str] = ...,
+                                   capath: Optional[str] = ...,
+                                   cadata: Optional[str] = ...) -> 'SSLContext': ...
+    _create_default_https_context = ... # type: Callable[..., 'SSLContext']
 
 def RAND_bytes(num: int) -> bytes: ...
 def RAND_pseudo_bytes(num: int) -> Tuple[bytes, bool]: ...
@@ -208,8 +219,8 @@ class SSLContext:
     def set_default_verify_paths(self) -> None: ...
     def set_ciphers(self, ciphers: str) -> None: ...
     if sys.version_info >= (3, 5):
-        def set_alpn_protocols(protocols: List[str]) -> None: ...
-    def set_npn_protocols(protocols: List[str]) -> None: ...
+        def set_alpn_protocols(self, protocols: List[str]) -> None: ...
+    def set_npn_protocols(self, protocols: List[str]) -> None: ...
     def set_servername_callback(self,
                                 server_name_callback: Optional[_SrvnmeCbType]) \
                                 -> None: ...
@@ -220,7 +231,7 @@ class SSLContext:
                     suppress_ragged_eofs: bool = ...,
                     server_hostname: Optional[str] = ...) -> 'SSLContext': ...
     if sys.version_info >= (3, 5):
-        def wrap_bio(incoming: 'MemoryBIO', outgoing: 'MemoryBIO',
+        def wrap_bio(self, incoming: 'MemoryBIO', outgoing: 'MemoryBIO',
                      server_side: bool = ...,
                      server_hostname: Optional[str] = ...) -> 'SSLObject': ...
     def session_stats(self) -> Dict[str, int]: ...
diff --git a/typeshed/stdlib/3/struct.pyi b/typeshed/stdlib/3/struct.pyi
index f41164e..f539610 100644
--- a/typeshed/stdlib/3/struct.pyi
+++ b/typeshed/stdlib/3/struct.pyi
@@ -2,25 +2,25 @@
 
 # Based on http://docs.python.org/3.2/library/struct.html
 
-from typing import overload, Any, AnyStr, Tuple
+from typing import overload, Any, Tuple
 
 class error(Exception): ...
 
-def pack(fmt: AnyStr, *v: Any) -> bytes: ...
+def pack(fmt: str, *v: Any) -> bytes: ...
 # TODO buffer type
-def pack_into(fmt: AnyStr, buffer: Any, offset: int, *v: Any) -> None: ...
+def pack_into(fmt: str, buffer: Any, offset: int, *v: Any) -> None: ...
 
 # TODO buffer type
-def unpack(fmt: AnyStr, buffer: Any) -> Tuple[Any, ...]: ...
-def unpack_from(fmt: AnyStr, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
+def unpack(fmt: str, buffer: Any) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: str, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
 
-def calcsize(fmt: AnyStr) -> int: ...
+def calcsize(fmt: str) -> int: ...
 
 class Struct:
     format = b''
     size = 0
 
-    def __init__(self, format: AnyStr) -> None: ...
+    def __init__(self, format: str) -> None: ...
 
     def pack(self, *v: Any) -> bytes: ...
     # TODO buffer type
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index 8af789f..11b108f 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -12,12 +12,12 @@ if sys.version_info >= (3, 5):
         returncode = ... # type: int
         stdout = ... # type: Union[str, bytes]
         stderr = ... # type: Union[str, bytes]
-        def __init__(self, args: Union[List, str], 
-                     returncode: int, 
-                     stdout: Union[str, bytes], 
+        def __init__(self, args: Union[List, str],
+                     returncode: int,
+                     stdout: Union[str, bytes],
                      stderr: Union[str, bytes]) -> None: ...
         def check_returncode(self) -> None: ...
-    
+
     # Nearly same args as Popen.__init__ except for timeout, input, and check
     def run(args: Union[str, Sequence[str]],
 	    timeout: float = ...,
@@ -185,6 +185,7 @@ STDOUT = ... # type: Any
 if sys.version_info >= (3, 3):
     DEVNULL = ...  # type: Any
     class SubprocessError(Exception): ...
+    class TimeoutExpired(SubprocessError): ...
 
 
 class CalledProcessError(Exception):
@@ -226,7 +227,11 @@ class Popen:
                   pass_fds: Any = ...) -> None: ...
 
     def poll(self) -> int: ...
-    def wait(self) -> int: ...
+    if sys.version_info >= (3, 3):
+        # 3.3 added timeout
+        def wait(self, timeout: Optional[float] = ...) -> int: ...
+    else:
+        def wait(self) -> int: ...
     # Return str/bytes
     if sys.version_info >= (3, 3):
         def communicate(self, input: Union[str, bytes] = ..., timeout: float = ...) -> Tuple[Any, Any]: ...
diff --git a/typeshed/stdlib/3/time.pyi b/typeshed/stdlib/3/time.pyi
index f77f9ba..72140c3 100644
--- a/typeshed/stdlib/3/time.pyi
+++ b/typeshed/stdlib/3/time.pyi
@@ -8,6 +8,8 @@ import sys
 from typing import Tuple, Union
 from types import SimpleNamespace
 
+TimeTuple = Tuple[int, int, int, int, int, int, int, int, int]
+
 # ----- variables and constants -----
 accept2dyear = False
 altzone = 0
@@ -47,21 +49,15 @@ class struct_time:
         tm_zone = 'GMT'
 
 # ----- functions -----
-def asctime(t: Union[Tuple[int, int, int, int, int, int, int, int, int],
-                     struct_time,
-                     None] = ...) -> str: ...  # return current time
+def asctime(t: Union[TimeTuple, struct_time, None] = ...) -> str: ...  # return current time
 def clock() -> float: ...
 def ctime(secs: Union[float, None] = ...) -> str: ...  # return current time
 def gmtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
 def localtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
-def mktime(t: Union[Tuple[int, int, int, int, int,
-                          int, int, int, int],
-                    struct_time]) -> float: ...
+def mktime(t: Union[TimeTuple, struct_time]) -> float: ...
 def sleep(secs: Union[int, float]) -> None: ...
-def strftime(format: str, t: Union[Tuple[int, int, int, int, int,
-                                         int, int, int, int],
-                                   struct_time,
-                                   None] = ...) -> str: ...  # return current time
+def strftime(format: str,
+             t: Union[TimeTuple, struct_time, None] = ...) -> str: ...  # return current time
 def strptime(string: str,
              format: str = ...) -> struct_time: ...
 def time() -> float: ...
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index 1026cb1..4a0da75 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -21,7 +21,6 @@ class FunctionType:
     __code__ = ...  # type: CodeType
     __defaults__ = ...  # type: Optional[Tuple[Any, ...]]
     __dict__ = ...  # type: Dict[str, Any]
-    __doc__ = ...  # type: Optional[str]
     __globals__ = ...  # type: Dict[str, Any]
     __name__ = ...  # type: str
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 9629020..96b09be 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -13,7 +13,6 @@ Generic = object()
 Tuple = object()
 Callable = object()
 Type = object()
-builtinclass = object()
 _promote = object()
 no_type_check = object()
 
@@ -103,7 +102,9 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
     def send(self, value: _T_contra) -> _T_co:...
 
     @abstractmethod
-    def throw(self, typ: BaseException, val: Any = None, tb: Any = None) -> None:...
+    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
+              # TODO: tb should be TracebackType but that's defined in types
+              tb: Any = None) -> None:...
 
     @abstractmethod
     def close(self) -> None:...
@@ -111,13 +112,30 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
     @abstractmethod
     def __iter__(self) -> 'Generator[_T_co, _T_contra, _V_co]': ...
 
+# TODO: Several types should only be defined if sys.python_version >= (3, 5):
+# Awaitable, AsyncIterator, AsyncIterable, Coroutine, Collection, ContextManager.
+# See https://github.com/python/typeshed/issues/655 for why this is not easy.
+
 class Awaitable(Generic[_T_co]):
     @abstractmethod
     def __await__(self) -> Generator[Any, None, _T_co]:...
 
+class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]):
+    @abstractmethod
+    def send(self, value: _T_contra) -> _T_co:...
+
+    @abstractmethod
+    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
+              # TODO: tb should be TracebackType but that's defined in types
+              tb: Any = None) -> None:...
+
+    @abstractmethod
+    def close(self) -> None:...
+
+
 # NOTE: This type does not exist in typing.py or PEP 484.
 # The parameters correspond to Generator, but the 4th is the original type.
-class AwaitableGenerator(Generator[_T_co, _T_contra, _V_co], Awaitable[_T_co],
+class AwaitableGenerator(Generator[_T_co, _T_contra, _V_co], Awaitable[_V_co],
                          Generic[_T_co, _T_contra, _V_co, _S]):
     pass
 
@@ -341,7 +359,7 @@ class TextIO(IO[str]):
     @property
     def encoding(self) -> str: ...
     @property
-    def errors(self) -> str: ...
+    def errors(self) -> Optional[str]: ...
     @property
     def line_buffering(self) -> int: ...  # int on PyPy, bool on CPython
     @property
diff --git a/typeshed/stdlib/3/unittest.pyi b/typeshed/stdlib/3/unittest.pyi
index 933e8f1..ff64944 100644
--- a/typeshed/stdlib/3/unittest.pyi
+++ b/typeshed/stdlib/3/unittest.pyi
@@ -138,7 +138,7 @@ class TestCase:
     def defaultTestResult(self) -> TestResult: ...
     def id(self) -> str: ...
     def shortDescription(self) -> Optional[str]: ...
-    def addCleanup(function: Callable[..., Any], *args: Any,
+    def addCleanup(self, function: Callable[..., Any], *args: Any,
                    **kwargs: Any) -> None: ...
     def doCleanups(self) -> None: ...
     # below is deprecated
diff --git a/typeshed/stdlib/3/urllib/request.pyi b/typeshed/stdlib/3/urllib/request.pyi
index 6786510..4efa4aa 100644
--- a/typeshed/stdlib/3/urllib/request.pyi
+++ b/typeshed/stdlib/3/urllib/request.pyi
@@ -27,7 +27,7 @@ def build_opener(*handlers: Union[BaseHandler, Callable[[], BaseHandler]]) \
 def url2pathname(path: str) -> str: ...
 def pathname2url(path: str) -> str: ...
 def getproxies() -> Dict[str, str]: ...
-
+def parse_http_list(s: str) -> List[str]: ...
 
 class Request:
     if sys.version_info >= (3, 4):
@@ -74,7 +74,7 @@ class BaseHandler:
     parent = ...  # type: OpenerDirector
     def add_parent(self, parent: OpenerDirector) -> None: ...
     def close(self) -> None: ...
-    def http_error_nnn(req: Request, fp: IO[str], code: int, msg: int,
+    def http_error_nnn(self, req: Request, fp: IO[str], code: int, msg: int,
                        hdrs: Mapping[str, str]) -> _UrlopenRet: ...
 
 class HTTPDefaultErrorHandler(BaseHandler): ...
@@ -117,9 +117,9 @@ if sys.version_info >= (3, 5):
         def add_password(self, realm: str, uri: Union[str, Sequence[str]],
                          user: str, passwd: str,
                          is_authenticated: bool = ...) -> None: ...
-        def update_authenticated(uri: Union[str, Sequence[str]],
+        def update_authenticated(self, uri: Union[str, Sequence[str]],
                                  is_authenticated: bool = ...) -> None: ...
-        def is_authenticated(authuri: str) -> bool: ...
+        def is_authenticated(self, authuri: str) -> bool: ...
 
 class AbstractBasicAuthHandler:
     def __init__(self,
@@ -191,7 +191,7 @@ class URLopener:
     def open(self, fullurl: str, data: Optional[bytes] = ...) -> _UrlopenRet: ...
     def open_unknown(self, fullurl: str,
                      data: Optional[bytes] = ...) -> _UrlopenRet: ...
-    def retrieve(url: str, filename: Optional[str] = ...,
+    def retrieve(self, url: str, filename: Optional[str] = ...,
                  reporthook: Optional[Callable[[int, int, int], None]] = ...,
                  data: Optional[bytes] = ...) -> Tuple[str, Optional[Message]]: ...
 
diff --git a/typeshed/stdlib/3/urllib/robotparser.pyi b/typeshed/stdlib/3/urllib/robotparser.pyi
index e8403f7..11ead4f 100644
--- a/typeshed/stdlib/3/urllib/robotparser.pyi
+++ b/typeshed/stdlib/3/urllib/robotparser.pyi
@@ -6,7 +6,7 @@ import sys
 _RequestRate = NamedTuple('_RequestRate', [('requests', int), ('seconds', int)])
 
 class RobotFileParser:
-    def __init__(url: str = ...) -> None: ...
+    def __init__(self, url: str = ...) -> None: ...
     def set_url(self, url: str) -> None: ...
     def read(self) -> None: ...
     def parse(self, lines: Iterable[str]) -> None: ...
diff --git a/typeshed/third_party/2.7/tornado/__init__.pyi b/typeshed/third_party/2.7/tornado/__init__.pyi
deleted file mode 100644
index e69de29..0000000
diff --git a/typeshed/third_party/2.7/Crypto/Cipher/AES.pyi b/typeshed/third_party/2/Crypto/Cipher/AES.pyi
similarity index 100%
rename from typeshed/third_party/2.7/Crypto/Cipher/AES.pyi
rename to typeshed/third_party/2/Crypto/Cipher/AES.pyi
diff --git a/typeshed/third_party/2.7/Crypto/Cipher/__init__.pyi b/typeshed/third_party/2/Crypto/Cipher/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/Crypto/Cipher/__init__.pyi
rename to typeshed/third_party/2/Crypto/Cipher/__init__.pyi
diff --git a/typeshed/third_party/2.7/Crypto/Random/__init__.pyi b/typeshed/third_party/2/Crypto/Random/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/Crypto/Random/__init__.pyi
rename to typeshed/third_party/2/Crypto/Random/__init__.pyi
diff --git a/typeshed/third_party/2.7/Crypto/Random/random.pyi b/typeshed/third_party/2/Crypto/Random/random.pyi
similarity index 100%
rename from typeshed/third_party/2.7/Crypto/Random/random.pyi
rename to typeshed/third_party/2/Crypto/Random/random.pyi
diff --git a/typeshed/stdlib/3/xml/__init__.pyi b/typeshed/third_party/2/Crypto/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3/xml/__init__.pyi
rename to typeshed/third_party/2/Crypto/__init__.pyi
diff --git a/typeshed/third_party/2.7/Crypto/__init__.pyi b/typeshed/third_party/2/OpenSSL/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/Crypto/__init__.pyi
rename to typeshed/third_party/2/OpenSSL/__init__.pyi
diff --git a/typeshed/third_party/2.7/OpenSSL/crypto.pyi b/typeshed/third_party/2/OpenSSL/crypto.pyi
similarity index 100%
rename from typeshed/third_party/2.7/OpenSSL/crypto.pyi
rename to typeshed/third_party/2/OpenSSL/crypto.pyi
diff --git a/typeshed/third_party/2.7/boto/__init__.pyi b/typeshed/third_party/2/boto/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/boto/__init__.pyi
rename to typeshed/third_party/2/boto/__init__.pyi
diff --git a/typeshed/third_party/2.7/boto/connection.pyi b/typeshed/third_party/2/boto/connection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/boto/connection.pyi
rename to typeshed/third_party/2/boto/connection.pyi
diff --git a/typeshed/third_party/2.7/boto/ec2/__init__.pyi b/typeshed/third_party/2/boto/ec2/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/boto/ec2/__init__.pyi
rename to typeshed/third_party/2/boto/ec2/__init__.pyi
diff --git a/typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi b/typeshed/third_party/2/boto/ec2/elb/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/boto/ec2/elb/__init__.pyi
rename to typeshed/third_party/2/boto/ec2/elb/__init__.pyi
diff --git a/typeshed/third_party/2.7/boto/exception.pyi b/typeshed/third_party/2/boto/exception.pyi
similarity index 100%
rename from typeshed/third_party/2.7/boto/exception.pyi
rename to typeshed/third_party/2/boto/exception.pyi
diff --git a/typeshed/third_party/2.7/OpenSSL/__init__.pyi b/typeshed/third_party/2/concurrent/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/OpenSSL/__init__.pyi
rename to typeshed/third_party/2/concurrent/__init__.pyi
diff --git a/typeshed/third_party/2.7/concurrent/futures/__init__.pyi b/typeshed/third_party/2/concurrent/futures/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/concurrent/futures/__init__.pyi
rename to typeshed/third_party/2/concurrent/futures/__init__.pyi
diff --git a/typeshed/third_party/2.7/croniter.pyi b/typeshed/third_party/2/croniter.pyi
similarity index 90%
rename from typeshed/third_party/2.7/croniter.pyi
rename to typeshed/third_party/2/croniter.pyi
index e511cf0..01f344b 100644
--- a/typeshed/third_party/2.7/croniter.pyi
+++ b/typeshed/third_party/2/croniter.pyi
@@ -15,7 +15,7 @@ class croniter:
     cur = ... # type: Any
     exprs = ... # type: Any
     expanded = ... # type: Any
-    def __init__(self, expr_format, start_time=...) -> None: ...
+    def __init__(self, expr_format, start_time=..., ret_type=...) -> None: ...
     def get_next(self, ret_type=...): ...
     def get_prev(self, ret_type=...): ...
     def get_current(self, ret_type=...): ...
diff --git a/typeshed/third_party/2.7/concurrent/__init__.pyi b/typeshed/third_party/2/dateutil/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/concurrent/__init__.pyi
rename to typeshed/third_party/2/dateutil/__init__.pyi
diff --git a/typeshed/third_party/2.7/dateutil/parser.pyi b/typeshed/third_party/2/dateutil/parser.pyi
similarity index 100%
rename from typeshed/third_party/2.7/dateutil/parser.pyi
rename to typeshed/third_party/2/dateutil/parser.pyi
diff --git a/typeshed/third_party/2.7/dateutil/relativedelta.pyi b/typeshed/third_party/2/dateutil/relativedelta.pyi
similarity index 100%
rename from typeshed/third_party/2.7/dateutil/relativedelta.pyi
rename to typeshed/third_party/2/dateutil/relativedelta.pyi
diff --git a/typeshed/third_party/2.7/enum.pyi b/typeshed/third_party/2/enum.pyi
similarity index 100%
rename from typeshed/third_party/2.7/enum.pyi
rename to typeshed/third_party/2/enum.pyi
diff --git a/typeshed/third_party/2.7/fb303/FacebookService.pyi b/typeshed/third_party/2/fb303/FacebookService.pyi
similarity index 100%
rename from typeshed/third_party/2.7/fb303/FacebookService.pyi
rename to typeshed/third_party/2/fb303/FacebookService.pyi
diff --git a/typeshed/third_party/2.7/dateutil/__init__.pyi b/typeshed/third_party/2/fb303/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/dateutil/__init__.pyi
rename to typeshed/third_party/2/fb303/__init__.pyi
diff --git a/typeshed/third_party/2.7/gflags.pyi b/typeshed/third_party/2/gflags.pyi
similarity index 100%
rename from typeshed/third_party/2.7/gflags.pyi
rename to typeshed/third_party/2/gflags.pyi
diff --git a/typeshed/third_party/2.7/fb303/__init__.pyi b/typeshed/third_party/2/google/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/fb303/__init__.pyi
rename to typeshed/third_party/2/google/__init__.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/__init__.pyi b/typeshed/third_party/2/google/protobuf/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/__init__.pyi
rename to typeshed/third_party/2/google/protobuf/__init__.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/descriptor.pyi b/typeshed/third_party/2/google/protobuf/descriptor.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/descriptor.pyi
rename to typeshed/third_party/2/google/protobuf/descriptor.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/descriptor_pb2.pyi b/typeshed/third_party/2/google/protobuf/descriptor_pb2.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/descriptor_pb2.pyi
rename to typeshed/third_party/2/google/protobuf/descriptor_pb2.pyi
diff --git a/typeshed/third_party/2.7/google/__init__.pyi b/typeshed/third_party/2/google/protobuf/internal/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/__init__.pyi
rename to typeshed/third_party/2/google/protobuf/internal/__init__.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/decoder.pyi b/typeshed/third_party/2/google/protobuf/internal/decoder.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/internal/decoder.pyi
rename to typeshed/third_party/2/google/protobuf/internal/decoder.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/encoder.pyi b/typeshed/third_party/2/google/protobuf/internal/encoder.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/internal/encoder.pyi
rename to typeshed/third_party/2/google/protobuf/internal/encoder.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/enum_type_wrapper.pyi b/typeshed/third_party/2/google/protobuf/internal/enum_type_wrapper.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/internal/enum_type_wrapper.pyi
rename to typeshed/third_party/2/google/protobuf/internal/enum_type_wrapper.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/wire_format.pyi b/typeshed/third_party/2/google/protobuf/internal/wire_format.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/internal/wire_format.pyi
rename to typeshed/third_party/2/google/protobuf/internal/wire_format.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/message.pyi b/typeshed/third_party/2/google/protobuf/message.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/message.pyi
rename to typeshed/third_party/2/google/protobuf/message.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/reflection.pyi b/typeshed/third_party/2/google/protobuf/reflection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/reflection.pyi
rename to typeshed/third_party/2/google/protobuf/reflection.pyi
diff --git a/typeshed/third_party/2.7/itsdangerous.pyi b/typeshed/third_party/2/itsdangerous.pyi
similarity index 100%
rename from typeshed/third_party/2.7/itsdangerous.pyi
rename to typeshed/third_party/2/itsdangerous.pyi
diff --git a/typeshed/third_party/2.7/kazoo/__init__.pyi b/typeshed/third_party/2/kazoo/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/kazoo/__init__.pyi
rename to typeshed/third_party/2/kazoo/__init__.pyi
diff --git a/typeshed/third_party/2.7/kazoo/client.pyi b/typeshed/third_party/2/kazoo/client.pyi
similarity index 100%
rename from typeshed/third_party/2.7/kazoo/client.pyi
rename to typeshed/third_party/2/kazoo/client.pyi
diff --git a/typeshed/third_party/2.7/kazoo/exceptions.pyi b/typeshed/third_party/2/kazoo/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2.7/kazoo/exceptions.pyi
rename to typeshed/third_party/2/kazoo/exceptions.pyi
diff --git a/typeshed/third_party/2.7/kazoo/recipe/__init__.pyi b/typeshed/third_party/2/kazoo/recipe/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/kazoo/recipe/__init__.pyi
rename to typeshed/third_party/2/kazoo/recipe/__init__.pyi
diff --git a/typeshed/third_party/2.7/kazoo/recipe/watchers.pyi b/typeshed/third_party/2/kazoo/recipe/watchers.pyi
similarity index 100%
rename from typeshed/third_party/2.7/kazoo/recipe/watchers.pyi
rename to typeshed/third_party/2/kazoo/recipe/watchers.pyi
diff --git a/typeshed/third_party/2.7/pycurl.pyi b/typeshed/third_party/2/pycurl.pyi
similarity index 100%
rename from typeshed/third_party/2.7/pycurl.pyi
rename to typeshed/third_party/2/pycurl.pyi
diff --git a/typeshed/third_party/2.7/pymssql.pyi b/typeshed/third_party/2/pymssql.pyi
similarity index 96%
rename from typeshed/third_party/2.7/pymssql.pyi
rename to typeshed/third_party/2/pymssql.pyi
index c11b9c4..3f697ab 100644
--- a/typeshed/third_party/2.7/pymssql.pyi
+++ b/typeshed/third_party/2/pymssql.pyi
@@ -18,7 +18,7 @@ class Cursor(object):
     def __init__(self) -> None: ...
     def __iter__(self): ...
     def __next__(self) -> Any: ...
-    def callproc(procname: str, **kwargs) -> None: ...
+    def callproc(self, procname: str, **kwargs) -> None: ...
     def close(self) -> None: ...
     def execute(self, stmt: str,
                 params: Optional[Union[Scalar, Tuple[Scalar, ...],
diff --git a/typeshed/third_party/2.7/redis/__init__.pyi b/typeshed/third_party/2/redis/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/redis/__init__.pyi
rename to typeshed/third_party/2/redis/__init__.pyi
diff --git a/typeshed/third_party/2.7/redis/client.pyi b/typeshed/third_party/2/redis/client.pyi
similarity index 100%
rename from typeshed/third_party/2.7/redis/client.pyi
rename to typeshed/third_party/2/redis/client.pyi
diff --git a/typeshed/third_party/2.7/redis/connection.pyi b/typeshed/third_party/2/redis/connection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/redis/connection.pyi
rename to typeshed/third_party/2/redis/connection.pyi
diff --git a/typeshed/third_party/2.7/redis/exceptions.pyi b/typeshed/third_party/2/redis/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2.7/redis/exceptions.pyi
rename to typeshed/third_party/2/redis/exceptions.pyi
diff --git a/typeshed/third_party/2.7/redis/utils.pyi b/typeshed/third_party/2/redis/utils.pyi
similarity index 100%
rename from typeshed/third_party/2.7/redis/utils.pyi
rename to typeshed/third_party/2/redis/utils.pyi
diff --git a/typeshed/third_party/2.7/requests/__init__.pyi b/typeshed/third_party/2/requests/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/__init__.pyi
rename to typeshed/third_party/2/requests/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/adapters.pyi b/typeshed/third_party/2/requests/adapters.pyi
similarity index 86%
rename from typeshed/third_party/2.7/requests/adapters.pyi
rename to typeshed/third_party/2/requests/adapters.pyi
index 109dc9a..00cc000 100644
--- a/typeshed/third_party/2.7/requests/adapters.pyi
+++ b/typeshed/third_party/2/requests/adapters.pyi
@@ -1,6 +1,6 @@
 # Stubs for requests.adapters (Python 3)
 
-from typing import Any
+from typing import Any, Container, Union, Tuple
 from . import models
 from .packages.urllib3 import poolmanager
 from .packages.urllib3 import response
@@ -13,6 +13,7 @@ from . import cookies
 from . import exceptions
 from . import auth
 
+PreparedRequest = models.PreparedRequest
 Response = models.Response
 PoolManager = poolmanager.PoolManager
 proxy_from_url = poolmanager.proxy_from_url
@@ -43,9 +44,11 @@ DEFAULT_RETRIES = ...  # type: Any
 
 class BaseAdapter:
     def __init__(self) -> None: ...
-    # TODO: "request" parameter not actually supported, added to please mypy.
-    def send(self, request=...): ...
-    def close(self): ...
+    def send(self, request: PreparedRequest, stream=False,
+             timeout: Union[None, float, Tuple[float, float]]=None,
+             verify=False,
+             cert: Union[None, Union[str, unicode, bytes], Container[Union[str, unicode]]]=None) -> Response: ...
+    def close(self) -> None: ...
 
 class HTTPAdapter(BaseAdapter):
     __attrs__ = ...  # type: Any
diff --git a/typeshed/third_party/2.7/requests/api.pyi b/typeshed/third_party/2/requests/api.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/api.pyi
rename to typeshed/third_party/2/requests/api.pyi
diff --git a/typeshed/third_party/2.7/requests/auth.pyi b/typeshed/third_party/2/requests/auth.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/auth.pyi
rename to typeshed/third_party/2/requests/auth.pyi
diff --git a/typeshed/third_party/2.7/requests/compat.pyi b/typeshed/third_party/2/requests/compat.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/compat.pyi
rename to typeshed/third_party/2/requests/compat.pyi
diff --git a/typeshed/third_party/2.7/requests/cookies.pyi b/typeshed/third_party/2/requests/cookies.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/cookies.pyi
rename to typeshed/third_party/2/requests/cookies.pyi
diff --git a/typeshed/third_party/2.7/requests/exceptions.pyi b/typeshed/third_party/2/requests/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/exceptions.pyi
rename to typeshed/third_party/2/requests/exceptions.pyi
diff --git a/typeshed/third_party/2.7/requests/hooks.pyi b/typeshed/third_party/2/requests/hooks.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/hooks.pyi
rename to typeshed/third_party/2/requests/hooks.pyi
diff --git a/typeshed/third_party/2.7/requests/models.pyi b/typeshed/third_party/2/requests/models.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/models.pyi
rename to typeshed/third_party/2/requests/models.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/__init__.pyi b/typeshed/third_party/2/requests/packages/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/__init__.pyi
rename to typeshed/third_party/2/requests/packages/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/__init__.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/_collections.pyi b/typeshed/third_party/2/requests/packages/urllib3/_collections.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/_collections.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/_collections.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/connection.pyi b/typeshed/third_party/2/requests/packages/urllib3/connection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/connection.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/connection.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/connectionpool.pyi b/typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/connectionpool.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/contrib/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/contrib/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/contrib/__init__.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/contrib/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/exceptions.pyi b/typeshed/third_party/2/requests/packages/urllib3/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/exceptions.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/exceptions.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/fields.pyi b/typeshed/third_party/2/requests/packages/urllib3/fields.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/fields.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/fields.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/filepost.pyi b/typeshed/third_party/2/requests/packages/urllib3/filepost.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/filepost.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/filepost.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/packages/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/packages/__init__.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/packages/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/poolmanager.pyi b/typeshed/third_party/2/requests/packages/urllib3/poolmanager.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/poolmanager.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/poolmanager.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/request.pyi b/typeshed/third_party/2/requests/packages/urllib3/request.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/request.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/request.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/response.pyi b/typeshed/third_party/2/requests/packages/urllib3/response.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/response.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/response.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/__init__.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/connection.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/connection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/connection.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/connection.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/request.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/request.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/request.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/request.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/response.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/response.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/response.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/response.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/retry.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/retry.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/retry.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/retry.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/timeout.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/timeout.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/timeout.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/timeout.pyi
diff --git a/typeshed/third_party/2.7/requests/packages/urllib3/util/url.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/url.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/packages/urllib3/util/url.pyi
rename to typeshed/third_party/2/requests/packages/urllib3/util/url.pyi
diff --git a/typeshed/third_party/2.7/requests/sessions.pyi b/typeshed/third_party/2/requests/sessions.pyi
similarity index 97%
rename from typeshed/third_party/2.7/requests/sessions.pyi
rename to typeshed/third_party/2/requests/sessions.pyi
index 35ea3f7..9387f77 100644
--- a/typeshed/third_party/2.7/requests/sessions.pyi
+++ b/typeshed/third_party/2/requests/sessions.pyi
@@ -1,6 +1,7 @@
 # Stubs for requests.sessions (Python 2.7)
 
 from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable
+from . import adapters
 from . import auth
 from . import compat
 from . import cookies
@@ -14,6 +15,7 @@ from . import structures
 from . import adapters
 from . import status_codes
 
+BaseAdapter = adapters.BaseAdapter
 OrderedDict = compat.OrderedDict
 cookiejar_from_dict = cookies.cookiejar_from_dict
 extract_cookies_to_jar = cookies.extract_cookies_to_jar
@@ -99,6 +101,6 @@ class Session(SessionRedirectMixin):
     def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
     def get_adapter(self, url): ...
     def close(self) -> None: ...
-    def mount(self, prefix, adapter): ...
+    def mount(self, prefix: str, adapter: BaseAdapter) -> None: ...
 
 def session() -> Session: ...
diff --git a/typeshed/third_party/2.7/requests/status_codes.pyi b/typeshed/third_party/2/requests/status_codes.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/status_codes.pyi
rename to typeshed/third_party/2/requests/status_codes.pyi
diff --git a/typeshed/third_party/2.7/requests/structures.pyi b/typeshed/third_party/2/requests/structures.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/structures.pyi
rename to typeshed/third_party/2/requests/structures.pyi
diff --git a/typeshed/third_party/2.7/requests/utils.pyi b/typeshed/third_party/2/requests/utils.pyi
similarity index 100%
rename from typeshed/third_party/2.7/requests/utils.pyi
rename to typeshed/third_party/2/requests/utils.pyi
diff --git a/typeshed/third_party/2.7/routes/__init__.pyi b/typeshed/third_party/2/routes/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/routes/__init__.pyi
rename to typeshed/third_party/2/routes/__init__.pyi
diff --git a/typeshed/third_party/2.7/routes/mapper.pyi b/typeshed/third_party/2/routes/mapper.pyi
similarity index 100%
rename from typeshed/third_party/2.7/routes/mapper.pyi
rename to typeshed/third_party/2/routes/mapper.pyi
diff --git a/typeshed/third_party/2.7/routes/util.pyi b/typeshed/third_party/2/routes/util.pyi
similarity index 100%
rename from typeshed/third_party/2.7/routes/util.pyi
rename to typeshed/third_party/2/routes/util.pyi
diff --git a/typeshed/third_party/2.7/google/protobuf/internal/__init__.pyi b/typeshed/third_party/2/scribe/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/google/protobuf/internal/__init__.pyi
rename to typeshed/third_party/2/scribe/__init__.pyi
diff --git a/typeshed/third_party/2.7/scribe/scribe.pyi b/typeshed/third_party/2/scribe/scribe.pyi
similarity index 100%
rename from typeshed/third_party/2.7/scribe/scribe.pyi
rename to typeshed/third_party/2/scribe/scribe.pyi
diff --git a/typeshed/third_party/2.7/scribe/ttypes.pyi b/typeshed/third_party/2/scribe/ttypes.pyi
similarity index 100%
rename from typeshed/third_party/2.7/scribe/ttypes.pyi
rename to typeshed/third_party/2/scribe/ttypes.pyi
diff --git a/typeshed/third_party/2.7/selenium/webdriver/remote/webdriver.pyi b/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi
similarity index 100%
rename from typeshed/third_party/2.7/selenium/webdriver/remote/webdriver.pyi
rename to typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi
diff --git a/typeshed/third_party/2.7/selenium/webdriver/remote/webelement.pyi b/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi
similarity index 100%
rename from typeshed/third_party/2.7/selenium/webdriver/remote/webelement.pyi
rename to typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi
diff --git a/typeshed/third_party/2.7/six/__init__.pyi b/typeshed/third_party/2/six/__init__.pyi
similarity index 91%
rename from typeshed/third_party/2.7/six/__init__.pyi
rename to typeshed/third_party/2/six/__init__.pyi
index 5f5383b..aadf056 100644
--- a/typeshed/third_party/2.7/six/__init__.pyi
+++ b/typeshed/third_party/2/six/__init__.pyi
@@ -4,7 +4,7 @@ from __future__ import print_function
 
 from typing import (
     Any, AnyStr, Callable, Iterable, Mapping, Optional,
-    Pattern, Tuple, TypeVar, Union, overload,
+    Pattern, Tuple, TypeVar, Union, overload, ValuesView, KeysView, ItemsView
 )
 import typing
 
@@ -56,10 +56,9 @@ def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ...
 def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ...
 #def iterlists
 
-# TODO fix return types - python2 typing doesn't include KeysView etc yet.
-def viewkeys(d: Mapping[_K, _V]) -> Iterable[_K]: ...
-def viewvalues(d: Mapping[_K, _V]) -> Iterable[_V]: ...
-def viewitems(d: Mapping[_K, _V]) -> Iterable[Tuple[_K, _V]]: ...
+def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ...
+def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ...
+def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ...
 
 def b(s: str) -> binary_type: ...
 def u(s: str) -> text_type: ...
diff --git a/typeshed/third_party/2.7/six/moves/__init__.pyi b/typeshed/third_party/2/six/moves/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/__init__.pyi
rename to typeshed/third_party/2/six/moves/__init__.pyi
diff --git a/typeshed/third_party/2.7/six/moves/cPickle.pyi b/typeshed/third_party/2/six/moves/cPickle.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/cPickle.pyi
rename to typeshed/third_party/2/six/moves/cPickle.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/__init__.pyi b/typeshed/third_party/2/six/moves/urllib/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/__init__.pyi
rename to typeshed/third_party/2/six/moves/urllib/__init__.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/error.pyi b/typeshed/third_party/2/six/moves/urllib/error.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/error.pyi
rename to typeshed/third_party/2/six/moves/urllib/error.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/parse.pyi b/typeshed/third_party/2/six/moves/urllib/parse.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/parse.pyi
rename to typeshed/third_party/2/six/moves/urllib/parse.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/request.pyi b/typeshed/third_party/2/six/moves/urllib/request.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/request.pyi
rename to typeshed/third_party/2/six/moves/urllib/request.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/response.pyi b/typeshed/third_party/2/six/moves/urllib/response.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/response.pyi
rename to typeshed/third_party/2/six/moves/urllib/response.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib/robotparser.pyi b/typeshed/third_party/2/six/moves/urllib/robotparser.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib/robotparser.pyi
rename to typeshed/third_party/2/six/moves/urllib/robotparser.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib_error.pyi b/typeshed/third_party/2/six/moves/urllib_error.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib_error.pyi
rename to typeshed/third_party/2/six/moves/urllib_error.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib_parse.pyi b/typeshed/third_party/2/six/moves/urllib_parse.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib_parse.pyi
rename to typeshed/third_party/2/six/moves/urllib_parse.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib_request.pyi b/typeshed/third_party/2/six/moves/urllib_request.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib_request.pyi
rename to typeshed/third_party/2/six/moves/urllib_request.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib_response.pyi b/typeshed/third_party/2/six/moves/urllib_response.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib_response.pyi
rename to typeshed/third_party/2/six/moves/urllib_response.pyi
diff --git a/typeshed/third_party/2.7/six/moves/urllib_robotparser.pyi b/typeshed/third_party/2/six/moves/urllib_robotparser.pyi
similarity index 100%
rename from typeshed/third_party/2.7/six/moves/urllib_robotparser.pyi
rename to typeshed/third_party/2/six/moves/urllib_robotparser.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/__init__.pyi b/typeshed/third_party/2/sqlalchemy/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/databases/__init__.pyi b/typeshed/third_party/2/sqlalchemy/databases/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/databases/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/databases/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/databases/mysql.pyi b/typeshed/third_party/2/sqlalchemy/databases/mysql.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/databases/mysql.pyi
rename to typeshed/third_party/2/sqlalchemy/databases/mysql.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/__init__.pyi b/typeshed/third_party/2/sqlalchemy/dialects/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/dialects/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/dialects/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi b/typeshed/third_party/2/sqlalchemy/dialects/mysql/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/dialects/mysql/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi b/typeshed/third_party/2/sqlalchemy/dialects/mysql/base.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
rename to typeshed/third_party/2/sqlalchemy/dialects/mysql/base.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi b/typeshed/third_party/2/sqlalchemy/engine/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/engine/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi b/typeshed/third_party/2/sqlalchemy/engine/base.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
rename to typeshed/third_party/2/sqlalchemy/engine/base.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi b/typeshed/third_party/2/sqlalchemy/engine/strategies.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
rename to typeshed/third_party/2/sqlalchemy/engine/strategies.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi b/typeshed/third_party/2/sqlalchemy/engine/url.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
rename to typeshed/third_party/2/sqlalchemy/engine/url.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/exc.pyi b/typeshed/third_party/2/sqlalchemy/exc.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/exc.pyi
rename to typeshed/third_party/2/sqlalchemy/exc.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/inspection.pyi b/typeshed/third_party/2/sqlalchemy/inspection.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/inspection.pyi
rename to typeshed/third_party/2/sqlalchemy/inspection.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/log.pyi b/typeshed/third_party/2/sqlalchemy/log.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/log.pyi
rename to typeshed/third_party/2/sqlalchemy/log.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi b/typeshed/third_party/2/sqlalchemy/orm/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/orm/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/orm/session.pyi b/typeshed/third_party/2/sqlalchemy/orm/session.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/orm/session.pyi
rename to typeshed/third_party/2/sqlalchemy/orm/session.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/pool.pyi b/typeshed/third_party/2/sqlalchemy/pool.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/pool.pyi
rename to typeshed/third_party/2/sqlalchemy/pool.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/schema.pyi b/typeshed/third_party/2/sqlalchemy/schema.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/schema.pyi
rename to typeshed/third_party/2/sqlalchemy/schema.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/__init__.pyi b/typeshed/third_party/2/sqlalchemy/sql/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/annotation.pyi b/typeshed/third_party/2/sqlalchemy/sql/annotation.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/annotation.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/annotation.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/base.pyi b/typeshed/third_party/2/sqlalchemy/sql/base.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/base.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/base.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi b/typeshed/third_party/2/sqlalchemy/sql/ddl.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/ddl.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/ddl.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi b/typeshed/third_party/2/sqlalchemy/sql/dml.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/dml.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/dml.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/elements.pyi b/typeshed/third_party/2/sqlalchemy/sql/elements.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/elements.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/elements.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/expression.pyi b/typeshed/third_party/2/sqlalchemy/sql/expression.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/expression.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/expression.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/functions.pyi b/typeshed/third_party/2/sqlalchemy/sql/functions.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/functions.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/functions.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi b/typeshed/third_party/2/sqlalchemy/sql/naming.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/naming.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/naming.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi b/typeshed/third_party/2/sqlalchemy/sql/operators.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/operators.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi b/typeshed/third_party/2/sqlalchemy/sql/schema.pyi
similarity index 79%
rename from typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/schema.pyi
index acad50e..4e03ff2 100644
--- a/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
+++ b/typeshed/third_party/2/sqlalchemy/sql/schema.pyi
@@ -22,11 +22,22 @@ class SchemaItem(SchemaEventTarget, visitors.Visitable):
 
 class Table(DialectKWArgs, SchemaItem, TableClause):
     def __init__(self, name, metadata, *args, **kwargs): ...
-    def delete(self, *args, **kwargs): ...
-    def insert(self, *args, **kwargs): ...
-    def select(self, *args, **kwargs): ...
-    def update(self, *args, **kwargs): ...
+    @property
+    def key(self): ...
+    @property
+    def primary_key(self): ...
+    def __repr__(self): ...
+    def __str__(self): ...
+    def append_column(self, column): ...
+    def append_constraint(self, constraint): ...
+    def append_ddl_listener(self, event, listener): ...
+    def get_children(self, column_collections=True, schema_visitor=False, **kwargs): ...
+    def exists(self, bind=None): ...
+    def create(self, bind=None, checkfirst=False): ...
+    def drop(self, bind=None, checkfirst=False): ...
+    def tometadata(self, metadata, schema=None): ...
     c = ... # type: ColumnCollection
+    constraints = ... # type: Set[Constraint]
 
 
 class Column(SchemaItem, ColumnClause):
@@ -76,7 +87,17 @@ class Sequence(DefaultGenerator): ...
 class FetchedValue(_NotAColumnExpr, SchemaEventTarget): ...
 class DefaultClause(FetchedValue): ...
 class PassiveDefault(DefaultClause): ...
-class Constraint(DialectKWArgs, SchemaItem): ...
+
+class Constraint(DialectKWArgs, SchemaItem):
+    def __init__(self, name=None, deferrable=None, initially=None): ...
+    def __contains__(self, x): ...
+    def contains_column(self, col): ...
+    def keys(self): ...
+    def __add__(self, other): ...
+    def __iter__(self): ...
+    def __len__(self): ...
+    def copy(self, **kw): ...
+
 class ColumnCollectionMixin(object):
     columns = ...  # type: Any
     def __init__(self, *columns, **kw): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi b/typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
similarity index 78%
rename from typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
index 045170b..a3b1925 100644
--- a/typeshed/third_party/2.7/sqlalchemy/sql/selectable.pyi
+++ b/typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
@@ -4,6 +4,7 @@ from .elements import ClauseElement, TextClause, ClauseList, \
     Grouping, UnaryExpression, BindParameter
 from .annotation import Annotated
 from .visitors import Visitable
+from .. import util
 
 def subquery(alias, *args, **kwargs): ...
 def alias(selectable, name=..., flat: bool=...): ...
@@ -47,7 +48,20 @@ class Join(FromClause): ...
 class Alias(FromClause): ...
 class CTE(Generative, HasSuffixes, Alias): ...
 class FromGrouping(FromClause): ...
-class TableClause(Immutable, FromClause): ...
+
+class TableClause(Immutable, FromClause):
+    def __init__(self, name, *columns): ...
+    def _export_columns(self): ...
+    @util.memoized_property
+    def description(self): ...
+    def append_column(self, c): ...
+    def get_children(self, **kwargs): ...
+    def count(self, whereclause=None, **params): ...
+    def insert(self, values=None, inline=False, **kwargs): ...
+    def update(self, whereclause=None, values=None, inline=False, **kwargs): ...
+    def delete(self, whereclause=None, **kwargs): ...
+    @property
+    def _from_objects(self): ...
 
 class ForUpdateArg(ClauseElement): ...
 class SelectBase(Executable, FromClause): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/sqltypes.pyi b/typeshed/third_party/2/sqlalchemy/sql/sqltypes.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/sqltypes.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/sqltypes.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/type_api.pyi b/typeshed/third_party/2/sqlalchemy/sql/type_api.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/type_api.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/type_api.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/visitors.pyi b/typeshed/third_party/2/sqlalchemy/sql/visitors.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/sql/visitors.pyi
rename to typeshed/third_party/2/sqlalchemy/sql/visitors.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/types.pyi b/typeshed/third_party/2/sqlalchemy/types.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/types.pyi
rename to typeshed/third_party/2/sqlalchemy/types.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/__init__.pyi b/typeshed/third_party/2/sqlalchemy/util/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/util/__init__.pyi
rename to typeshed/third_party/2/sqlalchemy/util/__init__.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/_collections.pyi b/typeshed/third_party/2/sqlalchemy/util/_collections.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/util/_collections.pyi
rename to typeshed/third_party/2/sqlalchemy/util/_collections.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/compat.pyi b/typeshed/third_party/2/sqlalchemy/util/compat.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/util/compat.pyi
rename to typeshed/third_party/2/sqlalchemy/util/compat.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/deprecations.pyi b/typeshed/third_party/2/sqlalchemy/util/deprecations.pyi
similarity index 100%
rename from typeshed/third_party/2.7/sqlalchemy/util/deprecations.pyi
rename to typeshed/third_party/2/sqlalchemy/util/deprecations.pyi
diff --git a/typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi b/typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi
similarity index 98%
rename from typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi
rename to typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi
index c16b1f9..c688ba3 100644
--- a/typeshed/third_party/2.7/sqlalchemy/util/langhelpers.pyi
+++ b/typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi
@@ -48,7 +48,6 @@ def as_interface(obj, cls=..., methods=..., required=...): ...
 
 class memoized_property:
     fget = ... # type: Any
-    __doc__ = ... # type: Any
     __name__ = ... # type: Any
     def __init__(self, fget, doc=...) -> None: ...
     def __get__(self, obj, cls): ...
@@ -92,7 +91,6 @@ def assert_arg_type(arg, argtype, name): ...
 def dictlike_iteritems(dictlike): ...
 
 class classproperty:
-    __doc__ = ... # type: Any
     def __init__(self, fget, *arg, **kw) -> None: ...
     def __get__(desc, self, cls): ...
 
diff --git a/typeshed/third_party/2.7/thrift/Thrift.pyi b/typeshed/third_party/2/thrift/Thrift.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/Thrift.pyi
rename to typeshed/third_party/2/thrift/Thrift.pyi
diff --git a/typeshed/third_party/2.7/scribe/__init__.pyi b/typeshed/third_party/2/thrift/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/scribe/__init__.pyi
rename to typeshed/third_party/2/thrift/__init__.pyi
diff --git a/typeshed/third_party/2.7/thrift/protocol/TBinaryProtocol.pyi b/typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/protocol/TBinaryProtocol.pyi
rename to typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi
diff --git a/typeshed/third_party/2.7/thrift/protocol/TProtocol.pyi b/typeshed/third_party/2/thrift/protocol/TProtocol.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/protocol/TProtocol.pyi
rename to typeshed/third_party/2/thrift/protocol/TProtocol.pyi
diff --git a/typeshed/third_party/2.7/thrift/protocol/__init__.pyi b/typeshed/third_party/2/thrift/protocol/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/protocol/__init__.pyi
rename to typeshed/third_party/2/thrift/protocol/__init__.pyi
diff --git a/typeshed/third_party/2.7/thrift/transport/TSocket.pyi b/typeshed/third_party/2/thrift/transport/TSocket.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/transport/TSocket.pyi
rename to typeshed/third_party/2/thrift/transport/TSocket.pyi
diff --git a/typeshed/third_party/2.7/thrift/transport/TTransport.pyi b/typeshed/third_party/2/thrift/transport/TTransport.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/transport/TTransport.pyi
rename to typeshed/third_party/2/thrift/transport/TTransport.pyi
diff --git a/typeshed/third_party/2.7/thrift/transport/__init__.pyi b/typeshed/third_party/2/thrift/transport/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/transport/__init__.pyi
rename to typeshed/third_party/2/thrift/transport/__init__.pyi
diff --git a/typeshed/third_party/2.7/thrift/__init__.pyi b/typeshed/third_party/2/tornado/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/thrift/__init__.pyi
rename to typeshed/third_party/2/tornado/__init__.pyi
diff --git a/typeshed/third_party/2.7/tornado/concurrent.pyi b/typeshed/third_party/2/tornado/concurrent.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/concurrent.pyi
rename to typeshed/third_party/2/tornado/concurrent.pyi
diff --git a/typeshed/third_party/2.7/tornado/gen.pyi b/typeshed/third_party/2/tornado/gen.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/gen.pyi
rename to typeshed/third_party/2/tornado/gen.pyi
diff --git a/typeshed/third_party/2.7/tornado/httpclient.pyi b/typeshed/third_party/2/tornado/httpclient.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/httpclient.pyi
rename to typeshed/third_party/2/tornado/httpclient.pyi
diff --git a/typeshed/third_party/2.7/tornado/httpserver.pyi b/typeshed/third_party/2/tornado/httpserver.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/httpserver.pyi
rename to typeshed/third_party/2/tornado/httpserver.pyi
diff --git a/typeshed/third_party/2.7/tornado/httputil.pyi b/typeshed/third_party/2/tornado/httputil.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/httputil.pyi
rename to typeshed/third_party/2/tornado/httputil.pyi
diff --git a/typeshed/third_party/2.7/tornado/ioloop.pyi b/typeshed/third_party/2/tornado/ioloop.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/ioloop.pyi
rename to typeshed/third_party/2/tornado/ioloop.pyi
diff --git a/typeshed/third_party/2.7/tornado/locks.pyi b/typeshed/third_party/2/tornado/locks.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/locks.pyi
rename to typeshed/third_party/2/tornado/locks.pyi
diff --git a/typeshed/third_party/2.7/tornado/netutil.pyi b/typeshed/third_party/2/tornado/netutil.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/netutil.pyi
rename to typeshed/third_party/2/tornado/netutil.pyi
diff --git a/typeshed/third_party/2.7/tornado/tcpserver.pyi b/typeshed/third_party/2/tornado/tcpserver.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/tcpserver.pyi
rename to typeshed/third_party/2/tornado/tcpserver.pyi
diff --git a/typeshed/third_party/2.7/tornado/testing.pyi b/typeshed/third_party/2/tornado/testing.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/testing.pyi
rename to typeshed/third_party/2/tornado/testing.pyi
diff --git a/typeshed/third_party/2.7/tornado/util.pyi b/typeshed/third_party/2/tornado/util.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/util.pyi
rename to typeshed/third_party/2/tornado/util.pyi
diff --git a/typeshed/third_party/2.7/tornado/web.pyi b/typeshed/third_party/2/tornado/web.pyi
similarity index 100%
rename from typeshed/third_party/2.7/tornado/web.pyi
rename to typeshed/third_party/2/tornado/web.pyi
diff --git a/typeshed/third_party/2.7/yaml/__init__.pyi b/typeshed/third_party/2/yaml/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/__init__.pyi
rename to typeshed/third_party/2/yaml/__init__.pyi
diff --git a/typeshed/third_party/2.7/yaml/composer.pyi b/typeshed/third_party/2/yaml/composer.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/composer.pyi
rename to typeshed/third_party/2/yaml/composer.pyi
diff --git a/typeshed/third_party/2.7/yaml/constructor.pyi b/typeshed/third_party/2/yaml/constructor.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/constructor.pyi
rename to typeshed/third_party/2/yaml/constructor.pyi
diff --git a/typeshed/third_party/2.7/yaml/dumper.pyi b/typeshed/third_party/2/yaml/dumper.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/dumper.pyi
rename to typeshed/third_party/2/yaml/dumper.pyi
diff --git a/typeshed/third_party/2.7/yaml/emitter.pyi b/typeshed/third_party/2/yaml/emitter.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/emitter.pyi
rename to typeshed/third_party/2/yaml/emitter.pyi
diff --git a/typeshed/third_party/2.7/yaml/error.pyi b/typeshed/third_party/2/yaml/error.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/error.pyi
rename to typeshed/third_party/2/yaml/error.pyi
diff --git a/typeshed/third_party/2.7/yaml/events.pyi b/typeshed/third_party/2/yaml/events.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/events.pyi
rename to typeshed/third_party/2/yaml/events.pyi
diff --git a/typeshed/third_party/2.7/yaml/loader.pyi b/typeshed/third_party/2/yaml/loader.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/loader.pyi
rename to typeshed/third_party/2/yaml/loader.pyi
diff --git a/typeshed/third_party/2.7/yaml/nodes.pyi b/typeshed/third_party/2/yaml/nodes.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/nodes.pyi
rename to typeshed/third_party/2/yaml/nodes.pyi
diff --git a/typeshed/third_party/2.7/yaml/parser.pyi b/typeshed/third_party/2/yaml/parser.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/parser.pyi
rename to typeshed/third_party/2/yaml/parser.pyi
diff --git a/typeshed/third_party/2.7/yaml/reader.pyi b/typeshed/third_party/2/yaml/reader.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/reader.pyi
rename to typeshed/third_party/2/yaml/reader.pyi
diff --git a/typeshed/third_party/2.7/yaml/representer.pyi b/typeshed/third_party/2/yaml/representer.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/representer.pyi
rename to typeshed/third_party/2/yaml/representer.pyi
diff --git a/typeshed/third_party/2.7/yaml/resolver.pyi b/typeshed/third_party/2/yaml/resolver.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/resolver.pyi
rename to typeshed/third_party/2/yaml/resolver.pyi
diff --git a/typeshed/third_party/2.7/yaml/scanner.pyi b/typeshed/third_party/2/yaml/scanner.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/scanner.pyi
rename to typeshed/third_party/2/yaml/scanner.pyi
diff --git a/typeshed/third_party/2.7/yaml/serializer.pyi b/typeshed/third_party/2/yaml/serializer.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/serializer.pyi
rename to typeshed/third_party/2/yaml/serializer.pyi
diff --git a/typeshed/third_party/2.7/yaml/tokens.pyi b/typeshed/third_party/2/yaml/tokens.pyi
similarity index 100%
rename from typeshed/third_party/2.7/yaml/tokens.pyi
rename to typeshed/third_party/2/yaml/tokens.pyi
diff --git a/typeshed/third_party/2and3/mypy_extensions.pyi b/typeshed/third_party/2and3/mypy_extensions.pyi
new file mode 100644
index 0000000..6c57954
--- /dev/null
+++ b/typeshed/third_party/2and3/mypy_extensions.pyi
@@ -0,0 +1,6 @@
+from typing import Dict, Type, TypeVar
+
+T = TypeVar('T')
+
+
+def TypedDict(typename: str, fields: Dict[str, Type[T]]) -> Type[dict]: ...
diff --git a/typeshed/third_party/3/lxml/etree.pyi b/typeshed/third_party/3/lxml/etree.pyi
index dbf64db..bf7b150 100644
--- a/typeshed/third_party/3/lxml/etree.pyi
+++ b/typeshed/third_party/3/lxml/etree.pyi
@@ -4,7 +4,7 @@
 
 import typing
 from typing import Any, Dict, List, MutableMapping, Tuple, Union
-from typing import SupportsBytes
+from typing import Iterable, Iterator, SupportsBytes
 
 
 # We do *not* want `typing.AnyStr` because it is a `TypeVar`, which is an
@@ -17,11 +17,19 @@ DictAnyStr = Union[Dict[str, str], Dict[bytes, bytes]]
 Dict_Tuple2AnyStr_Any = Union[Dict[Tuple[str, str], Any], Tuple[bytes, bytes], Any]
 
 
-class _Element:
+class ElementChildIterator(Iterator['_Element']):
+    def __iter__(self) -> 'ElementChildIterator': ...
+    def __next__(self) -> '_Element': ...
+
+class _Element(Iterable['_Element']):
     def addprevious(self, element: '_Element') -> None:
         pass
 
     attrib = ...  # type: MutableMapping[str, str]
+    text = ... # type: AnyStr
+    tag = ...  # type: str
+    def append(self, element: '_Element') -> '_Element': ...
+    def __iter__(self) -> ElementChildIterator: ...
 
 class ElementBase(_Element):
     pass
diff --git a/typeshed/third_party/3/pkg_resources.pyi b/typeshed/third_party/3/pkg_resources.pyi
index a58a0a1..58ef738 100644
--- a/typeshed/third_party/3/pkg_resources.pyi
+++ b/typeshed/third_party/3/pkg_resources.pyi
@@ -226,12 +226,12 @@ def get_provider(package_or_requirement: str) -> IResourceProvider: ...
 def get_provider(package_or_requirement: Requirement) -> Distribution: ...
 
 class IMetadataProvider:
-    def has_metadata(name: str) -> bool: ...
-    def metadata_isdir(name: str) -> bool: ...
-    def metadata_listdir(name: str) -> List[str]: ...
-    def get_metadata(name: str) -> str: ...
-    def get_metadata_lines(name: str) -> Generator[List[str], None, None]: ...
-    def run_script(script_name: str, namespace: Dict[str, Any]) -> None: ...
+    def has_metadata(self, name: str) -> bool: ...
+    def metadata_isdir(self, name: str) -> bool: ...
+    def metadata_listdir(self, name: str) -> List[str]: ...
+    def get_metadata(self, name: str) -> str: ...
+    def get_metadata_lines(self, name: str) -> Generator[List[str], None, None]: ...
+    def run_script(self, script_name: str, namespace: Dict[str, Any]) -> None: ...
 
 
 class ResolutionError(Exception): ...
diff --git a/typeshed/third_party/3/requests/adapters.pyi b/typeshed/third_party/3/requests/adapters.pyi
index 109dc9a..d896f1a 100644
--- a/typeshed/third_party/3/requests/adapters.pyi
+++ b/typeshed/third_party/3/requests/adapters.pyi
@@ -1,6 +1,6 @@
 # Stubs for requests.adapters (Python 3)
 
-from typing import Any
+from typing import Any, Container, Union, Tuple
 from . import models
 from .packages.urllib3 import poolmanager
 from .packages.urllib3 import response
@@ -13,6 +13,7 @@ from . import cookies
 from . import exceptions
 from . import auth
 
+PreparedRequest = models.PreparedRequest
 Response = models.Response
 PoolManager = poolmanager.PoolManager
 proxy_from_url = poolmanager.proxy_from_url
@@ -43,10 +44,12 @@ DEFAULT_RETRIES = ...  # type: Any
 
 class BaseAdapter:
     def __init__(self) -> None: ...
-    # TODO: "request" parameter not actually supported, added to please mypy.
-    def send(self, request=...): ...
-    def close(self): ...
-
+    def send(self, request: PreparedRequest, stream=False,
+             timeout: Union[None, float, Tuple[float, float]]=None,
+             verify=False,
+             cert: Union[None, Union[str, bytes], Container[Union[str, bytes]]]=None
+             ) -> Response: ...
+    def close(self) -> None: ...
 class HTTPAdapter(BaseAdapter):
     __attrs__ = ...  # type: Any
     max_retries = ...  # type: Any
diff --git a/typeshed/third_party/3/requests/sessions.pyi b/typeshed/third_party/3/requests/sessions.pyi
index 9bc3a0e..d81191c 100644
--- a/typeshed/third_party/3/requests/sessions.pyi
+++ b/typeshed/third_party/3/requests/sessions.pyi
@@ -1,6 +1,7 @@
 # Stubs for requests.sessions (Python 3)
 
 from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable
+from . import adapters
 from . import auth
 from . import compat
 from . import cookies
@@ -14,6 +15,7 @@ from . import structures
 from . import adapters
 from . import status_codes
 
+BaseAdapter = adapters.BaseAdapter
 OrderedDict = compat.OrderedDict
 cookiejar_from_dict = cookies.cookiejar_from_dict
 extract_cookies_to_jar = cookies.extract_cookies_to_jar
@@ -99,6 +101,8 @@ class Session(SessionRedirectMixin):
     def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
     def get_adapter(self, url): ...
     def close(self) -> None: ...
-    def mount(self, prefix, adapter): ...
+    def mount(self, prefix:
+              Union[str, bytes],
+              adapter: BaseAdapter) -> None: ...
 
 def session() -> Session: ...
diff --git a/typeshed/third_party/3/typed_ast/ast27.pyi b/typeshed/third_party/3/typed_ast/ast27.pyi
index 8e88972..dd26314 100644
--- a/typeshed/third_party/3/typed_ast/ast27.pyi
+++ b/typeshed/third_party/3/typed_ast/ast27.pyi
@@ -2,19 +2,17 @@ import typing
 from typing import Any, Optional, Union, Generic, Iterator
 
 class NodeVisitor():
-    __doc__ = ...  # type: str
     def visit(self, node: AST) -> Any: ...
     def generic_visit(self, node: AST) -> None: ...
 
 class NodeTransformer(NodeVisitor):
-    __doc__ = ...  # type: str
     def generic_visit(self, node: AST) -> None: ...
 
 def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
 def copy_location(new_node: AST, old_node: AST) -> AST: ...
 def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
 def fix_missing_locations(node: AST) -> AST: ...
-def get_docstring(node: AST, clean: bool = ...) -> str: ...
+def get_docstring(node: AST, clean: bool = ...) -> Optional[bytes]: ...
 def increment_lineno(node: AST, n: int = ...) -> AST: ...
 def iter_child_nodes(node: AST) -> Iterator[AST]: ...
 def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ...
@@ -247,7 +245,7 @@ class Num(expr):
     n = ...  # type: Union[int, float]
 
 class Str(expr):
-    s = ...  # type: str
+    s = ...  # type: bytes
 
 class Attribute(expr):
     value = ...  # type: expr
diff --git a/typeshed/third_party/3/typed_ast/ast35.pyi b/typeshed/third_party/3/typed_ast/ast35.pyi
index 04e8a29..2a18ff7 100644
--- a/typeshed/third_party/3/typed_ast/ast35.pyi
+++ b/typeshed/third_party/3/typed_ast/ast35.pyi
@@ -2,12 +2,10 @@ import typing
 from typing import Any, Optional, Union, Generic, Iterator
 
 class NodeVisitor():
-    __doc__ = ...  # type: str
     def visit(self, node: AST) -> Any: ...
     def generic_visit(self, node: AST) -> None: ...
 
 class NodeTransformer(NodeVisitor):
-    __doc__ = ...  # type: str
     def generic_visit(self, node: AST) -> None: ...
 
 def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git



More information about the debian-med-commit mailing list